comment
stringlengths
22
3.02k
method_body
stringlengths
46
368k
target_code
stringlengths
0
181
method_body_after
stringlengths
12
368k
context_before
stringlengths
11
634k
context_after
stringlengths
11
632k
Thanks for the suggestion! I was unhappy about the various wait times and time-dependent checks, but didn't know a proper way to handle it.
public void testCachedStatsCleanedAfterCleanupInterval() throws Exception { final Duration cleanUpInterval2 = Duration.ofMillis(1); final long waitingTime = cleanUpInterval2.toMillis() + 10; Cache<JobVertexThreadInfoTracker.Key, JobVertexThreadInfoStats> vertexStatsCache = CacheBuilder.newBuilder() .concurrencyLevel(1) .expireAfterAccess(cleanUpInterval2.toMillis(), TimeUnit.MILLISECONDS) .recordStats() .build(); final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker( cleanUpInterval2, STATS_REFRESH_INTERVAL, vertexStatsCache, threadInfoStatsDefaultSample); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); tracker.getResultAvailableFuture().get(); Thread.sleep(waitingTime); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); assertEquals(1, vertexStatsCache.stats().evictionCount()); }
Thread.sleep(waitingTime);
public void testCachedStatsCleanedAfterCleanupInterval() throws Exception { final Duration shortCleanUpInterval = Duration.ofMillis(1); CountDownLatch cacheExpired = new CountDownLatch(1); Cache<JobVertexThreadInfoTracker.Key, JobVertexThreadInfoStats> vertexStatsCache = createCache(shortCleanUpInterval, new LatchRemovalListener<>(cacheExpired)); final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker( shortCleanUpInterval, STATS_REFRESH_INTERVAL, vertexStatsCache, threadInfoStatsDefaultSample); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); cacheExpired.await(); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); }
class JobVertexThreadInfoTrackerTest extends TestLogger { private static final int REQUEST_ID = 0; private static final ExecutionJobVertex EXECUTION_JOB_VERTEX = createExecutionJobVertex(); private static final ExecutionVertex[] TASK_VERTICES = EXECUTION_JOB_VERTEX.getTaskVertices(); private static final JobID JOB_ID = new JobID(); private static ThreadInfoSample threadInfoSample; private static JobVertexThreadInfoStats threadInfoStatsDefaultSample; private static final Duration CLEAN_UP_INTERVAL = Duration.ofSeconds(60); private static final Duration STATS_REFRESH_INTERVAL = Duration.ofSeconds(60); private static final Duration TIME_GAP = Duration.ofSeconds(60); private static final Duration SMALL_TIME_GAP = Duration.ofMillis(1); private static final Duration REQUEST_TIMEOUT = Duration.ofSeconds(10); private static final int NUMBER_OF_SAMPLES = 1; private static final int MAX_STACK_TRACE_DEPTH = 100; private static final Duration DELAY_BETWEEN_SAMPLES = Duration.ofMillis(50); @Rule public Timeout caseTimeout = new Timeout(10, TimeUnit.SECONDS); private static ScheduledExecutorService executor; @BeforeClass public static void setUp() { threadInfoSample = JvmUtils.createThreadInfoSample( Thread.currentThread().getId(), MAX_STACK_TRACE_DEPTH) .get(); threadInfoStatsDefaultSample = createThreadInfoStats( REQUEST_ID, SMALL_TIME_GAP, Collections.singletonList(threadInfoSample)); executor = Executors.newScheduledThreadPool(1); } @AfterClass public static void tearDown() { if (executor != null) { executor.shutdownNow(); } } /** Tests successful thread info stats request. */ @Test public void testGetThreadInfoStats() throws Exception { doInitialRequestAndVerifyResult(createThreadInfoTracker()); } /** Tests that cached result is reused within refresh interval. 
*/ @Test public void testCachedStatsNotUpdatedWithinRefreshInterval() throws Exception { final int requestId2 = 1; final JobVertexThreadInfoStats threadInfoStats2 = createThreadInfoStats(requestId2, TIME_GAP, null); final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker( STATS_REFRESH_INTERVAL, threadInfoStatsDefaultSample, threadInfoStats2); doInitialRequestAndVerifyResult(tracker); Optional<JobVertexThreadInfoStats> result = tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX); assertEquals(threadInfoStatsDefaultSample, result.get()); } /** Tests that cached result is NOT reused after refresh interval. */ @Test public void testCachedStatsUpdatedAfterRefreshInterval() throws Exception { final Duration threadInfoStatsRefreshInterval2 = Duration.ofMillis(1); final long waitingTime = threadInfoStatsRefreshInterval2.toMillis() + 10; final int requestId2 = 1; final JobVertexThreadInfoStats threadInfoStats2 = createThreadInfoStats( requestId2, TIME_GAP, Collections.singletonList(threadInfoSample)); final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker( threadInfoStatsRefreshInterval2, threadInfoStatsDefaultSample, threadInfoStats2); doInitialRequestAndVerifyResult(tracker); Thread.sleep(waitingTime); Optional<JobVertexThreadInfoStats> result = tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX); assertExpectedEqualsReceived(threadInfoStats2, result); assertNotSame(result.get(), threadInfoStatsDefaultSample); } /** Tests that cached results are removed within the cleanup interval. */ @Test /** Tests that cached results are NOT removed within the cleanup interval. 
*/ @Test public void testCachedStatsNotCleanedWithinCleanupInterval() throws Exception { final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker(); doInitialRequestAndVerifyResult(tracker); tracker.cleanUpVertexStatsCache(); assertExpectedEqualsReceived( threadInfoStatsDefaultSample, tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX)); } /** Tests that cached results are not served after the shutdown. */ @Test public void testShutDown() throws Exception { final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker(); doInitialRequestAndVerifyResult(tracker); tracker.shutDown(); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); } private void doInitialRequestAndVerifyResult( JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker) throws InterruptedException, ExecutionException { assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); tracker.getResultAvailableFuture().get(); assertExpectedEqualsReceived( threadInfoStatsDefaultSample, tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX)); } private static void assertExpectedEqualsReceived( JobVertexThreadInfoStats expected, Optional<JobVertexThreadInfoStats> receivedOptional) { assertTrue(receivedOptional.isPresent()); JobVertexThreadInfoStats received = receivedOptional.get(); assertEquals(expected.getRequestId(), received.getRequestId()); assertEquals(expected.getEndTime(), received.getEndTime()); assertEquals(TASK_VERTICES.length, received.getNumberOfSubtasks()); for (List<ThreadInfoSample> samples : received.getSamplesBySubtask().values()) { assertThat(samples.isEmpty(), is(false)); } } private JobVertexThreadInfoTracker<JobVertexThreadInfoStats> createThreadInfoTracker() { return createThreadInfoTracker(STATS_REFRESH_INTERVAL, threadInfoStatsDefaultSample); } private 
JobVertexThreadInfoTracker<JobVertexThreadInfoStats> createThreadInfoTracker( Duration statsRefreshInterval, JobVertexThreadInfoStats... stats) { return createThreadInfoTracker(CLEAN_UP_INTERVAL, statsRefreshInterval, null, stats); } private JobVertexThreadInfoTracker<JobVertexThreadInfoStats> createThreadInfoTracker( Duration cleanUpInterval, Duration statsRefreshInterval, Cache<JobVertexThreadInfoTracker.Key, JobVertexThreadInfoStats> vertexStatsCache, JobVertexThreadInfoStats... stats) { final ThreadInfoRequestCoordinator coordinator = new TestingThreadInfoRequestCoordinator(Runnable::run, REQUEST_TIMEOUT, stats); return JobVertexThreadInfoTrackerBuilder.newBuilder( JobVertexThreadInfoTrackerTest::createMockResourceManagerGateway, Function.identity(), executor, TestingUtils.TIMEOUT) .setCoordinator(coordinator) .setCleanUpInterval(cleanUpInterval) .setNumSamples(NUMBER_OF_SAMPLES) .setStatsRefreshInterval(statsRefreshInterval) .setDelayBetweenSamples(DELAY_BETWEEN_SAMPLES) .setMaxThreadInfoDepth(MAX_STACK_TRACE_DEPTH) .setVertexStatsCache(vertexStatsCache) .build(); } private static JobVertexThreadInfoStats createThreadInfoStats( int requestId, Duration timeGap, List<ThreadInfoSample> threadInfoSamples) { long startTime = System.currentTimeMillis(); long endTime = startTime + timeGap.toMillis(); final Map<ExecutionAttemptID, List<ThreadInfoSample>> threadInfoRatiosByTask = new HashMap<>(); for (ExecutionVertex vertex : TASK_VERTICES) { threadInfoRatiosByTask.put( vertex.getCurrentExecutionAttempt().getAttemptId(), threadInfoSamples); } return new JobVertexThreadInfoStats(requestId, startTime, endTime, threadInfoRatiosByTask); } private static ExecutionJobVertex createExecutionJobVertex() { try { JobVertex jobVertex = new JobVertex("testVertex"); jobVertex.setInvokableClass(AbstractInvokable.class); return ExecutionGraphTestUtils.getExecutionJobVertex(jobVertex); } catch (Exception e) { throw new RuntimeException("Failed to create ExecutionJobVertex."); } } 
private static CompletableFuture<ResourceManagerGateway> createMockResourceManagerGateway() { Function<ResourceID, CompletableFuture<TaskExecutorThreadInfoGateway>> function = (resourceID) -> CompletableFuture.completedFuture(null); TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); testingResourceManagerGateway.setRequestTaskExecutorGatewayFunction(function); return CompletableFuture.completedFuture(testingResourceManagerGateway); } /** * A {@link ThreadInfoRequestCoordinator} which returns the pre-generated thread info stats * directly. */ private static class TestingThreadInfoRequestCoordinator extends ThreadInfoRequestCoordinator { private final JobVertexThreadInfoStats[] jobVertexThreadInfoStats; private int counter = 0; TestingThreadInfoRequestCoordinator( Executor executor, Duration requestTimeout, JobVertexThreadInfoStats... jobVertexThreadInfoStats) { super(executor, requestTimeout); this.jobVertexThreadInfoStats = jobVertexThreadInfoStats; } @Override public CompletableFuture<JobVertexThreadInfoStats> triggerThreadInfoRequest( Map<ExecutionAttemptID, CompletableFuture<TaskExecutorThreadInfoGateway>> ignored1, int ignored2, Duration ignored3, int ignored4) { return CompletableFuture.completedFuture( jobVertexThreadInfoStats[(counter++) % jobVertexThreadInfoStats.length]); } } }
class JobVertexThreadInfoTrackerTest extends TestLogger { private static final int REQUEST_ID = 0; private static final ExecutionJobVertex EXECUTION_JOB_VERTEX = createExecutionJobVertex(); private static final ExecutionVertex[] TASK_VERTICES = EXECUTION_JOB_VERTEX.getTaskVertices(); private static final JobID JOB_ID = new JobID(); private static ThreadInfoSample threadInfoSample; private static JobVertexThreadInfoStats threadInfoStatsDefaultSample; private static final Duration CLEAN_UP_INTERVAL = Duration.ofSeconds(60); private static final Duration STATS_REFRESH_INTERVAL = Duration.ofSeconds(60); private static final Duration TIME_GAP = Duration.ofSeconds(60); private static final Duration SMALL_TIME_GAP = Duration.ofMillis(1); private static final Duration REQUEST_TIMEOUT = Duration.ofSeconds(10); private static final int NUMBER_OF_SAMPLES = 1; private static final int MAX_STACK_TRACE_DEPTH = 100; private static final Duration DELAY_BETWEEN_SAMPLES = Duration.ofMillis(50); @Rule public Timeout caseTimeout = new Timeout(10, TimeUnit.SECONDS); private static ScheduledExecutorService executor; @BeforeClass public static void setUp() { threadInfoSample = JvmUtils.createThreadInfoSample( Thread.currentThread().getId(), MAX_STACK_TRACE_DEPTH) .get(); threadInfoStatsDefaultSample = createThreadInfoStats( REQUEST_ID, SMALL_TIME_GAP, Collections.singletonList(threadInfoSample)); executor = Executors.newScheduledThreadPool(1); } @AfterClass public static void tearDown() { if (executor != null) { executor.shutdownNow(); } } /** Tests successful thread info stats request. */ @Test public void testGetThreadInfoStats() throws Exception { doInitialRequestAndVerifyResult(createThreadInfoTracker()); } /** Tests that cached result is reused within refresh interval. 
*/ @Test public void testCachedStatsNotUpdatedWithinRefreshInterval() throws Exception { final JobVertexThreadInfoStats unusedThreadInfoStats = createThreadInfoStats(1, TIME_GAP, null); final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker( STATS_REFRESH_INTERVAL, threadInfoStatsDefaultSample, unusedThreadInfoStats); doInitialRequestAndVerifyResult(tracker); Optional<JobVertexThreadInfoStats> result = tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX); assertEquals(threadInfoStatsDefaultSample, result.get()); } /** Tests that cached result is NOT reused after refresh interval. */ @Test public void testCachedStatsUpdatedAfterRefreshInterval() throws Exception { final Duration shortRefreshInterval = Duration.ofMillis(1); final JobVertexThreadInfoStats initialThreadInfoStats = createThreadInfoStats( Instant.now().minus(10, ChronoUnit.SECONDS), REQUEST_ID, Duration.ofMillis(5), Collections.singletonList(threadInfoSample)); final JobVertexThreadInfoStats threadInfoStatsAfterRefresh = createThreadInfoStats(1, TIME_GAP, Collections.singletonList(threadInfoSample)); CountDownLatch cacheRefreshed = new CountDownLatch(1); Cache<JobVertexThreadInfoTracker.Key, JobVertexThreadInfoStats> vertexStatsCache = createCache(CLEAN_UP_INTERVAL, new LatchRemovalListener<>(cacheRefreshed)); final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker( CLEAN_UP_INTERVAL, shortRefreshInterval, vertexStatsCache, initialThreadInfoStats, threadInfoStatsAfterRefresh); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); tracker.getResultAvailableFuture().get(); assertExpectedEqualsReceived( initialThreadInfoStats, tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX)); cacheRefreshed.await(); Optional<JobVertexThreadInfoStats> result = tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX); assertExpectedEqualsReceived(threadInfoStatsAfterRefresh, result); } /** Tests that cached results are removed 
within the cleanup interval. */ @Test /** Tests that cached results are NOT removed within the cleanup interval. */ @Test public void testCachedStatsNotCleanedWithinCleanupInterval() throws Exception { final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker(); doInitialRequestAndVerifyResult(tracker); tracker.cleanUpVertexStatsCache(); assertExpectedEqualsReceived( threadInfoStatsDefaultSample, tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX)); } /** Tests that cached results are not served after the shutdown. */ @Test public void testShutDown() throws Exception { final JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker = createThreadInfoTracker(); doInitialRequestAndVerifyResult(tracker); tracker.shutDown(); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); } private Cache<JobVertexThreadInfoTracker.Key, JobVertexThreadInfoStats> createCache( Duration cleanUpInterval, RemovalListener<JobVertexThreadInfoTracker.Key, JobVertexThreadInfoStats> removalListener) { return CacheBuilder.newBuilder() .concurrencyLevel(1) .expireAfterAccess(cleanUpInterval.toMillis(), TimeUnit.MILLISECONDS) .removalListener(removalListener) .build(); } private void doInitialRequestAndVerifyResult( JobVertexThreadInfoTracker<JobVertexThreadInfoStats> tracker) throws InterruptedException, ExecutionException { assertFalse(tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX).isPresent()); tracker.getResultAvailableFuture().get(); assertExpectedEqualsReceived( threadInfoStatsDefaultSample, tracker.getVertexStats(JOB_ID, EXECUTION_JOB_VERTEX)); } private static void assertExpectedEqualsReceived( JobVertexThreadInfoStats expected, Optional<JobVertexThreadInfoStats> receivedOptional) { assertTrue(receivedOptional.isPresent()); JobVertexThreadInfoStats received = receivedOptional.get(); assertEquals(expected.getRequestId(), 
received.getRequestId()); assertEquals(expected.getEndTime(), received.getEndTime()); assertEquals(TASK_VERTICES.length, received.getNumberOfSubtasks()); for (List<ThreadInfoSample> samples : received.getSamplesBySubtask().values()) { assertThat(samples.isEmpty(), is(false)); } } private JobVertexThreadInfoTracker<JobVertexThreadInfoStats> createThreadInfoTracker() { return createThreadInfoTracker(STATS_REFRESH_INTERVAL, threadInfoStatsDefaultSample); } private JobVertexThreadInfoTracker<JobVertexThreadInfoStats> createThreadInfoTracker( Duration statsRefreshInterval, JobVertexThreadInfoStats... stats) { return createThreadInfoTracker(CLEAN_UP_INTERVAL, statsRefreshInterval, null, stats); } private JobVertexThreadInfoTracker<JobVertexThreadInfoStats> createThreadInfoTracker( Duration cleanUpInterval, Duration statsRefreshInterval, Cache<JobVertexThreadInfoTracker.Key, JobVertexThreadInfoStats> vertexStatsCache, JobVertexThreadInfoStats... stats) { final ThreadInfoRequestCoordinator coordinator = new TestingThreadInfoRequestCoordinator(Runnable::run, REQUEST_TIMEOUT, stats); return JobVertexThreadInfoTrackerBuilder.newBuilder( JobVertexThreadInfoTrackerTest::createMockResourceManagerGateway, Function.identity(), executor, TestingUtils.TIMEOUT) .setCoordinator(coordinator) .setCleanUpInterval(cleanUpInterval) .setNumSamples(NUMBER_OF_SAMPLES) .setStatsRefreshInterval(statsRefreshInterval) .setDelayBetweenSamples(DELAY_BETWEEN_SAMPLES) .setMaxThreadInfoDepth(MAX_STACK_TRACE_DEPTH) .setVertexStatsCache(vertexStatsCache) .build(); } private static JobVertexThreadInfoStats createThreadInfoStats( int requestId, Duration timeGap, List<ThreadInfoSample> threadInfoSamples) { return createThreadInfoStats(Instant.now(), requestId, timeGap, threadInfoSamples); } private static JobVertexThreadInfoStats createThreadInfoStats( Instant startTime, int requestId, Duration timeGap, List<ThreadInfoSample> threadInfoSamples) { Instant endTime = startTime.plus(timeGap); final 
Map<ExecutionAttemptID, List<ThreadInfoSample>> threadInfoRatiosByTask = new HashMap<>(); for (ExecutionVertex vertex : TASK_VERTICES) { threadInfoRatiosByTask.put( vertex.getCurrentExecutionAttempt().getAttemptId(), threadInfoSamples); } return new JobVertexThreadInfoStats( requestId, startTime.toEpochMilli(), endTime.toEpochMilli(), threadInfoRatiosByTask); } private static ExecutionJobVertex createExecutionJobVertex() { try { JobVertex jobVertex = new JobVertex("testVertex"); jobVertex.setInvokableClass(AbstractInvokable.class); return ExecutionGraphTestUtils.getExecutionJobVertex(jobVertex); } catch (Exception e) { throw new RuntimeException("Failed to create ExecutionJobVertex."); } } private static CompletableFuture<ResourceManagerGateway> createMockResourceManagerGateway() { Function<ResourceID, CompletableFuture<TaskExecutorThreadInfoGateway>> function = (resourceID) -> CompletableFuture.completedFuture(null); TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); testingResourceManagerGateway.setRequestTaskExecutorGatewayFunction(function); return CompletableFuture.completedFuture(testingResourceManagerGateway); } /** * A {@link ThreadInfoRequestCoordinator} which returns the pre-generated thread info stats * directly. */ private static class TestingThreadInfoRequestCoordinator extends ThreadInfoRequestCoordinator { private final JobVertexThreadInfoStats[] jobVertexThreadInfoStats; private int counter = 0; TestingThreadInfoRequestCoordinator( Executor executor, Duration requestTimeout, JobVertexThreadInfoStats... 
jobVertexThreadInfoStats) { super(executor, requestTimeout); this.jobVertexThreadInfoStats = jobVertexThreadInfoStats; } @Override public CompletableFuture<JobVertexThreadInfoStats> triggerThreadInfoRequest( Map<ExecutionAttemptID, CompletableFuture<TaskExecutorThreadInfoGateway>> ignored1, int ignored2, Duration ignored3, int ignored4) { return CompletableFuture.completedFuture( jobVertexThreadInfoStats[(counter++) % jobVertexThreadInfoStats.length]); } } private static class LatchRemovalListener<K, V> implements RemovalListener<K, V> { private final CountDownLatch latch; private LatchRemovalListener(CountDownLatch latch) { this.latch = latch; } @Override public void onRemoval(@Nonnull RemovalNotification<K, V> removalNotification) { latch.countDown(); } } }
If we have many windows, we'll have many calls to ProcessElement and block sequentially, only overlapping closes with single window processing instead of multiple. Could we instead block if the # of closing but not yet closed writers exceeds some amount (which could be controlled by an option)?
public void processElement(ProcessContext c, BoundedWindow window) throws Exception { getDynamicDestinations().setSideInputAccessorFromProcessContext(c); Map<DestinationT, Writer<DestinationT, OutputT>> writers = Maps.newHashMap(); for (UserT input : c.element().getValue()) { DestinationT destination = getDynamicDestinations().getDestination(input); Writer<DestinationT, OutputT> writer = writers.get(destination); if (writer == null) { String uuid = UUID.randomUUID().toString(); LOG.info( "Opening writer {} for window {} pane {} destination {}", uuid, window, c.pane(), destination); writer = writeOperation.createWriter(); writer.setDestination(destination); writer.open(uuid); writers.put(destination, writer); } writeOrClose(writer, getDynamicDestinations().formatRecord(input)); } try { MoreFutures.get(MoreFutures.allAsList(closeFutures)); } finally { closeFutures.clear(); } for (Map.Entry<DestinationT, Writer<DestinationT, OutputT>> entry : writers.entrySet()) { int shard = c.element().getKey().getShardNumber(); checkArgument( shard != UNKNOWN_SHARDNUM, "Shard should have been set, but is unset for element %s", c.element()); Writer<DestinationT, OutputT> writer = entry.getValue(); deferredOutput.add( KV.of( c.timestamp(), new FileResult<>(writer.getOutputFile(), shard, window, c.pane(), entry.getKey()))); closeWriterInBackground(writer); } }
public void processElement(ProcessContext c, BoundedWindow window) throws Exception { getDynamicDestinations().setSideInputAccessorFromProcessContext(c); PaneInfo paneInfo = c.pane(); DestinationT destination = getDynamicDestinations().getDestination(c.element()); WriterKey<DestinationT> key = new WriterKey<>(window, c.pane(), destination); Writer<DestinationT, OutputT> writer = writers.get(key); if (writer == null) { if (getMaxNumWritersPerBundle() < 0 || writers.size() <= getMaxNumWritersPerBundle()) { String uuid = UUID.randomUUID().toString(); LOG.info( "Opening writer {} for window {} pane {} destination {}", uuid, window, paneInfo, destination); writer = writeOperation.createWriter(); writer.setDestination(destination); writer.open(uuid); writers.put(key, writer); LOG.debug("Done opening writer"); } else { if (spilledShardNum == UNKNOWN_SHARDNUM) { spilledShardNum = ThreadLocalRandom.current().nextInt(SPILLED_RECORD_SHARDING_FACTOR); } else { spilledShardNum = (spilledShardNum + 1) % SPILLED_RECORD_SHARDING_FACTOR; } c.output( unwrittenRecordsTag, KV.of( ShardedKey.of(hashDestination(destination, destinationCoder), spilledShardNum), c.element())); return; } } writeOrClose(writer, getDynamicDestinations().formatRecord(c.element())); }
class WriteUnshardedTempFilesFn extends DoFn<UserT, FileResult<DestinationT>> { private final @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag; private final Coder<DestinationT> destinationCoder; private @Nullable Map<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> writers; private int spilledShardNum = UNKNOWN_SHARDNUM; WriteUnshardedTempFilesFn( @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag, Coder<DestinationT> destinationCoder) { this.unwrittenRecordsTag = unwrittenRecordsTag; this.destinationCoder = destinationCoder; } @StartBundle public void startBundle(StartBundleContext c) { writers = Maps.newHashMap(); } @ProcessElement @FinishBundle public void finishBundle(FinishBundleContext c) throws Exception { for (Map.Entry<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> entry : writers.entrySet()) { WriterKey<DestinationT> key = entry.getKey(); Writer<DestinationT, OutputT> writer = entry.getValue(); try { writer.close(); } catch (Exception e) { writer.cleanup(); throw e; } BoundedWindow window = key.window; c.output( new FileResult<>( writer.getOutputFile(), UNKNOWN_SHARDNUM, window, key.paneInfo, key.destination), window.maxTimestamp(), window); } } }
class WriteUnshardedTempFilesFn extends DoFn<UserT, FileResult<DestinationT>> { private final @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag; private final Coder<DestinationT> destinationCoder; private @Nullable Map<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> writers; private int spilledShardNum = UNKNOWN_SHARDNUM; WriteUnshardedTempFilesFn( @Nullable TupleTag<KV<ShardedKey<Integer>, UserT>> unwrittenRecordsTag, Coder<DestinationT> destinationCoder) { this.unwrittenRecordsTag = unwrittenRecordsTag; this.destinationCoder = destinationCoder; } @StartBundle public void startBundle(StartBundleContext c) { writers = Maps.newHashMap(); } @ProcessElement @FinishBundle public void finishBundle(FinishBundleContext c) throws Exception { for (Map.Entry<WriterKey<DestinationT>, Writer<DestinationT, OutputT>> entry : writers.entrySet()) { WriterKey<DestinationT> key = entry.getKey(); Writer<DestinationT, OutputT> writer = entry.getValue(); try { writer.close(); } catch (Exception e) { writer.cleanup(); throw e; } BoundedWindow window = key.window; c.output( new FileResult<>( writer.getOutputFile(), UNKNOWN_SHARDNUM, window, key.paneInfo, key.destination), window.maxTimestamp(), window); } } }
`targetColumns` may not include some of the auto-increment key columns, but the union of `targetColumns` and all auto-increment key columns are all the table columns, is that OK?
public static void analyze(InsertStmt insertStmt, ConnectContext session) { QueryRelation query = insertStmt.getQueryStatement().getQueryRelation(); new QueryAnalyzer(session).analyze(insertStmt.getQueryStatement()); List<Table> tables = new ArrayList<>(); AnalyzerUtils.collectSpecifyExternalTables(insertStmt.getQueryStatement(), tables, Table::isHiveTable); tables.stream().map(table -> (HiveTable) table) .forEach(table -> table.useMetadataCache(false)); /* * Target table */ Table table; if (insertStmt.getTargetTable() != null) { table = insertStmt.getTargetTable(); } else { table = getTargetTable(insertStmt, session); } if (table instanceof OlapTable) { OlapTable olapTable = (OlapTable) table; List<Long> targetPartitionIds = Lists.newArrayList(); PartitionNames targetPartitionNames = insertStmt.getTargetPartitionNames(); if (insertStmt.isSpecifyPartitionNames()) { if (targetPartitionNames.getPartitionNames().isEmpty()) { throw new SemanticException("No partition specified in partition lists", targetPartitionNames.getPos()); } List<String> deduplicatePartitionNames = targetPartitionNames.getPartitionNames().stream().distinct().collect(Collectors.toList()); if (deduplicatePartitionNames.size() != targetPartitionNames.getPartitionNames().size()) { insertStmt.setTargetPartitionNames(new PartitionNames(targetPartitionNames.isTemp(), deduplicatePartitionNames, targetPartitionNames.getPartitionColNames(), targetPartitionNames.getPartitionColValues(), targetPartitionNames.getPos())); } for (String partitionName : deduplicatePartitionNames) { if (Strings.isNullOrEmpty(partitionName)) { throw new SemanticException("there are empty partition name", targetPartitionNames.getPos()); } Partition partition = olapTable.getPartition(partitionName, targetPartitionNames.isTemp()); if (partition == null) { throw new SemanticException("Unknown partition '%s' in table '%s'", partitionName, olapTable.getName(), targetPartitionNames.getPos()); } targetPartitionIds.add(partition.getId()); 
} } else if (insertStmt.isStaticKeyPartitionInsert()) { checkStaticKeyPartitionInsert(insertStmt, table, targetPartitionNames); } else { for (Partition partition : olapTable.getPartitions()) { targetPartitionIds.add(partition.getId()); } if (targetPartitionIds.isEmpty()) { throw new SemanticException("data cannot be inserted into table with empty partition." + "Use `SHOW PARTITIONS FROM %s` to see the currently partitions of this table. ", olapTable.getName()); } } insertStmt.setTargetPartitionIds(targetPartitionIds); } if (table.isIcebergTable() || table.isHiveTable()) { if (table.isHiveTable() && table.isUnPartitioned() && HiveWriteUtils.isS3Url(table.getTableLocation()) && insertStmt.isOverwrite()) { throw new SemanticException("Unsupported insert overwrite hive unpartitioned table with s3 location"); } if (table.isHiveTable() && ((HiveTable) table).getHiveTableType() != HiveTable.HiveTableType.MANAGED_TABLE && !session.getSessionVariable().enableWriteHiveExternalTable()) { throw new SemanticException("Only support to write hive managed table, tableType: " + ((HiveTable) table).getHiveTableType()); } PartitionNames targetPartitionNames = insertStmt.getTargetPartitionNames(); List<String> tablePartitionColumnNames = table.getPartitionColumnNames(); if (insertStmt.getTargetColumnNames() != null) { for (String partitionColName : tablePartitionColumnNames) { if (!insertStmt.getTargetColumnNames().contains(partitionColName)) { throw new SemanticException("Must include partition column %s", partitionColName); } } } else if (insertStmt.isStaticKeyPartitionInsert()) { checkStaticKeyPartitionInsert(insertStmt, table, targetPartitionNames); } List<Column> partitionColumns = tablePartitionColumnNames.stream() .map(table::getColumn) .collect(Collectors.toList()); for (Column column : partitionColumns) { if (isUnSupportedPartitionColumnType(column.getType())) { throw new SemanticException("Unsupported partition column type [%s] for %s table sink", 
column.getType().canonicalName(), table.getType()); } } } List<Column> targetColumns; Set<String> mentionedColumns = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER); if (insertStmt.getTargetColumnNames() == null) { if (table instanceof OlapTable) { targetColumns = new ArrayList<>(((OlapTable) table).getBaseSchemaWithoutGeneratedColumn()); mentionedColumns = ((OlapTable) table).getBaseSchemaWithoutGeneratedColumn().stream() .map(Column::getName).collect(Collectors.toSet()); } else { targetColumns = new ArrayList<>(table.getBaseSchema()); mentionedColumns = table.getBaseSchema().stream().map(Column::getName).collect(Collectors.toSet()); } } else { targetColumns = new ArrayList<>(); List<Column> autoIncrementKeyColumns = table.getBaseSchema().stream().filter(Column::isKey) .filter(Column::isAutoIncrement).collect(Collectors.toList()); int numSpecifiedKeyColumns = autoIncrementKeyColumns.size(); for (String colName : insertStmt.getTargetColumnNames()) { Column column = table.getColumn(colName); if (column == null) { throw new SemanticException("Unknown column '%s' in '%s'", colName, table.getName()); } if (column.isGeneratedColumn()) { throw new SemanticException("generated column '%s' can not be specified", colName); } if (!mentionedColumns.add(colName)) { throw new SemanticException("Column '%s' specified twice", colName); } if (column.isKey() && !column.isAutoIncrement()) { numSpecifiedKeyColumns++; } targetColumns.add(column); } if (table.isOlapTable()) { OlapTable olapTable = (OlapTable) table; if (olapTable.getKeysType().equals(KeysType.PRIMARY_KEYS)) { if (numSpecifiedKeyColumns != olapTable.getKeysNum()) { throw new SemanticException("should specify all key columns when insert to primary key table"); } if (targetColumns.size() < olapTable.getBaseSchemaWithoutGeneratedColumn().size()) { insertStmt.setUsePartialUpdate(); } } } } if (!insertStmt.usePartialUpdate()) { for (Column column : table.getBaseSchema()) { Column.DefaultValueType defaultValueType = 
column.getDefaultValueType(); if (defaultValueType == Column.DefaultValueType.NULL && !column.isAllowNull() && !column.isAutoIncrement() && !column.isGeneratedColumn() && !mentionedColumns.contains(column.getName())) { StringBuilder msg = new StringBuilder(); for (String s : mentionedColumns) { msg.append(" ").append(s).append(" "); } throw new SemanticException("'%s' must be explicitly mentioned in column permutation: %s", column.getName(), msg.toString()); } } } int mentionedColumnSize = mentionedColumns.size(); if ((table.isIcebergTable() || table.isHiveTable()) && insertStmt.isStaticKeyPartitionInsert()) { mentionedColumnSize -= table.getPartitionColumnNames().size(); } if (query.getRelationFields().size() != mentionedColumnSize) { throw new SemanticException("Column count doesn't match value count"); } if (query instanceof ValuesRelation) { ValuesRelation valuesRelation = (ValuesRelation) query; for (List<Expr> row : valuesRelation.getRows()) { for (int columnIdx = 0; columnIdx < row.size(); ++columnIdx) { Column column = targetColumns.get(columnIdx); Column.DefaultValueType defaultValueType = column.getDefaultValueType(); if (row.get(columnIdx) instanceof DefaultValueExpr && defaultValueType == Column.DefaultValueType.NULL && !column.isAutoIncrement()) { throw new SemanticException("Column has no default value, column=%s", column.getName()); } AnalyzerUtils.verifyNoAggregateFunctions(row.get(columnIdx), "Values"); AnalyzerUtils.verifyNoWindowFunctions(row.get(columnIdx), "Values"); } } } insertStmt.setTargetTable(table); if (session.getDumpInfo() != null) { session.getDumpInfo().addTable(insertStmt.getTableName().getDb(), table); } }
if (numSpecifiedKeyColumns != olapTable.getKeysNum()) {
public static void analyze(InsertStmt insertStmt, ConnectContext session) { QueryRelation query = insertStmt.getQueryStatement().getQueryRelation(); new QueryAnalyzer(session).analyze(insertStmt.getQueryStatement()); List<Table> tables = new ArrayList<>(); AnalyzerUtils.collectSpecifyExternalTables(insertStmt.getQueryStatement(), tables, Table::isHiveTable); tables.stream().map(table -> (HiveTable) table) .forEach(table -> table.useMetadataCache(false)); /* * Target table */ Table table; if (insertStmt.getTargetTable() != null) { table = insertStmt.getTargetTable(); } else { table = getTargetTable(insertStmt, session); } if (table instanceof OlapTable) { OlapTable olapTable = (OlapTable) table; List<Long> targetPartitionIds = Lists.newArrayList(); PartitionNames targetPartitionNames = insertStmt.getTargetPartitionNames(); if (insertStmt.isSpecifyPartitionNames()) { if (targetPartitionNames.getPartitionNames().isEmpty()) { throw new SemanticException("No partition specified in partition lists", targetPartitionNames.getPos()); } List<String> deduplicatePartitionNames = targetPartitionNames.getPartitionNames().stream().distinct().collect(Collectors.toList()); if (deduplicatePartitionNames.size() != targetPartitionNames.getPartitionNames().size()) { insertStmt.setTargetPartitionNames(new PartitionNames(targetPartitionNames.isTemp(), deduplicatePartitionNames, targetPartitionNames.getPartitionColNames(), targetPartitionNames.getPartitionColValues(), targetPartitionNames.getPos())); } for (String partitionName : deduplicatePartitionNames) { if (Strings.isNullOrEmpty(partitionName)) { throw new SemanticException("there are empty partition name", targetPartitionNames.getPos()); } Partition partition = olapTable.getPartition(partitionName, targetPartitionNames.isTemp()); if (partition == null) { throw new SemanticException("Unknown partition '%s' in table '%s'", partitionName, olapTable.getName(), targetPartitionNames.getPos()); } targetPartitionIds.add(partition.getId()); 
} } else if (insertStmt.isStaticKeyPartitionInsert()) { checkStaticKeyPartitionInsert(insertStmt, table, targetPartitionNames); } else { for (Partition partition : olapTable.getPartitions()) { targetPartitionIds.add(partition.getId()); } if (targetPartitionIds.isEmpty()) { throw new SemanticException("data cannot be inserted into table with empty partition." + "Use `SHOW PARTITIONS FROM %s` to see the currently partitions of this table. ", olapTable.getName()); } } insertStmt.setTargetPartitionIds(targetPartitionIds); } if (table.isIcebergTable() || table.isHiveTable()) { if (table.isHiveTable() && table.isUnPartitioned() && HiveWriteUtils.isS3Url(table.getTableLocation()) && insertStmt.isOverwrite()) { throw new SemanticException("Unsupported insert overwrite hive unpartitioned table with s3 location"); } if (table.isHiveTable() && ((HiveTable) table).getHiveTableType() != HiveTable.HiveTableType.MANAGED_TABLE && !session.getSessionVariable().enableWriteHiveExternalTable()) { throw new SemanticException("Only support to write hive managed table, tableType: " + ((HiveTable) table).getHiveTableType()); } PartitionNames targetPartitionNames = insertStmt.getTargetPartitionNames(); List<String> tablePartitionColumnNames = table.getPartitionColumnNames(); if (insertStmt.getTargetColumnNames() != null) { for (String partitionColName : tablePartitionColumnNames) { if (!insertStmt.getTargetColumnNames().contains(partitionColName)) { throw new SemanticException("Must include partition column %s", partitionColName); } } } else if (insertStmt.isStaticKeyPartitionInsert()) { checkStaticKeyPartitionInsert(insertStmt, table, targetPartitionNames); } List<Column> partitionColumns = tablePartitionColumnNames.stream() .map(table::getColumn) .collect(Collectors.toList()); for (Column column : partitionColumns) { if (isUnSupportedPartitionColumnType(column.getType())) { throw new SemanticException("Unsupported partition column type [%s] for %s table sink", 
column.getType().canonicalName(), table.getType()); } } } List<Column> targetColumns; Set<String> mentionedColumns = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER); if (insertStmt.getTargetColumnNames() == null) { if (table instanceof OlapTable) { targetColumns = new ArrayList<>(((OlapTable) table).getBaseSchemaWithoutGeneratedColumn()); mentionedColumns = ((OlapTable) table).getBaseSchemaWithoutGeneratedColumn().stream() .map(Column::getName).collect(Collectors.toSet()); } else { targetColumns = new ArrayList<>(table.getBaseSchema()); mentionedColumns = table.getBaseSchema().stream().map(Column::getName).collect(Collectors.toSet()); } } else { targetColumns = new ArrayList<>(); Set<String> requiredKeyColumns = table.getBaseSchema().stream().filter(Column::isKey) .filter(c -> !c.isAutoIncrement()).map(c -> c.getName().toLowerCase()).collect(Collectors.toSet()); for (String colName : insertStmt.getTargetColumnNames()) { Column column = table.getColumn(colName); if (column == null) { throw new SemanticException("Unknown column '%s' in '%s'", colName, table.getName()); } if (column.isGeneratedColumn()) { throw new SemanticException("generated column '%s' can not be specified", colName); } if (!mentionedColumns.add(colName)) { throw new SemanticException("Column '%s' specified twice", colName); } requiredKeyColumns.remove(colName.toLowerCase()); targetColumns.add(column); } if (table.isOlapTable()) { OlapTable olapTable = (OlapTable) table; if (olapTable.getKeysType().equals(KeysType.PRIMARY_KEYS)) { if (!requiredKeyColumns.isEmpty()) { String missingKeyColumns = String.join(",", requiredKeyColumns); ErrorReport.reportSemanticException(ErrorCode.ERR_MISSING_KEY_COLUMNS, missingKeyColumns); } if (targetColumns.size() < olapTable.getBaseSchemaWithoutGeneratedColumn().size()) { insertStmt.setUsePartialUpdate(); } } } } if (!insertStmt.usePartialUpdate()) { for (Column column : table.getBaseSchema()) { Column.DefaultValueType defaultValueType = 
column.getDefaultValueType(); if (defaultValueType == Column.DefaultValueType.NULL && !column.isAllowNull() && !column.isAutoIncrement() && !column.isGeneratedColumn() && !mentionedColumns.contains(column.getName())) { StringBuilder msg = new StringBuilder(); for (String s : mentionedColumns) { msg.append(" ").append(s).append(" "); } throw new SemanticException("'%s' must be explicitly mentioned in column permutation: %s", column.getName(), msg.toString()); } } } int mentionedColumnSize = mentionedColumns.size(); if ((table.isIcebergTable() || table.isHiveTable()) && insertStmt.isStaticKeyPartitionInsert()) { mentionedColumnSize -= table.getPartitionColumnNames().size(); } if (query.getRelationFields().size() != mentionedColumnSize) { throw new SemanticException("Column count doesn't match value count"); } if (query instanceof ValuesRelation) { ValuesRelation valuesRelation = (ValuesRelation) query; for (List<Expr> row : valuesRelation.getRows()) { for (int columnIdx = 0; columnIdx < row.size(); ++columnIdx) { Column column = targetColumns.get(columnIdx); Column.DefaultValueType defaultValueType = column.getDefaultValueType(); if (row.get(columnIdx) instanceof DefaultValueExpr && defaultValueType == Column.DefaultValueType.NULL && !column.isAutoIncrement()) { throw new SemanticException("Column has no default value, column=%s", column.getName()); } AnalyzerUtils.verifyNoAggregateFunctions(row.get(columnIdx), "Values"); AnalyzerUtils.verifyNoWindowFunctions(row.get(columnIdx), "Values"); } } } insertStmt.setTargetTable(table); if (session.getDumpInfo() != null) { session.getDumpInfo().addTable(insertStmt.getTableName().getDb(), table); } }
class InsertAnalyzer { private static void checkStaticKeyPartitionInsert(InsertStmt insertStmt, Table table, PartitionNames targetPartitionNames) { List<String> partitionColNames = targetPartitionNames.getPartitionColNames(); List<Expr> partitionColValues = targetPartitionNames.getPartitionColValues(); List<String> tablePartitionColumnNames = table.getPartitionColumnNames(); Preconditions.checkState(partitionColNames.size() == partitionColValues.size(), "Partition column names size must be equal to the partition column values size. %d vs %d", partitionColNames.size(), partitionColValues.size()); if (tablePartitionColumnNames.size() > partitionColNames.size()) { throw new SemanticException("Must include all %d partition columns in the partition clause", tablePartitionColumnNames.size()); } if (tablePartitionColumnNames.size() < partitionColNames.size()) { throw new SemanticException("Only %d partition columns can be included in the partition clause", tablePartitionColumnNames.size()); } Map<String, Long> frequencies = partitionColNames.stream() .collect(Collectors.groupingBy(Function.identity(), Collectors.counting())); Optional<Map.Entry<String, Long>> duplicateKey = frequencies.entrySet().stream() .filter(entry -> entry.getValue() > 1).findFirst(); if (duplicateKey.isPresent()) { throw new SemanticException("Found duplicate partition column name %s", duplicateKey.get().getKey()); } for (int i = 0; i < partitionColNames.size(); i++) { String actualName = partitionColNames.get(i); if (!AnalyzerUtils.containsIgnoreCase(tablePartitionColumnNames, actualName)) { throw new SemanticException("Can't find partition column %s", actualName); } Expr partitionValue = partitionColValues.get(i); if (!partitionValue.isLiteral()) { throw new SemanticException("partition value should be literal expression"); } if (partitionValue instanceof NullLiteral) { throw new SemanticException("partition value can't be null"); } LiteralExpr literalExpr = (LiteralExpr) partitionValue; Column 
column = table.getColumn(actualName); try { Expr expr = LiteralExpr.create(literalExpr.getStringValue(), column.getType()); insertStmt.getTargetPartitionNames().getPartitionColValues().set(i, expr); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } } private static Table getTargetTable(InsertStmt insertStmt, ConnectContext session) { if (insertStmt.useTableFunctionAsTargetTable()) { return insertStmt.makeTableFunctionTable(session.getSessionVariable()); } else if (insertStmt.useBlackHoleTableAsTargetTable()) { return insertStmt.makeBlackHoleTable(); } MetaUtils.normalizationTableName(session, insertStmt.getTableName()); String catalogName = insertStmt.getTableName().getCatalog(); String dbName = insertStmt.getTableName().getDb(); String tableName = insertStmt.getTableName().getTbl(); try { MetaUtils.checkCatalogExistAndReport(catalogName); } catch (AnalysisException e) { ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_CATALOG_ERROR, catalogName); } Table table = MetaUtils.getTable(catalogName, dbName, tableName); if (table instanceof MaterializedView && !insertStmt.isSystem()) { throw new SemanticException( "The data of '%s' cannot be inserted because '%s' is a materialized view," + "and the data of materialized view must be consistent with the base table.", insertStmt.getTableName().getTbl(), insertStmt.getTableName().getTbl()); } if (insertStmt.isOverwrite()) { if (!(table instanceof OlapTable) && !table.isIcebergTable() && !table.isHiveTable()) { throw unsupportedException("Only support insert overwrite olap/iceberg/hive table"); } if (table instanceof OlapTable && ((OlapTable) table).getState() != NORMAL) { String msg = String.format("table state is %s, please wait to insert overwrite until table state is normal", ((OlapTable) table).getState()); throw unsupportedException(msg); } } if (!table.supportInsert()) { if (table.isIcebergTable() || table.isHiveTable()) { throw unsupportedException(String.format("Only support insert 
into %s table with parquet file format", table.getType())); } throw unsupportedException("Only support insert into olap/mysql/iceberg/hive table"); } if ((table.isHiveTable() || table.isIcebergTable()) && CatalogMgr.isInternalCatalog(catalogName)) { throw unsupportedException(String.format("Doesn't support %s table sink in the internal catalog. " + "You need to use %s catalog.", table.getType(), table.getType())); } return table; } public static boolean isUnSupportedPartitionColumnType(Type type) { return type.isFloat() || type.isDecimalOfAnyVersion() || type.isDatetime(); } }
class InsertAnalyzer { private static void checkStaticKeyPartitionInsert(InsertStmt insertStmt, Table table, PartitionNames targetPartitionNames) { List<String> partitionColNames = targetPartitionNames.getPartitionColNames(); List<Expr> partitionColValues = targetPartitionNames.getPartitionColValues(); List<String> tablePartitionColumnNames = table.getPartitionColumnNames(); Preconditions.checkState(partitionColNames.size() == partitionColValues.size(), "Partition column names size must be equal to the partition column values size. %d vs %d", partitionColNames.size(), partitionColValues.size()); if (tablePartitionColumnNames.size() > partitionColNames.size()) { throw new SemanticException("Must include all %d partition columns in the partition clause", tablePartitionColumnNames.size()); } if (tablePartitionColumnNames.size() < partitionColNames.size()) { throw new SemanticException("Only %d partition columns can be included in the partition clause", tablePartitionColumnNames.size()); } Map<String, Long> frequencies = partitionColNames.stream() .collect(Collectors.groupingBy(Function.identity(), Collectors.counting())); Optional<Map.Entry<String, Long>> duplicateKey = frequencies.entrySet().stream() .filter(entry -> entry.getValue() > 1).findFirst(); if (duplicateKey.isPresent()) { throw new SemanticException("Found duplicate partition column name %s", duplicateKey.get().getKey()); } for (int i = 0; i < partitionColNames.size(); i++) { String actualName = partitionColNames.get(i); if (!AnalyzerUtils.containsIgnoreCase(tablePartitionColumnNames, actualName)) { throw new SemanticException("Can't find partition column %s", actualName); } Expr partitionValue = partitionColValues.get(i); if (!partitionValue.isLiteral()) { throw new SemanticException("partition value should be literal expression"); } LiteralExpr literalExpr = (LiteralExpr) partitionValue; Column column = table.getColumn(actualName); try { Type type = literalExpr.isConstantNull() ? 
Type.NULL : column.getType(); Expr expr = LiteralExpr.create(literalExpr.getStringValue(), type); insertStmt.getTargetPartitionNames().getPartitionColValues().set(i, expr); } catch (AnalysisException e) { throw new SemanticException(e.getMessage()); } } } private static Table getTargetTable(InsertStmt insertStmt, ConnectContext session) { if (insertStmt.useTableFunctionAsTargetTable()) { return insertStmt.makeTableFunctionTable(session.getSessionVariable()); } else if (insertStmt.useBlackHoleTableAsTargetTable()) { return insertStmt.makeBlackHoleTable(); } MetaUtils.normalizationTableName(session, insertStmt.getTableName()); String catalogName = insertStmt.getTableName().getCatalog(); String dbName = insertStmt.getTableName().getDb(); String tableName = insertStmt.getTableName().getTbl(); try { MetaUtils.checkCatalogExistAndReport(catalogName); } catch (AnalysisException e) { ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_CATALOG_ERROR, catalogName); } Table table = MetaUtils.getTable(catalogName, dbName, tableName); if (table instanceof MaterializedView && !insertStmt.isSystem()) { throw new SemanticException( "The data of '%s' cannot be inserted because '%s' is a materialized view," + "and the data of materialized view must be consistent with the base table.", insertStmt.getTableName().getTbl(), insertStmt.getTableName().getTbl()); } if (insertStmt.isOverwrite()) { if (!(table instanceof OlapTable) && !table.isIcebergTable() && !table.isHiveTable()) { throw unsupportedException("Only support insert overwrite olap/iceberg/hive table"); } if (table instanceof OlapTable && ((OlapTable) table).getState() != NORMAL) { String msg = String.format("table state is %s, please wait to insert overwrite until table state is normal", ((OlapTable) table).getState()); throw unsupportedException(msg); } } if (!table.supportInsert()) { if (table.isIcebergTable() || table.isHiveTable()) { throw unsupportedException(String.format("Only support insert into %s table with 
parquet file format", table.getType())); } throw unsupportedException("Only support insert into olap/mysql/iceberg/hive table"); } if ((table.isHiveTable() || table.isIcebergTable()) && CatalogMgr.isInternalCatalog(catalogName)) { throw unsupportedException(String.format("Doesn't support %s table sink in the internal catalog. " + "You need to use %s catalog.", table.getType(), table.getType())); } return table; } public static boolean isUnSupportedPartitionColumnType(Type type) { return type.isFloat() || type.isDecimalOfAnyVersion() || type.isDatetime(); } }
You should be able to remove a key that isn't there so you won't need to filter for keys that exist since that is expected to be slow if we need to read them all from the runner. If we knew that we had them all in memory already then it would be worthwhile to filter upfront.
public void asyncClose() throws Exception { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); if (!isCleared && pendingRemoves.isEmpty() && pendingAdds.isEmpty()) { isClosed = true; return; } if (isCleared) { beamFnStateClient.handle( keysStateRequest.toBuilder().setClear(StateClearRequest.getDefaultInstance()), new CompletableFuture<>()); } else if (!pendingRemoves.isEmpty()) { Iterable<K> removeKeys = Iterables.filter(getPersistedKeys(), pendingRemoves::contains); for (K key : removeKeys) { beamFnStateClient.handle( createUserStateRequest(key) .toBuilder() .setClear(StateClearRequest.getDefaultInstance()), new CompletableFuture<>()); } } if (!pendingAdds.isEmpty()) { for (K key : pendingAdds.keySet()) { beamFnStateClient.handle( createUserStateRequest(key) .toBuilder() .setAppend( StateAppendRequest.newBuilder().setData(encodeValues(pendingAdds.get(key)))), new CompletableFuture<>()); } } isClosed = true; }
Iterable<K> removeKeys = Iterables.filter(getPersistedKeys(), pendingRemoves::contains);
public void asyncClose() throws Exception { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); isClosed = true; if (!isCleared && pendingRemoves.isEmpty() && pendingAdds.isEmpty()) { return; } if (isCleared) { beamFnStateClient.handle( keysStateRequest.toBuilder().setClear(StateClearRequest.getDefaultInstance()), new CompletableFuture<>()); } else if (!pendingRemoves.isEmpty()) { for (K key : pendingRemoves) { beamFnStateClient.handle( createUserStateRequest(key) .toBuilder() .setClear(StateClearRequest.getDefaultInstance()), new CompletableFuture<>()); } } if (!pendingAdds.isEmpty()) { for (Map.Entry<K, List<V>> entry : pendingAdds.entrySet()) { beamFnStateClient.handle( createUserStateRequest(entry.getKey()) .toBuilder() .setAppend(StateAppendRequest.newBuilder().setData(encodeValues(entry.getValue()))), new CompletableFuture<>()); } } }
class MultimapUserState<K, V> { private final BeamFnStateClient beamFnStateClient; private final Coder<K> mapKeyCoder; private final Coder<V> valueCoder; private final String stateId; private final StateRequest keysStateRequest; private final StateRequest userStateRequest; private boolean isClosed; private boolean isCleared; private Set<K> pendingRemoves = Sets.newHashSet(); private Multimap<K, V> pendingAdds = ArrayListMultimap.create(); private Set<K> negativeCache = Sets.newHashSet(); private Multimap<K, V> persistedValues = ArrayListMultimap.create(); private @Nullable Iterable<K> persistedKeys = null; public MultimapUserState( BeamFnStateClient beamFnStateClient, String instructionId, String pTransformId, String stateId, ByteString encodedWindow, ByteString encodedKey, Coder<K> mapKeyCoder, Coder<V> valueCoder) { this.beamFnStateClient = beamFnStateClient; this.mapKeyCoder = mapKeyCoder; this.valueCoder = valueCoder; this.stateId = stateId; StateRequest.Builder keysStateRequestBuilder = StateRequest.newBuilder(); keysStateRequestBuilder .setInstructionId(instructionId) .getStateKeyBuilder() .getMultimapKeysUserStateBuilder() .setTransformId(pTransformId) .setUserStateId(stateId) .setKey(encodedKey) .setWindow(encodedWindow); keysStateRequest = keysStateRequestBuilder.build(); StateRequest.Builder userStateRequestBuilder = StateRequest.newBuilder(); userStateRequestBuilder .setInstructionId(instructionId) .getStateKeyBuilder() .getMultimapUserStateBuilder() .setTransformId(pTransformId) .setUserStateId(stateId) .setWindow(encodedWindow) .setKey(encodedKey); userStateRequest = userStateRequestBuilder.build(); } public void clear() { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); isCleared = true; persistedValues = ArrayListMultimap.create(); persistedKeys = null; pendingRemoves = Sets.newHashSet(); pendingAdds = ArrayListMultimap.create(); } /* * Returns an iterable of the values 
associated with key in this multimap, if any. * If there are no values, this returns an empty collection, not null. */ public Iterable<V> get(@NonNull K key) { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); Collection<V> pendingValues = Collections.unmodifiableCollection(Lists.newArrayList(pendingAdds.get(key))); if (isCleared || pendingRemoves.contains(key)) { return pendingValues; } Iterable<V> persistedValues = getPersistedValues(key); return Iterables.concat(persistedValues, pendingValues); } @SuppressWarnings({ "nullness" }) /* * Returns an iterables containing all distinct keys in this multimap. */ public Iterable<K> keys() { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); if (isCleared) { return Collections.unmodifiableCollection(Sets.newHashSet(pendingAdds.keySet())); } Set<K> keys = Sets.newHashSet(getPersistedKeys()); keys.removeAll(pendingRemoves); keys.addAll(pendingAdds.keySet()); return Collections.unmodifiableCollection(keys); } /* * Store a key-value pair in the multimap. * Allows duplicate key-value pairs. */ public void put(K key, V value) { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); pendingAdds.put(key, value); } /* * Removes all values for this key in the multimap. 
*/ public void remove(@NonNull K key) { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); pendingAdds.removeAll(key); if (!isCleared) { pendingRemoves.add(key); } } @SuppressWarnings({ "nullness" }) private ByteString encodeValues(Iterable<V> values) { try { ByteString.Output output = ByteString.newOutput(); for (V value : values) { valueCoder.encode(value, output); } return output.toByteString(); } catch (IOException e) { throw new IllegalStateException( String.format("Failed to encode values for multimap user state id %s.", stateId), e); } } private StateRequest createUserStateRequest(K key) { try { ByteString.Output output = ByteString.newOutput(); mapKeyCoder.encode(key, output); StateRequest.Builder request = userStateRequest.toBuilder(); request.getStateKeyBuilder().getMultimapUserStateBuilder().setMapKey(output.toByteString()); return request.build(); } catch (IOException e) { throw new IllegalStateException( String.format("Failed to encode key for multimap user state id %s.", stateId), e); } } private Iterable<V> getPersistedValues(@NonNull K key) { if (negativeCache.contains(key)) { return Collections.emptyList(); } if (persistedValues.get(key).isEmpty()) { Iterable<V> values = StateFetchingIterators.readAllAndDecodeStartingFrom( beamFnStateClient, createUserStateRequest(key), valueCoder); if (Iterables.isEmpty(values)) { negativeCache.add(key); } persistedValues.putAll(key, values); } return Iterables.unmodifiableIterable(persistedValues.get(key)); } private Iterable<K> getPersistedKeys() { if (persistedKeys == null) { Iterable<K> keys = StateFetchingIterators.readAllAndDecodeStartingFrom( beamFnStateClient, keysStateRequest, mapKeyCoder); persistedKeys = Iterables.unmodifiableIterable(keys); } return persistedKeys; } }
class MultimapUserState<K, V> { private final BeamFnStateClient beamFnStateClient; private final Coder<K> mapKeyCoder; private final Coder<V> valueCoder; private final String stateId; private final StateRequest keysStateRequest; private final StateRequest userStateRequest; private boolean isClosed; private boolean isCleared; private HashSet<K> pendingRemoves = Sets.newHashSet(); private HashMap<K, List<V>> pendingAdds = Maps.newHashMap(); private HashSet<K> negativeCache = Sets.newHashSet(); private Multimap<K, V> persistedValues = ArrayListMultimap.create(); private @Nullable Iterable<K> persistedKeys = null; public MultimapUserState( BeamFnStateClient beamFnStateClient, String instructionId, String pTransformId, String stateId, ByteString encodedWindow, ByteString encodedKey, Coder<K> mapKeyCoder, Coder<V> valueCoder) { this.beamFnStateClient = beamFnStateClient; this.mapKeyCoder = mapKeyCoder; this.valueCoder = valueCoder; this.stateId = stateId; StateRequest.Builder keysStateRequestBuilder = StateRequest.newBuilder(); keysStateRequestBuilder .setInstructionId(instructionId) .getStateKeyBuilder() .getMultimapKeysUserStateBuilder() .setTransformId(pTransformId) .setUserStateId(stateId) .setKey(encodedKey) .setWindow(encodedWindow); keysStateRequest = keysStateRequestBuilder.build(); StateRequest.Builder userStateRequestBuilder = StateRequest.newBuilder(); userStateRequestBuilder .setInstructionId(instructionId) .getStateKeyBuilder() .getMultimapUserStateBuilder() .setTransformId(pTransformId) .setUserStateId(stateId) .setWindow(encodedWindow) .setKey(encodedKey); userStateRequest = userStateRequestBuilder.build(); } public void clear() { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); isCleared = true; persistedValues = ArrayListMultimap.create(); persistedKeys = null; pendingRemoves = Sets.newHashSet(); pendingAdds = Maps.newHashMap(); negativeCache = Sets.newHashSet(); } /* * Returns 
an iterable of the values associated with key in this multimap, if any. * If there are no values, this returns an empty collection, not null. */ public Iterable<V> get(K key) { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); List<V> pendingAddValues = pendingAdds.getOrDefault(key, Collections.emptyList()); Collection<V> pendingValues = Collections.unmodifiableCollection(pendingAddValues.subList(0, pendingAddValues.size())); if (isCleared || pendingRemoves.contains(key)) { return pendingValues; } Iterable<V> persistedValues = getPersistedValues(key); return Iterables.concat(persistedValues, pendingValues); } @SuppressWarnings({ "nullness" }) /* * Returns an iterables containing all distinct keys in this multimap. */ public Iterable<K> keys() { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); if (isCleared) { return Collections.unmodifiableCollection(Lists.newArrayList(pendingAdds.keySet())); } Set<K> keys = Sets.newHashSet(getPersistedKeys()); keys.removeAll(pendingRemoves); keys.addAll(pendingAdds.keySet()); return Collections.unmodifiableCollection(keys); } /* * Store a key-value pair in the multimap. * Allows duplicate key-value pairs. */ public void put(K key, V value) { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); pendingAdds.putIfAbsent(key, new ArrayList<>()); pendingAdds.get(key).add(value); } /* * Removes all values for this key in the multimap. 
*/ public void remove(K key) { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); pendingAdds.remove(key); if (!isCleared) { pendingRemoves.add(key); } } @SuppressWarnings({ "nullness" }) private ByteString encodeValues(Iterable<V> values) { try { ByteString.Output output = ByteString.newOutput(); for (V value : values) { valueCoder.encode(value, output); } return output.toByteString(); } catch (IOException e) { throw new IllegalStateException( String.format("Failed to encode values for multimap user state id %s.", stateId), e); } } private StateRequest createUserStateRequest(K key) { try { ByteString.Output output = ByteString.newOutput(); mapKeyCoder.encode(key, output); StateRequest.Builder request = userStateRequest.toBuilder(); request.getStateKeyBuilder().getMultimapUserStateBuilder().setMapKey(output.toByteString()); return request.build(); } catch (IOException e) { throw new IllegalStateException( String.format("Failed to encode key for multimap user state id %s.", stateId), e); } } private Iterable<V> getPersistedValues(K key) { if (negativeCache.contains(key)) { return Collections.emptyList(); } if (persistedValues.get(key).isEmpty()) { Iterable<V> values = StateFetchingIterators.readAllAndDecodeStartingFrom( beamFnStateClient, createUserStateRequest(key), valueCoder); if (Iterables.isEmpty(values)) { negativeCache.add(key); } persistedValues.putAll(key, values); } return Iterables.unmodifiableIterable(persistedValues.get(key)); } private Iterable<K> getPersistedKeys() { checkState(!isCleared); if (persistedKeys == null) { Iterable<K> keys = StateFetchingIterators.readAllAndDecodeStartingFrom( beamFnStateClient, keysStateRequest, mapKeyCoder); persistedKeys = Iterables.unmodifiableIterable(keys); } return persistedKeys; } }
Required, protobuf Message not allow to use `null`
private void startCDCClient() { ImportDataSourceParameter importDataSourceParam = new ImportDataSourceParameter(appendExtraParam(getActualJdbcUrlTemplate(DS_4, false, 0)), getUsername(), getPassword()); StartCDCClientParameter parameter = new StartCDCClientParameter(importDataSourceParam); parameter.setAddress("localhost"); parameter.setPort(getContainerComposer().getProxyCDCPort()); parameter.setUsername(ProxyContainerConstants.USERNAME); parameter.setPassword(ProxyContainerConstants.PASSWORD); parameter.setDatabase("sharding_db"); parameter.setFull(true); String schema = ""; if (getDatabaseType().isSchemaAvailable()) { schema = "test"; } parameter.setSchemaTables(Collections.singletonList(SchemaTable.newBuilder().setTable(getSourceTableOrderName()).setSchema(schema).build())); parameter.setDatabaseType(getDatabaseType().getType()); CompletableFuture.runAsync(() -> new CDCClient(parameter).start(), executor).whenComplete((unused, throwable) -> { if (null != throwable) { log.error("cdc client sync failed, ", throwable); } throw new RuntimeException(throwable); }); }
String schema = "";
private void startCDCClient() { ImportDataSourceParameter importDataSourceParam = new ImportDataSourceParameter(appendExtraParam(getActualJdbcUrlTemplate(DS_4, false, 0)), getUsername(), getPassword()); StartCDCClientParameter parameter = new StartCDCClientParameter(importDataSourceParam); parameter.setAddress("localhost"); parameter.setPort(getContainerComposer().getProxyCDCPort()); parameter.setUsername(ProxyContainerConstants.USERNAME); parameter.setPassword(ProxyContainerConstants.PASSWORD); parameter.setDatabase("sharding_db"); parameter.setFull(true); String schema = getDatabaseType().isSchemaAvailable() ? "test" : ""; parameter.setSchemaTables(Collections.singletonList(SchemaTable.newBuilder().setTable(getSourceTableOrderName()).setSchema(schema).build())); parameter.setDatabaseType(getDatabaseType().getType()); CompletableFuture.runAsync(() -> new CDCClient(parameter).start(), executor).whenComplete((unused, throwable) -> { if (null != throwable) { log.error("cdc client sync failed, ", throwable); } }); }
class CDCE2EIT extends PipelineBaseE2EIT { private static final String REGISTER_STORAGE_UNIT_SQL = "REGISTER STORAGE UNIT ds_0 ( URL='${ds0}', USER='${user}', PASSWORD='${password}')," + "ds_1 ( URL='${ds1}', USER='${user}', PASSWORD='${password}')"; private static final String CREATE_SHARDING_RULE_SQL = "CREATE SHARDING TABLE RULE t_order(" + "STORAGE_UNITS(ds_0,ds_1)," + "SHARDING_COLUMN=user_id," + "TYPE(NAME='hash_mod',PROPERTIES('sharding-count'='4'))," + "KEY_GENERATE_STRATEGY(COLUMN=order_id,TYPE(NAME='snowflake'))" + ")"; private final ExecutorService executor = Executors.newSingleThreadExecutor(); public CDCE2EIT(final PipelineTestParameter testParam) { super(testParam); } @Parameters(name = "{0}") public static Collection<PipelineTestParameter> getTestParameters() { Collection<PipelineTestParameter> result = new LinkedList<>(); if (PipelineBaseE2EIT.ENV.getItEnvType() == PipelineEnvTypeEnum.NONE) { return result; } MySQLDatabaseType mysqlDatabaseType = new MySQLDatabaseType(); for (String each : PipelineBaseE2EIT.ENV.listStorageContainerImages(mysqlDatabaseType)) { result.add(new PipelineTestParameter(mysqlDatabaseType, each, "env/scenario/general/mysql.xml")); } OpenGaussDatabaseType openGaussDatabaseType = new OpenGaussDatabaseType(); for (String each : PipelineBaseE2EIT.ENV.listStorageContainerImages(openGaussDatabaseType)) { result.add(new PipelineTestParameter(openGaussDatabaseType, each, "env/scenario/general/postgresql.xml")); } return result; } @Override protected String getSourceTableOrderName() { return "t_order"; } @Test public void assertCDCDataImportSuccess() throws SQLException, InterruptedException { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); initEnvironment(getDatabaseType(), new CDCJobType()); registerStorageUnit(); createOrderTableRule(); try (Connection connection = getProxyDataSource().getConnection()) { initSchemaAndTable(connection); } Pair<List<Object[]>, List<Object[]>> dataPair = 
PipelineCaseHelper.generateFullInsertData(getDatabaseType(), 20); log.info("init data begin: {}", LocalDateTime.now()); DataSourceExecuteUtil.execute(getProxyDataSource(), getExtraSQLCommand().getFullInsertOrder(getSourceTableOrderName()), dataPair.getLeft()); log.info("init data end: {}", LocalDateTime.now()); try (Connection connection = DriverManager.getConnection(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword())) { initSchemaAndTable(connection); } startCDCClient(); Awaitility.await().atMost(10, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> !queryForListWithLog("SHOW STREAMING LIST").isEmpty()); if (getDatabaseType() instanceof MySQLDatabaseType) { startIncrementTask(new MySQLIncrementTask(getProxyDataSource(), getSourceTableOrderName(), new SnowflakeKeyGenerateAlgorithm(), 20)); } else { startIncrementTask(new PostgreSQLIncrementTask(getProxyDataSource(), PipelineBaseE2EIT.SCHEMA_NAME, getSourceTableOrderName(), 20)); } getIncreaseTaskThread().join(10000); List<Map<String, Object>> actualProxyList; try (Connection connection = getProxyDataSource().getConnection()) { ResultSet resultSet = connection.createStatement().executeQuery(String.format("SELECT * FROM %s ORDER BY order_id ASC", getOrderTableNameWithSchema())); actualProxyList = transformResultSetToList(resultSet); } Awaitility.await().atMost(10, TimeUnit.SECONDS).pollInterval(2, TimeUnit.SECONDS).until(() -> listOrderRecords(getOrderTableNameWithSchema()).size() == actualProxyList.size()); List<Map<String, Object>> actualImportedList = listOrderRecords(getOrderTableNameWithSchema()); assertThat(actualProxyList.size(), is(actualImportedList.size())); SchemaTableName schemaTableName = getDatabaseType().isSchemaAvailable() ? 
new SchemaTableName(new SchemaName(PipelineBaseE2EIT.SCHEMA_NAME), new TableName(getSourceTableOrderName())) : new SchemaTableName(new SchemaName(null), new TableName(getSourceTableOrderName())); PipelineDataSourceWrapper targetDataSource = new PipelineDataSourceWrapper(StorageContainerUtil.generateDataSource(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword()), getDatabaseType()); PipelineDataSourceWrapper sourceDataSource = new PipelineDataSourceWrapper(generateShardingSphereDataSourceFromProxy(), getDatabaseType()); StandardPipelineTableMetaDataLoader metaDataLoader = new StandardPipelineTableMetaDataLoader(targetDataSource); PipelineTableMetaData tableMetaData = metaDataLoader.getTableMetaData(PipelineBaseE2EIT.SCHEMA_NAME, "t_order"); PipelineColumnMetaData primaryKeyMetaData = tableMetaData.getColumnMetaData(tableMetaData.getPrimaryKeyColumns().get(0)); ConsistencyCheckJobItemProgressContext progressContext = new ConsistencyCheckJobItemProgressContext("", 0); SingleTableInventoryDataConsistencyChecker checker = new SingleTableInventoryDataConsistencyChecker("", sourceDataSource, targetDataSource, schemaTableName, schemaTableName, primaryKeyMetaData, metaDataLoader, null, progressContext); DataConsistencyCheckResult checkResult = checker.check(new DataMatchDataConsistencyCalculateAlgorithm()); System.out.println(checkResult); } private void registerStorageUnit() throws SQLException { String registerStorageUnitTemplate = REGISTER_STORAGE_UNIT_SQL.replace("${user}", getUsername()) .replace("${password}", getPassword()) .replace("${ds0}", appendExtraParam(getActualJdbcUrlTemplate(DS_0, true))) .replace("${ds1}", appendExtraParam(getActualJdbcUrlTemplate(DS_1, true))); addResource(registerStorageUnitTemplate); } private void createOrderTableRule() throws SQLException { proxyExecuteWithLog(CREATE_SHARDING_RULE_SQL, 2); } private void initSchemaAndTable(final Connection connection) throws SQLException { if (getDatabaseType().isSchemaAvailable()) { 
String sql = String.format("CREATE SCHEMA %s", PipelineBaseE2EIT.SCHEMA_NAME); log.info("create schema sql: {}", sql); connection.createStatement().execute(sql); } String sql = getExtraSQLCommand().getCreateTableOrder(getSourceTableOrderName()); log.info("create table sql: {}", sql); connection.createStatement().execute(sql); } private List<Map<String, Object>> listOrderRecords(final String tableNameWithSchema) throws SQLException { try (Connection connection = DriverManager.getConnection(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword())) { ResultSet resultSet = connection.createStatement().executeQuery(String.format("SELECT * FROM %s ORDER BY order_id ASC", tableNameWithSchema)); return transformResultSetToList(resultSet); } } private String getOrderTableNameWithSchema() { if (getDatabaseType().isSchemaAvailable()) { return String.join(".", PipelineBaseE2EIT.SCHEMA_NAME, getSourceTableOrderName()); } else { return getSourceTableOrderName(); } } }
class CDCE2EIT extends PipelineBaseE2EIT { private static final String CREATE_SHARDING_RULE_SQL = String.format("CREATE SHARDING TABLE RULE t_order(" + "STORAGE_UNITS(%s,%s)," + "SHARDING_COLUMN=user_id," + "TYPE(NAME='hash_mod',PROPERTIES('sharding-count'='4'))," + "KEY_GENERATE_STRATEGY(COLUMN=order_id,TYPE(NAME='snowflake'))" + ")", DS_0, DS_1); private final ExecutorService executor = Executors.newSingleThreadExecutor(); public CDCE2EIT(final PipelineTestParameter testParam) { super(testParam); } @Parameters(name = "{0}") public static Collection<PipelineTestParameter> getTestParameters() { Collection<PipelineTestParameter> result = new LinkedList<>(); if (PipelineBaseE2EIT.ENV.getItEnvType() == PipelineEnvTypeEnum.NONE) { return result; } MySQLDatabaseType mysqlDatabaseType = new MySQLDatabaseType(); for (String each : PipelineBaseE2EIT.ENV.listStorageContainerImages(mysqlDatabaseType)) { result.add(new PipelineTestParameter(mysqlDatabaseType, each, "env/scenario/general/mysql.xml")); } OpenGaussDatabaseType openGaussDatabaseType = new OpenGaussDatabaseType(); for (String each : PipelineBaseE2EIT.ENV.listStorageContainerImages(openGaussDatabaseType)) { result.add(new PipelineTestParameter(openGaussDatabaseType, each, "env/scenario/general/postgresql.xml")); } return result; } @Override protected String getSourceTableOrderName() { return "t_order"; } @Test public void assertCDCDataImportSuccess() throws SQLException, InterruptedException { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); initEnvironment(getDatabaseType(), new CDCJobType()); for (String each : Arrays.asList(DS_0, DS_1)) { registerStorageUnit(each); } createOrderTableRule(); try (Connection connection = getProxyDataSource().getConnection()) { initSchemaAndTable(connection); } Pair<List<Object[]>, List<Object[]>> dataPair = PipelineCaseHelper.generateFullInsertData(getDatabaseType(), 20); log.info("init data begin: {}", LocalDateTime.now()); DataSourceExecuteUtil.execute(getProxyDataSource(), 
getExtraSQLCommand().getFullInsertOrder(getSourceTableOrderName()), dataPair.getLeft()); log.info("init data end: {}", LocalDateTime.now()); try (Connection connection = DriverManager.getConnection(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword())) { initSchemaAndTable(connection); } startCDCClient(); Awaitility.await().atMost(10, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> !queryForListWithLog("SHOW STREAMING LIST").isEmpty()); if (getDatabaseType() instanceof MySQLDatabaseType) { startIncrementTask(new MySQLIncrementTask(getProxyDataSource(), getSourceTableOrderName(), new SnowflakeKeyGenerateAlgorithm(), 20)); } else { startIncrementTask(new PostgreSQLIncrementTask(getProxyDataSource(), PipelineBaseE2EIT.SCHEMA_NAME, getSourceTableOrderName(), 20)); } getIncreaseTaskThread().join(10000); List<Map<String, Object>> actualProxyList; try (Connection connection = getProxyDataSource().getConnection()) { ResultSet resultSet = connection.createStatement().executeQuery(String.format("SELECT * FROM %s ORDER BY order_id ASC", getOrderTableNameWithSchema())); actualProxyList = transformResultSetToList(resultSet); } Awaitility.await().atMost(10, TimeUnit.SECONDS).pollInterval(2, TimeUnit.SECONDS).until(() -> listOrderRecords(getOrderTableNameWithSchema()).size() == actualProxyList.size()); List<Map<String, Object>> actualImportedList = listOrderRecords(getOrderTableNameWithSchema()); assertThat(actualProxyList.size(), is(actualImportedList.size())); SchemaTableName schemaTableName = getDatabaseType().isSchemaAvailable() ? 
new SchemaTableName(new SchemaName(PipelineBaseE2EIT.SCHEMA_NAME), new TableName(getSourceTableOrderName())) : new SchemaTableName(new SchemaName(null), new TableName(getSourceTableOrderName())); PipelineDataSourceWrapper targetDataSource = new PipelineDataSourceWrapper(StorageContainerUtil.generateDataSource(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword()), getDatabaseType()); PipelineDataSourceWrapper sourceDataSource = new PipelineDataSourceWrapper(generateShardingSphereDataSourceFromProxy(), getDatabaseType()); StandardPipelineTableMetaDataLoader metaDataLoader = new StandardPipelineTableMetaDataLoader(targetDataSource); PipelineTableMetaData tableMetaData = metaDataLoader.getTableMetaData(PipelineBaseE2EIT.SCHEMA_NAME, "t_order"); PipelineColumnMetaData primaryKeyMetaData = tableMetaData.getColumnMetaData(tableMetaData.getPrimaryKeyColumns().get(0)); ConsistencyCheckJobItemProgressContext progressContext = new ConsistencyCheckJobItemProgressContext("", 0); SingleTableInventoryDataConsistencyChecker checker = new SingleTableInventoryDataConsistencyChecker("", sourceDataSource, targetDataSource, schemaTableName, schemaTableName, primaryKeyMetaData, metaDataLoader, null, progressContext); DataConsistencyCheckResult checkResult = checker.check(new DataMatchDataConsistencyCalculateAlgorithm()); assertTrue(checkResult.isMatched()); } private void createOrderTableRule() throws SQLException { proxyExecuteWithLog(CREATE_SHARDING_RULE_SQL, 2); } private void initSchemaAndTable(final Connection connection) throws SQLException { if (getDatabaseType().isSchemaAvailable()) { String sql = String.format("CREATE SCHEMA %s", PipelineBaseE2EIT.SCHEMA_NAME); log.info("create schema sql: {}", sql); connection.createStatement().execute(sql); } String sql = getExtraSQLCommand().getCreateTableOrder(getSourceTableOrderName()); log.info("create table sql: {}", sql); connection.createStatement().execute(sql); } private List<Map<String, Object>> listOrderRecords(final 
String tableNameWithSchema) throws SQLException { try (Connection connection = DriverManager.getConnection(getActualJdbcUrlTemplate(DS_4, false), getUsername(), getPassword())) { ResultSet resultSet = connection.createStatement().executeQuery(String.format("SELECT * FROM %s ORDER BY order_id ASC", tableNameWithSchema)); return transformResultSetToList(resultSet); } } private String getOrderTableNameWithSchema() { if (getDatabaseType().isSchemaAvailable()) { return String.join(".", PipelineBaseE2EIT.SCHEMA_NAME, getSourceTableOrderName()); } else { return getSourceTableOrderName(); } } }
Here, we use the binding pattern as it is with the `toSourceCode` API. Shall we check whether the user experience with the various binding pattern options? Specially the list and map binding pattern https://ballerina.io/spec/lang/master/#binding-pattern
public Optional<DocumentSymbol> transform(ModuleVariableDeclarationNode moduleVariableDeclarationNode) { String name = moduleVariableDeclarationNode.typedBindingPattern().bindingPattern().toSourceCode(); SymbolKind symbolKind = SymbolKind.Variable; Range range = DocumentSymbolUtil.generateNodeRange(moduleVariableDeclarationNode); Optional<MetadataNode> metadata = moduleVariableDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); }
String name = moduleVariableDeclarationNode.typedBindingPattern().bindingPattern().toSourceCode();
public Optional<DocumentSymbol> transform(ModuleVariableDeclarationNode moduleVariableDeclarationNode) { BindingPatternNode bindingPatternNode = moduleVariableDeclarationNode.typedBindingPattern().bindingPattern(); if (bindingPatternNode.kind() != SyntaxKind.CAPTURE_BINDING_PATTERN) { return Optional.empty(); } String name = bindingPatternNode.toSourceCode(); SymbolKind symbolKind = SymbolKind.Variable; Range range = DocumentSymbolUtil.generateNodeRange(moduleVariableDeclarationNode); Optional<MetadataNode> metadata = moduleVariableDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated, Collections.emptyList())); }
class DocumentSymbolResolver extends NodeTransformer<Optional<DocumentSymbol>> { private List<DocumentSymbol> documentSymbolStore; private DocumentSymbolContext context; DocumentSymbolResolver(DocumentSymbolContext context) { this.context = context; documentSymbolStore = new ArrayList<>(); } public List<DocumentSymbol> getDocumentSymbolStore() { return this.documentSymbolStore; } @Override public Optional<DocumentSymbol> transform(Token token) { return Optional.empty(); } @Override protected Optional<DocumentSymbol> transformSyntaxNode(Node node) { return Optional.empty(); } @Override public Optional<DocumentSymbol> transform(ModulePartNode modulePartNode) { List<DocumentSymbol> memberSymbols = new ArrayList<>(); for (ModuleMemberDeclarationNode member : modulePartNode.members()) { member.apply(this).ifPresent(memberSymbols::add); } if (context.getHierarchicalDocumentSymbolSupport()) { this.documentSymbolStore.addAll(memberSymbols); } /* since module node is a collection of multiple documents. We don't create the document symbol node corresponding to the module node here. 
*/ return Optional.empty(); } @Override public Optional<DocumentSymbol> transform(FunctionDefinitionNode functionDefinitionNode) { String name = ""; Range range = DocumentSymbolUtil.generateNodeRange(functionDefinitionNode); SymbolKind symbolKind; Optional<MetadataNode> metadata = functionDefinitionNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); switch (functionDefinitionNode.kind()) { case FUNCTION_DEFINITION: name = functionDefinitionNode.functionName().text(); symbolKind = SymbolKind.Function; break; case OBJECT_METHOD_DEFINITION: name = functionDefinitionNode.functionName().text(); if ("init".equals(name)) { symbolKind = SymbolKind.Constructor; } else { symbolKind = SymbolKind.Method; } break; case RESOURCE_ACCESSOR_DEFINITION: String accessor = functionDefinitionNode.functionName().text(); List<String> pathParams = new ArrayList<>(); String resourcePath = ""; for (Node child : functionDefinitionNode.children()) { if (child.kind() == SyntaxKind.IDENTIFIER_TOKEN && !((IdentifierToken) child).text().equals(accessor)) { resourcePath = ((IdentifierToken) child).text(); } else if (child.kind() == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) { String[] param = child.toSourceCode() .replaceAll("\\[|\\]", "").split("\\s+"); pathParams.add(param[param.length - 1]); } else if (child.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) { pathParams.add("*"); } } if (!accessor.isEmpty()) { name = accessor + ":" + resourcePath; if (!pathParams.isEmpty()) { String params = pathParams.stream().map(param -> "{" + param + "}") .collect(Collectors.joining("/")); name = name + (resourcePath.isEmpty() ? 
params : "/" + params); } else if (resourcePath.isEmpty()) { name = name + "/"; } } symbolKind = SymbolKind.Function; break; default: return Optional.empty(); } return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override public Optional<DocumentSymbol> transform(MethodDeclarationNode methodDeclarationNode) { String name = methodDeclarationNode.methodName().text(); SymbolKind symbolKind = SymbolKind.Method; Range range = DocumentSymbolUtil.generateNodeRange(methodDeclarationNode); Optional<MetadataNode> metadata = methodDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override public Optional<DocumentSymbol> transform(ClassDefinitionNode classDefinitionNode) { String name = classDefinitionNode.className().text(); SymbolKind symbolKind = SymbolKind.Class; Range range = DocumentSymbolUtil.generateNodeRange(classDefinitionNode); Optional<MetadataNode> metadata = classDefinitionNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); List<DocumentSymbol> children = transformMembers(classDefinitionNode.members()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, children, this.context)); } @Override public Optional<DocumentSymbol> transform(ServiceDeclarationNode serviceDeclarationNode) { Optional<TypeDescriptorNode> typeDesc = serviceDeclarationNode.typeDescriptor(); StringBuilder name = new StringBuilder("service"); if (typeDesc.isPresent()) { name.append(" ").append(typeDesc.get().toSourceCode()); } name.append(" ").append(serviceDeclarationNode.absoluteResourcePath().stream() .map(Node::toSourceCode).collect(Collectors.joining(""))) .append(" on 
").append(serviceDeclarationNode.expressions().stream() .map(Node::toSourceCode).collect(Collectors.joining(","))); SymbolKind symbolKind = SymbolKind.Object; Range range = DocumentSymbolUtil.generateNodeRange(serviceDeclarationNode); Optional<MetadataNode> metadata = serviceDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); List<DocumentSymbol> children = transformMembers(serviceDeclarationNode.members()); return Optional.ofNullable(createDocumentSymbol(name.toString(), symbolKind, null, range, range, isDeprecated, children, this.context)); } @Override public Optional<DocumentSymbol> transform(TypeDefinitionNode typeDefinitionNode) { String name = typeDefinitionNode.typeName().text(); Node typeDescriptor = typeDefinitionNode.typeDescriptor(); SymbolKind symbolKind; List<DocumentSymbol> children = new ArrayList<>(); switch (typeDescriptor.kind()) { case RECORD_TYPE_DESC: symbolKind = SymbolKind.Struct; break; case OBJECT_TYPE_DESC: symbolKind = SymbolKind.Interface; children.addAll(transformMembers(((ObjectTypeDescriptorNode) typeDescriptor).members())); break; default: symbolKind = SymbolKind.TypeParameter; } Range range = DocumentSymbolUtil.generateNodeRange(typeDefinitionNode); Optional<MetadataNode> metadata = typeDefinitionNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, children, this.context)); } @Override @Override public Optional<DocumentSymbol> transform(ConstantDeclarationNode constantDeclarationNode) { String name = constantDeclarationNode.variableName().text(); SymbolKind symbolKind = SymbolKind.Constant; Range range = DocumentSymbolUtil.generateNodeRange(constantDeclarationNode); Optional<MetadataNode> metadata = constantDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && 
DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override public Optional<DocumentSymbol> transform(EnumDeclarationNode enumDeclarationNode) { String name = enumDeclarationNode.identifier().text(); SymbolKind symbolKind = SymbolKind.Enum; Range range = DocumentSymbolUtil.generateNodeRange(enumDeclarationNode); Optional<MetadataNode> metadata = enumDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override public Optional<DocumentSymbol> transform(ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) { String name = SyntaxKind.XMLNS_KEYWORD.stringValue() + " " + moduleXMLNamespaceDeclarationNode.namespaceuri().toSourceCode(); Optional<IdentifierToken> prefix = moduleXMLNamespaceDeclarationNode.namespacePrefix(); if (prefix.isPresent()) { name = name + " as " + prefix.get(); } SymbolKind symbolKind = SymbolKind.Namespace; Range range = DocumentSymbolUtil.generateNodeRange(moduleXMLNamespaceDeclarationNode); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, false, Collections.emptyList(), this.context)); } @Override public Optional<DocumentSymbol> transform(ListenerDeclarationNode listenerDeclarationNode) { StringBuilder name = new StringBuilder(SyntaxKind.LISTENER_KEYWORD.stringValue()); Optional<TypeDescriptorNode> typeDescriptorNode = listenerDeclarationNode.typeDescriptor(); if (typeDescriptorNode.isPresent()) { name.append(" " + typeDescriptorNode.get().toSourceCode()); } name.append(" " + listenerDeclarationNode.variableName().text()); SymbolKind symbolKind = SymbolKind.Object; Range range = DocumentSymbolUtil.generateNodeRange(listenerDeclarationNode); 
Optional<MetadataNode> metadata = listenerDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name.toString(), symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override public Optional<DocumentSymbol> transform(AnnotationDeclarationNode annotationDeclarationNode) { StringBuilder name = new StringBuilder(SyntaxKind.ANNOTATION_KEYWORD.stringValue()); Optional<Node> typeDesc = annotationDeclarationNode.typeDescriptor(); if (typeDesc.isPresent()) { name.append(" ").append(typeDesc.get().toSourceCode()); } name.append(" ").append(annotationDeclarationNode.annotationTag().text()); SymbolKind symbolKind = SymbolKind.Property; Range range = DocumentSymbolUtil.generateNodeRange(annotationDeclarationNode); Optional<MetadataNode> metadata = annotationDeclarationNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name.toString(), symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } @Override public Optional<DocumentSymbol> transform(ObjectFieldNode objectFieldNode) { String name = objectFieldNode.fieldName().text(); SymbolKind symbolKind = SymbolKind.Field; Range range = DocumentSymbolUtil.generateNodeRange(objectFieldNode); Optional<MetadataNode> metadata = objectFieldNode.metadata(); boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get()); return Optional.ofNullable(createDocumentSymbol(name, symbolKind, null, range, range, isDeprecated, Collections.emptyList(), this.context)); } /** * Provided a ChildNodes list generate the corresponding document symbols. * * @param nodes {@link NodeList<? extends Node>} Member nodes list. * @return {@link List<DocumentSymbol>} Generated list of document symbols. 
*/ private List<DocumentSymbol> transformMembers(NodeList<? extends Node> nodes) { List<DocumentSymbol> childSymbols = new ArrayList<>(); nodes.forEach(node -> { node.apply(this).ifPresent(childSymbols::add); }); return childSymbols; } /** * Document symbol builder. * * @param name symbol name. * @param kind symbol kind. * @param detail symbol detail. * @param range Range of the symbol. * @param selectionRange selection range of the symbol. * @param isDeprecated Whether the symbol is deprecated. * @param children Child document symbols. * @param context Document symbol context. * @return */ public DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, String detail, Range range, Range selectionRange, boolean isDeprecated, List<DocumentSymbol> children, DocumentSymbolContext context) { if (name == null || name.isEmpty()) { return null; } DocumentSymbol documentSymbol = new DocumentSymbol(); documentSymbol.setName(name); documentSymbol.setKind(kind); documentSymbol.setDetail(detail); documentSymbol.setRange(range); documentSymbol.setSelectionRange(selectionRange); if (isDeprecated && context.deprecatedSupport()) { documentSymbol.setTags(List.of(SymbolTag.Deprecated)); } if (context.getHierarchicalDocumentSymbolSupport()) { documentSymbol.setChildren(children); } else { this.documentSymbolStore.add(documentSymbol); } return documentSymbol; } }
/**
 * Syntax-tree transformer that produces LSP {@link DocumentSymbol}s for Ballerina constructs.
 * When the client supports hierarchical document symbols, each transform returns a symbol with
 * nested children; otherwise every created symbol is collected flat into {@code documentSymbolStore}.
 */
class DocumentSymbolResolver extends NodeTransformer<Optional<DocumentSymbol>> {

    // Flat collection used when the client lacks hierarchical document-symbol support.
    private List<DocumentSymbol> documentSymbolStore;
    private DocumentSymbolContext context;

    DocumentSymbolResolver(DocumentSymbolContext context) {
        this.context = context;
        documentSymbolStore = new ArrayList<>();
    }

    public List<DocumentSymbol> getDocumentSymbolStore() {
        return this.documentSymbolStore;
    }

    @Override
    public Optional<DocumentSymbol> transform(Token token) {
        // Bare tokens never produce symbols.
        return Optional.empty();
    }

    @Override
    protected Optional<DocumentSymbol> transformSyntaxNode(Node node) {
        // Default for node kinds without an explicit transform: no symbol.
        return Optional.empty();
    }

    /** Transforms each module member; the module node itself yields no symbol. */
    @Override
    public Optional<DocumentSymbol> transform(ModulePartNode modulePartNode) {
        List<DocumentSymbol> memberSymbols = new ArrayList<>();
        for (ModuleMemberDeclarationNode member : modulePartNode.members()) {
            member.apply(this).ifPresent(memberSymbols::add);
        }
        if (context.getHierarchicalDocumentSymbolSupport()) {
            this.documentSymbolStore.addAll(memberSymbols);
        }
        /* Since the module node is a collection of multiple documents, we don't create the
           document symbol node corresponding to the module node here. */
        return Optional.empty();
    }

    /**
     * Builds the symbol for a function/method/resource definition. Resource accessors are named
     * {@code accessor:path/{param}...}; an {@code init} method is reported as a Constructor.
     */
    @Override
    public Optional<DocumentSymbol> transform(FunctionDefinitionNode functionDefinitionNode) {
        String name = "";
        Range range = DocumentSymbolUtil.generateNodeRange(functionDefinitionNode);
        SymbolKind symbolKind;
        Optional<MetadataNode> metadata = functionDefinitionNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        switch (functionDefinitionNode.kind()) {
            case FUNCTION_DEFINITION:
                name = functionDefinitionNode.functionName().text();
                symbolKind = SymbolKind.Function;
                break;
            case OBJECT_METHOD_DEFINITION:
                name = functionDefinitionNode.functionName().text();
                if ("init".equals(name)) {
                    symbolKind = SymbolKind.Constructor;
                } else {
                    symbolKind = SymbolKind.Method;
                }
                break;
            case RESOURCE_ACCESSOR_DEFINITION:
                String accessor = functionDefinitionNode.functionName().text();
                List<String> pathParams = new ArrayList<>();
                String resourcePath = "";
                for (Node child : functionDefinitionNode.children()) {
                    if (child.kind() == SyntaxKind.IDENTIFIER_TOKEN
                            && !((IdentifierToken) child).text().equals(accessor)) {
                        resourcePath = ((IdentifierToken) child).text();
                    } else if (child.kind() == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) {
                        // Strip brackets from "[type name]" and keep the trailing identifier.
                        String[] param = child.toSourceCode()
                                .replaceAll("\\[|\\]", "").split("\\s+");
                        pathParams.add(param[param.length - 1]);
                    } else if (child.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
                        pathParams.add("*");
                    }
                }
                if (!accessor.isEmpty()) {
                    name = accessor + ":" + resourcePath;
                    if (!pathParams.isEmpty()) {
                        String params = pathParams.stream().map(param -> "{" + param + "}")
                                .collect(Collectors.joining("/"));
                        name = name + (resourcePath.isEmpty() ? params : "/" + params);
                    } else if (resourcePath.isEmpty()) {
                        name = name + "/";
                    }
                }
                symbolKind = SymbolKind.Function;
                break;
            default:
                return Optional.empty();
        }
        if (name == null || name.isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for a method declaration (kind {@code Method}). */
    @Override
    public Optional<DocumentSymbol> transform(MethodDeclarationNode methodDeclarationNode) {
        String name = methodDeclarationNode.methodName().text();
        SymbolKind symbolKind = SymbolKind.Method;
        Range range = DocumentSymbolUtil.generateNodeRange(methodDeclarationNode);
        Optional<MetadataNode> metadata = methodDeclarationNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for a class definition, with its members as children. */
    @Override
    public Optional<DocumentSymbol> transform(ClassDefinitionNode classDefinitionNode) {
        String name = classDefinitionNode.className().text();
        SymbolKind symbolKind = SymbolKind.Class;
        Range range = DocumentSymbolUtil.generateNodeRange(classDefinitionNode);
        Optional<MetadataNode> metadata = classDefinitionNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        List<DocumentSymbol> children = transformMembers(classDefinitionNode.members());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated, children));
    }

    /** Builds the symbol for a service declaration named {@code service <absolute-resource-path>}. */
    @Override
    public Optional<DocumentSymbol> transform(ServiceDeclarationNode serviceDeclarationNode) {
        StringBuilder name = new StringBuilder("service");
        name.append(" ").append(serviceDeclarationNode.absoluteResourcePath().stream()
                .map(Node::toSourceCode).collect(Collectors.joining("")));
        SymbolKind symbolKind = SymbolKind.Object;
        Range range = DocumentSymbolUtil.generateNodeRange(serviceDeclarationNode);
        Optional<MetadataNode> metadata = serviceDeclarationNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        List<DocumentSymbol> children = transformMembers(serviceDeclarationNode.members());
        return Optional.of(createDocumentSymbol(name.toString(), symbolKind, range, range,
                isDeprecated, children));
    }

    /**
     * Builds the symbol for a type definition. Record types become {@code Struct} (fields plus any
     * rest descriptor as children), object types become {@code Interface}; all others
     * {@code TypeParameter}.
     */
    @Override
    public Optional<DocumentSymbol> transform(TypeDefinitionNode typeDefinitionNode) {
        String name = typeDefinitionNode.typeName().text();
        Node typeDescriptor = typeDefinitionNode.typeDescriptor();
        SymbolKind symbolKind;
        List<DocumentSymbol> children = new ArrayList<>();
        switch (typeDescriptor.kind()) {
            case RECORD_TYPE_DESC:
                symbolKind = SymbolKind.Struct;
                RecordTypeDescriptorNode recordTypeDescriptorNode = (RecordTypeDescriptorNode) typeDescriptor;
                children.addAll(transformMembers(recordTypeDescriptorNode.fields()));
                Optional<RecordRestDescriptorNode> restTypeDec =
                        recordTypeDescriptorNode.recordRestDescriptor();
                if (restTypeDec.isPresent()) {
                    Optional<DocumentSymbol> restDocSymbol = restTypeDec.get().apply(this);
                    restDocSymbol.ifPresent(children::add);
                }
                break;
            case OBJECT_TYPE_DESC:
                symbolKind = SymbolKind.Interface;
                children.addAll(transformMembers(((ObjectTypeDescriptorNode) typeDescriptor).members()));
                break;
            default:
                symbolKind = SymbolKind.TypeParameter;
        }
        Range range = DocumentSymbolUtil.generateNodeRange(typeDefinitionNode);
        Optional<MetadataNode> metadata = typeDefinitionNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated, children));
    }

    /** Builds the symbol for a constant declaration (kind {@code Constant}). */
    // Fixed: the original carried a duplicated @Override annotation here, which is a
    // compile error since @Override is not a repeatable annotation.
    @Override
    public Optional<DocumentSymbol> transform(ConstantDeclarationNode constantDeclarationNode) {
        String name = constantDeclarationNode.variableName().text();
        SymbolKind symbolKind = SymbolKind.Constant;
        Range range = DocumentSymbolUtil.generateNodeRange(constantDeclarationNode);
        Optional<MetadataNode> metadata = constantDeclarationNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for an enum declaration, with its members as children. */
    @Override
    public Optional<DocumentSymbol> transform(EnumDeclarationNode enumDeclarationNode) {
        String name = enumDeclarationNode.identifier().text();
        SymbolKind symbolKind = SymbolKind.Enum;
        Range range = DocumentSymbolUtil.generateNodeRange(enumDeclarationNode);
        Optional<MetadataNode> metadata = enumDeclarationNode.metadata();
        List<DocumentSymbol> children = transformMembers(enumDeclarationNode.enumMemberList());
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range, isDeprecated, children));
    }

    /** Builds the symbol for a module-level {@code xmlns} declaration. */
    @Override
    public Optional<DocumentSymbol> transform(ModuleXMLNamespaceDeclarationNode moduleXMLNamespaceDeclarationNode) {
        Optional<IdentifierToken> prefix = moduleXMLNamespaceDeclarationNode.namespacePrefix();
        // NOTE(review): due to ?: precedence, the "xmlns <uri>" form applies only when no prefix
        // is present; with a prefix the symbol is just the prefix text — confirm this is intended.
        String name = prefix.isPresent() ? prefix.get().text()
                : SyntaxKind.XMLNS_KEYWORD.stringValue() + " "
                        + moduleXMLNamespaceDeclarationNode.namespaceuri().toSourceCode();
        SymbolKind symbolKind = SymbolKind.Namespace;
        Range range = DocumentSymbolUtil.generateNodeRange(moduleXMLNamespaceDeclarationNode);
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range, Collections.emptyList()));
    }

    /** Builds the symbol for a listener declaration (kind {@code Object}). */
    @Override
    public Optional<DocumentSymbol> transform(ListenerDeclarationNode listenerDeclarationNode) {
        String name = listenerDeclarationNode.variableName().text();
        SymbolKind symbolKind = SymbolKind.Object;
        Range range = DocumentSymbolUtil.generateNodeRange(listenerDeclarationNode);
        Optional<MetadataNode> metadata = listenerDeclarationNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for an annotation declaration, named by its annotation tag. */
    @Override
    public Optional<DocumentSymbol> transform(AnnotationDeclarationNode annotationDeclarationNode) {
        String name = annotationDeclarationNode.annotationTag().text();
        SymbolKind symbolKind = SymbolKind.Property;
        Range range = DocumentSymbolUtil.generateNodeRange(annotationDeclarationNode);
        Optional<MetadataNode> metadata = annotationDeclarationNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for an object field (kind {@code Field}). */
    @Override
    public Optional<DocumentSymbol> transform(ObjectFieldNode objectFieldNode) {
        String name = objectFieldNode.fieldName().text();
        SymbolKind symbolKind = SymbolKind.Field;
        Range range = DocumentSymbolUtil.generateNodeRange(objectFieldNode);
        Optional<MetadataNode> metadata = objectFieldNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for a record field (kind {@code Field}). */
    @Override
    public Optional<DocumentSymbol> transform(RecordFieldNode recordFieldNode) {
        String name = recordFieldNode.fieldName().text();
        SymbolKind symbolKind = SymbolKind.Field;
        Range range = DocumentSymbolUtil.generateNodeRange(recordFieldNode);
        Optional<MetadataNode> metadata = recordFieldNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for a record field that carries a default value. */
    @Override
    public Optional<DocumentSymbol> transform(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {
        String name = recordFieldWithDefaultValueNode.fieldName().text();
        SymbolKind symbolKind = SymbolKind.Field;
        Range range = DocumentSymbolUtil.generateNodeRange(recordFieldWithDefaultValueNode);
        Optional<MetadataNode> metadata = recordFieldWithDefaultValueNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /** Builds the symbol for a record rest descriptor, named {@code ...<typeName>}. */
    @Override
    public Optional<DocumentSymbol> transform(RecordRestDescriptorNode recordRestDescriptorNode) {
        String name = recordRestDescriptorNode.ellipsisToken().text()
                + recordRestDescriptorNode.typeName().toSourceCode().trim();
        SymbolKind symbolKind = SymbolKind.Field;
        Range range = DocumentSymbolUtil.generateNodeRange(recordRestDescriptorNode);
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range, Collections.emptyList()));
    }

    /** Builds the symbol for an enum member (kind {@code EnumMember}). */
    @Override
    public Optional<DocumentSymbol> transform(EnumMemberNode enumMemberNode) {
        String name = enumMemberNode.identifier().text();
        SymbolKind symbolKind = SymbolKind.EnumMember;
        Range range = DocumentSymbolUtil.generateNodeRange(enumMemberNode);
        Optional<MetadataNode> metadata = enumMemberNode.metadata();
        boolean isDeprecated = metadata.isPresent() && DocumentSymbolUtil.isDeprecated(metadata.get());
        return Optional.of(createDocumentSymbol(name, symbolKind, range, range,
                isDeprecated, Collections.emptyList()));
    }

    /**
     * Provided a child-nodes list, generates the corresponding document symbols by applying
     * this transformer to each member.
     *
     * @param nodes {@link NodeList} of member nodes.
     * @return {@link List} of generated document symbols (members yielding no symbol are skipped).
     */
    private List<DocumentSymbol> transformMembers(NodeList<? extends Node> nodes) {
        List<DocumentSymbol> childSymbols = new ArrayList<>();
        nodes.forEach(node -> {
            node.apply(this).ifPresent(childSymbols::add);
        });
        return childSymbols;
    }

    /** Convenience overload: no detail, not deprecated. */
    private DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, Range range,
                                               Range selectionRange, List<DocumentSymbol> children) {
        return createDocumentSymbol(name, kind, null, range, selectionRange, false, children);
    }

    /** Convenience overload: no detail. */
    private DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, Range range,
                                               Range selectionRange, boolean isDeprecated,
                                               List<DocumentSymbol> children) {
        return createDocumentSymbol(name, kind, null, range, selectionRange, isDeprecated, children);
    }

    /**
     * Document symbol builder.
     *
     * @param name           symbol name.
     * @param kind           symbol kind.
     * @param detail         symbol detail (may be {@code null}).
     * @param range          range of the symbol.
     * @param selectionRange selection range of the symbol.
     * @param isDeprecated   whether the symbol is deprecated.
     * @param children       child document symbols (attached only for hierarchical clients).
     * @return the built {@link DocumentSymbol}.
     */
    private DocumentSymbol createDocumentSymbol(String name, SymbolKind kind, String detail,
                                                Range range, Range selectionRange, boolean isDeprecated,
                                                List<DocumentSymbol> children) {
        DocumentSymbol documentSymbol = new DocumentSymbol();
        documentSymbol.setName(name);
        documentSymbol.setKind(kind);
        documentSymbol.setDetail(detail);
        documentSymbol.setRange(range);
        documentSymbol.setSelectionRange(selectionRange);
        if (isDeprecated && this.context.deprecatedSupport()) {
            documentSymbol.setTags(List.of(SymbolTag.Deprecated));
        }
        if (this.context.getHierarchicalDocumentSymbolSupport()) {
            documentSymbol.setChildren(children);
        } else {
            // Non-hierarchical clients receive a flat list instead of nested children.
            this.documentSymbolStore.add(documentSymbol);
        }
        return documentSymbol;
    }
}
IMO, this call seems unnecessary, since `addConversionExprIfRequired` already turns the implicit casts into expressions. Is there another specific reason to call `rewriteExpr` here?
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) { BVarSymbol iteratorSymbol = varDef.var.symbol; BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID, foreach.nillableResultType, this.env.scope.owner, foreach.pos, VIRTUAL); BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos, foreach.nillableResultType, iteratorSymbol, resultSymbol); BLangType userDefineType = getUserDefineTypeNode(foreach.resultType); BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol); BLangTypeTestExpr typeTestExpr = ASTBuilderUtil .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType); BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode(); whileNode.pos = foreach.pos; whileNode.expr = typeTestExpr; whileNode.body = foreach.body; BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol); VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode; BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol); BLangExpression expr = valueAccessExpr.expr; valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType); variableDefinitionNode.getVariable() .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType)); whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode); whileNode.body.stmts.add(1, resultAssignment); BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos); blockNode.addStatement(varDef); blockNode.addStatement(resultVariableDefinition); blockNode.addStatement(whileNode); return blockNode; } private BLangType getUserDefineTypeNode(BType type) { BLangUserDefinedType recordType = new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""), ASTBuilderUtil.createIdentifier(null, "")); 
recordType.setBType(type); return recordType; } @Override public void visit(BLangWhile whileNode) { if (whileNode.onFailClause != null) { BLangOnFailClause onFailClause = whileNode.onFailClause; whileNode.onFailClause = null; whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE; BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause); result = rewrite(doStmt, env); } else { whileNode.expr = rewriteExpr(whileNode.expr); whileNode.body = rewrite(whileNode.body, env); result = whileNode; } } private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement, BLangOnFailClause onFailClause) { BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode(); BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location); doBlock.scope = new Scope(env.scope.owner); bLDo.body = doBlock; bLDo.pos = location; bLDo.onFailClause = onFailClause; bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK; doBlock.stmts.add(statement); return bLDo; } @Override public void visit(BLangLock lockNode) { BLangOnFailClause currentOnFailClause = this.onFailClause; BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos); if (lockNode.onFailClause != null) { blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK; rewrite(lockNode.onFailClause, env); } BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos); blockStmt.addStatement(lockStmt); enclLocks.push(lockStmt); BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE); BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType); BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral); statementExpression.setBType(symTable.nilType); BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode(); trapExpr.setBType(nillableError); trapExpr.expr = statementExpression; BVarSymbol 
nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"), this.env.scope.owner.pkgID, nillableError, this.env.scope.owner, lockNode.pos, VIRTUAL); BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult", nillableError, trapExpr, nillableErrorVarSymbol); BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable); blockStmt.addStatement(simpleVariableDef); BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos); unLockStmt.relatedLock = lockStmt; blockStmt.addStatement(unLockStmt); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol); BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos); BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = lockNode.pos; panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType); ifBody.addStatement(panicNode); BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode()); isErrorTest.setBType(symTable.booleanType); BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null); blockStmt.addStatement(ifelse); result = rewrite(blockStmt, env); enclLocks.pop(); this.onFailClause = currentOnFailClause; } @Override public void visit(BLangLockStmt lockStmt) { result = lockStmt; } @Override public void visit(BLangUnLockStmt unLockStmt) { result = unLockStmt; } private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef, BLangSimpleVarRef shouldRetryRef) { BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode(); trxOnFailClause.pos = pos; trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos); trxOnFailClause.body.scope = new Scope(env.scope.owner); trxOnFailClause.isInternal = true; BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"), 
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL); BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos, "$trxError$", symTable.errorType, null, trxOnFailErrorSym); trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, trxOnFailError); trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym); transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym, trxBlockId, shouldRetryRef); BLangGroupExpr shouldNotPanic = new BLangGroupExpr(); shouldNotPanic.setBType(symTable.booleanType); shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef); BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym); BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos); BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = pos; panicNode.expr = caughtError; BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode); trxOnFailClause.body.stmts.add(exitIf); BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode(); failStmt.pos = pos; failStmt.expr = caughtError; failBlock.stmts.add(failStmt); trxOnFailClause.bodyContainsFail = true; return trxOnFailClause; } @Override public void visit(BLangTransaction transactionNode) { if (transactionNode.onFailClause != null) { BLangOnFailClause onFailClause = transactionNode.onFailClause; transactionNode.onFailClause = null; transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE; BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause); result = rewrite(doStmt, env); } else { BLangLiteral currentTrxBlockId = this.trxBlockId; String uniqueId = String.valueOf(++transactionBlockCount); this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId); boolean currShouldReturnErrors = this.shouldReturnErrors; 
this.shouldReturnErrors = true; BLangOnFailClause currOnFailClause = this.onFailClause; BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false); BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"), env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL); shouldPanicVarSymbol.closure = true; BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos, "$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol); BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos, shouldPanicVariable); BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos, shouldPanicVarSymbol); BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef, this.shouldRetryRef); enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef); boolean userDefinedOnFailAvbl = this.onFailClause != null; analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody); BLangBlockStmt transactionStmtBlock = transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId); transactionStmtBlock.stmts.add(0, shouldPanicDef); transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol); transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ? 
BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE : BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK; result = rewrite(transactionStmtBlock, this.env); this.shouldReturnErrors = currShouldReturnErrors; this.trxBlockId = currentTrxBlockId; swapAndResetEnclosingOnFail(currOnFailClause); } } @Override public void visit(BLangRollback rollbackNode) { BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef); result = rewrite(rollbackStmtExpr, env); } private BLangOnFailClause createRetryInternalOnFail(Location pos, BLangSimpleVarRef retryResultRef, BLangSimpleVarRef retryManagerRef, BLangSimpleVarRef shouldRetryRef, BLangSimpleVarRef continueLoopRef, BLangSimpleVarRef returnResult) { BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode(); internalOnFail.pos = pos; internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos); internalOnFail.body.scope = new Scope(env.scope.owner); BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"), env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL); BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos, "$caughtError$", symTable.errorType, null, caughtErrorSym); internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, caughtError); env.scope.define(caughtErrorSym.name, caughtErrorSym); BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym); BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef); internalOnFail.body.stmts.add(errorAssignment); BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef, ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true)); internalOnFail.body.stmts.add(continueLoopTrue); BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos, retryManagerRef, caughtErrorRef); BLangAssignment 
shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef, shouldRetryInvocation); internalOnFail.body.stmts.add(shouldRetryAssignment); BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr(); shouldNotRetryCheck.setBType(symTable.booleanType); shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef); BLangGroupExpr exitCheck = new BLangGroupExpr(); exitCheck.setBType(symTable.booleanType); exitCheck.expression = shouldNotRetryCheck; BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos); BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null); if (this.onFailClause != null) { BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode(); failStmt.pos = pos; failStmt.expr = retryResultRef; exitLogicBlock.stmts.add(failStmt); internalOnFail.bodyContainsFail = true; internalOnFail.body.stmts.add(exitIf); BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode(); loopContinueStmt.pos = pos; internalOnFail.body.stmts.add(loopContinueStmt); } else { BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult, ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true)); exitLogicBlock.stmts.add(returnErrorTrue); internalOnFail.body.stmts.add(exitIf); } return internalOnFail; } BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) { List<BType> paramTypes = new ArrayList<>(); paramTypes.add(symTable.booleanType); BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType, null); BOperatorSymbol notOperatorSymbol = new BOperatorSymbol( names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol, symTable.builtinPos, VIRTUAL); return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType, OperatorKind.NOT, notOperatorSymbol); } BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix, 
List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, BLangFunctionBody lambdaBody) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++); lambdaFunction.function = func; func.requiredParams.addAll(lambdaFunctionVariable); func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); lambdaFunctionVariable = func.requiredParams; func.body = lambdaBody; func.desugared = false; lambdaFunction.pos = pos; List<BType> paramTypes = new ArrayList<>(); lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type)); lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(), null)); return lambdaFunction; } protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix, List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, List<BLangStatement> fnBodyStmts, SymbolEnv env, Scope bodyScope) { BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode(); body.scope = bodyScope; SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); this.forceCastReturnType = ((BLangType) returnType).getBType(); body.stmts = rewriteStmt(fnBodyStmts, bodyEnv); this.forceCastReturnType = null; return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body); } private BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix, TypeNode returnType) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++); lambdaFunction.function = func; func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); 
func.desugared = false; lambdaFunction.pos = pos; return lambdaFunction; } private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) { final BPackageSymbol packageSymbol = targetPkg.symbol; final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol); symbolEnter.defineNode(funcNode, packageEnv); packageEnv.enclPkg.functions.add(funcNode); packageEnv.enclPkg.topLevelNodes.add(funcNode); } @Override public void visit(BLangForkJoin forkJoin) { result = forkJoin; } @Override public void visit(BLangLiteral literalExpr) { if (literalExpr.getBType().tag == TypeTags.ARRAY && ((BArrayType) literalExpr.getBType()).eType.tag == TypeTags.BYTE) { result = rewriteBlobLiteral(literalExpr); return; } result = literalExpr; } private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) { String[] result = getBlobTextValue((String) literalExpr.value); byte[] values; if (BASE_64.equals(result[0])) { values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8)); } else { values = hexStringToByteArray(result[1]); } BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteralNode.setBType(literalExpr.getBType()); arrayLiteralNode.pos = literalExpr.pos; arrayLiteralNode.exprs = new ArrayList<>(); for (byte b : values) { arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b)); } return arrayLiteralNode; } private String[] getBlobTextValue(String blobLiteralNodeText) { String nodeText = blobLiteralNodeText.replace("\t", "").replace("\n", "").replace("\r", "") .replace(" ", ""); String[] result = new String[2]; result[0] = nodeText.substring(0, nodeText.indexOf('`')); result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`')); return result; } private static byte[] hexStringToByteArray(String str) { int len = str.length(); byte[] data = new byte[len / 2]; for (int i = 0; i < len; i += 2) { data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 
4) + Character.digit(str.charAt(i + 1), 16)); } return data; } @Override public void visit(BLangListConstructorExpr listConstructor) { listConstructor.exprs = rewriteExprs(listConstructor.exprs); BLangExpression expr; if (listConstructor.getBType().tag == TypeTags.TUPLE) { expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType()); result = rewriteExpr(expr); } else if (listConstructor.getBType().tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType())); result = rewriteExpr(expr); } else if (getElementType(listConstructor.getBType()).tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType()); result = rewriteExpr(expr); } else if (listConstructor.getBType().tag == TypeTags.TYPEDESC) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = listConstructor.typedescType; typedescExpr.setBType(symTable.typeDesc); result = rewriteExpr(typedescExpr); } else { expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType()); result = rewriteExpr(expr); } } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { rewriteExprs(tableConstructorExpr.recordLiteralList); result = tableConstructorExpr; } @Override public void visit(BLangArrayLiteral arrayLiteral) { arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs); if (arrayLiteral.getBType().tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType())); return; } else if (getElementType(arrayLiteral.getBType()).tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType()); return; } result = arrayLiteral; } @Override public void visit(BLangTupleLiteral tupleLiteral) { if (tupleLiteral.isTypedescExpr) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); 
typedescExpr.resolvedType = tupleLiteral.typedescType;
            typedescExpr.setBType(symTable.typeDesc);
            result = rewriteExpr(typedescExpr);
            return;
        }
        List<BLangExpression> exprs = tupleLiteral.exprs;
        BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
        List<BType> tupleMemberTypes = tupleType.tupleTypes;
        int tupleMemberTypeSize = tupleMemberTypes.size();
        int tupleExprSize = exprs.size();
        // Add implicit casts so each member expression conforms to its declared member
        // type; members beyond the fixed-length prefix use the tuple's rest type.
        for (int i = 0; i < tupleExprSize; i++) {
            BLangExpression expr = exprs.get(i);
            BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
            BType targetType = i < tupleMemberTypeSize ? tupleMemberTypes.get(i) : tupleType.restType;
            types.setImplicitCastExpr(expr, expType, targetType);
        }
        tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
        result = tupleLiteral;
    }

    // A grouping expression is either a typedesc expression or simply replaced by its
    // inner expression.
    @Override
    public void visit(BLangGroupExpr groupExpr) {
        if (groupExpr.isTypedescExpr) {
            final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
            typedescExpr.resolvedType = groupExpr.typedescType;
            typedescExpr.setBType(symTable.typeDesc);
            result = rewriteExpr(typedescExpr);
        } else {
            result = rewriteExpr(groupExpr.expression);
        }
    }

    // Fields with computed keys are moved to the end before the mapping constructor is
    // rewritten (Boolean.compare sorts false-before-true).
    @Override
    public void visit(BLangRecordLiteral recordLiteral) {
        List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
        fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
        result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
    }

    // Rewrites a simple variable reference into the concrete ref node matching the
    // symbol's kind and owner: XML qualified name, function ref, type load,
    // local/field/package variable ref, or an inlined constant literal.
    @Override
    public void visit(BLangSimpleVarRef varRefExpr) {
        BLangSimpleVarRef genVarRefExpr = varRefExpr;

        // XML namespace prefix reference: rewrite as a string-typed XML qualified name.
        if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
            BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
            qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
            qnameExpr.localname = varRefExpr.variableName;
            qnameExpr.prefix = varRefExpr.pkgAlias;
            qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
            qnameExpr.isUsedInXML = false;
            qnameExpr.pos = varRefExpr.pos;
            qnameExpr.setBType(symTable.stringType);
            result = qnameExpr;
            return;
        }

        if (varRefExpr.symbol == null) {
            result = varRefExpr;
            return;
        }

        // If the variable symbol records an original symbol, refer to that instead.
        if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
            BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
            if (varSymbol.originalSymbol != null) {
                varRefExpr.symbol = varSymbol.originalSymbol;
            }
        }

        BSymbol ownerSymbol = varRefExpr.symbol.owner;
        if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
                && varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
            genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
        } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE
                && !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
            genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
        } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE
                || (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
            genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
        } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
            genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
        } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE
                || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
            // Constants of simple types (tag <= BOOLEAN, or NIL) are inlined as literals,
            // with a conversion inserted when the expected type differs.
            if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
                BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
                if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
                    BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                            constSymbol.value.value);
                    result = addConversionExprIfRequired(literal, varRefExpr.getBType());
                    return;
                }
            }
            genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
            // Inside an enclosing lock, register this global variable and the globals it
            // depends on as variables to be locked.
            if (!enclLocks.isEmpty()) {
                BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
                BLangLockStmt lockStmt = enclLocks.peek();
                lockStmt.addLockVariable(symbol);
                lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
            }
        }
        genVarRefExpr.setBType(varRefExpr.getBType());
        genVarRefExpr.pos =
varRefExpr.pos;

        // For lvalues (or the ignore variable '_') keep the symbol's own type and return.
        if ((varRefExpr.isLValue) || genVarRefExpr.symbol.name.equals(IGNORE)) {
            genVarRefExpr.isLValue = varRefExpr.isLValue;
            genVarRefExpr.setBType(varRefExpr.symbol.type);
            result = genVarRefExpr;
            return;
        }

        // For rvalues, read with the symbol's type and convert back to the expected
        // type of the reference if required.
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        BType targetType = genVarRefExpr.getBType();
        genVarRefExpr.setBType(genVarRefExpr.symbol.type);
        BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
        result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
    }

    @Override
    public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
        rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
    }

    // Rewrites a field access (a.b) into the concrete access node determined by the
    // accessed value's type: object/record field or method, lax (json/xml) access,
    // map access, or XML attribute/element-name access.
    private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
        if (safeNavigate(fieldAccessExpr)) {
            result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
            return;
        }

        BLangAccessExpression targetVarRef = fieldAccessExpr;

        BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
        fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
        if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
        }

        BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
                StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
        int varRefTypeTag = varRefType.tag;
        if (varRefTypeTag == TypeTags.OBJECT
                || (varRefTypeTag == TypeTags.UNION
                && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
            // Accessing an attached method of an object is rewritten as a field-style
            // access that yields a function value.
            if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                    && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
                result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
                return;
            } else {
                boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;

                // Field writes inside the object's (generated) initializer count as
                // store-on-creation.
                if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                    BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) varRefType.tsymbol;
                    BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                    BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;

                    if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol)
                            || (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                        isStoreOnCreation = true;
                    }
                }

                targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                        (BVarSymbol) fieldAccessExpr.symbol, false, isStoreOnCreation);
            }
        } else if (varRefTypeTag == TypeTags.RECORD
                || (varRefTypeTag == TypeTags.UNION
                && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
            if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                    && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
                targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr,
                        (BVarSymbol) fieldAccessExpr.symbol);
            } else {
                targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                        (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
            }
        } else if (types.isLax(varRefType)) {
            if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
                if (varRefType.tag == TypeTags.MAP
                        && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
                    result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                    return;
                }
                // Lax non-XML access goes through JSON access semantics.
                fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
                targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
            } else {
                BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
                xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
                result =
xmlAccessInvocation;
                return;
            }
        } else if (varRefTypeTag == TypeTags.MAP) {
            targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    fieldAccessExpr.isStoreOnCreation);
        } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
            targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    fieldAccessExpr.fieldKind);
        }

        targetVarRef.isLValue = fieldAccessExpr.isLValue;
        targetVarRef.setBType(fieldAccessExpr.getBType());
        targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
        result = targetVarRef;
    }

    @Override
    public void visit(BLangFieldBasedAccess fieldAccessExpr) {
        rewriteFieldBasedAccess(fieldAccessExpr);
    }

    // Rewrites an access to an attached object method (o.m) into an anonymous lambda
    // that captures the receiver in a closure and delegates to the method, so the
    // access produces a function value.
    private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
        Location pos = fieldAccessExpr.pos;
        BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;

        // Build the synthetic delegate function and its symbol.
        BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
        String funcName = "$annon$method$delegate$" + lambdaFunctionCount++;
        BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
                names.fromString(funcName), env.enclPkg.packageID, originalMemberFuncSymbol.type,
                env.scope.owner, pos, VIRTUAL);
        funcSymbol.retType = originalMemberFuncSymbol.retType;
        funcSymbol.bodyExist = true;
        funcSymbol.params = new ArrayList<>();
        funcSymbol.scope = new Scope(funcSymbol);
        func.pos = pos;
        func.name = createIdentifier(pos, funcName);
        func.flagSet.add(Flag.LAMBDA);
        func.flagSet.add(Flag.ANONYMOUS);
        func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
        func.symbol = funcSymbol;
        func.setBType(funcSymbol.type);
        func.closureVarSymbols = new LinkedHashSet<>();

        // The receiver is captured as a closure variable. A non-simple receiver
        // expression is first bound to a temporary variable definition.
        BLangExpression receiver = fieldAccessExpr.expr;
        BLangSimpleVariableDef intermediateObjDef = null;
        if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
            receiverSymbol.closure = true;
            func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
        } else {
            BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                    receiver, pos);
            intermediateObjDef = varDef;
            varDef.var.symbol.closure = true;
            env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
            BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
            func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
            receiver = variableRef;
        }

        // Mirror the original method's required parameters on the delegate.
        ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
        for (BVarSymbol param : originalMemberFuncSymbol.params) {
            BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
            fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                    VIRTUAL);
            fParam.pos = pos;
            fParam.name = createIdentifier(pos, param.name.value);
            fParam.setBType(param.type);
            func.requiredParams.add(fParam);
            funcSymbol.params.add(fParam.symbol);
            funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);

            BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
            requiredArgs.add(paramRef);
        }

        // Mirror the rest parameter, forwarded via a rest-args (...) expression.
        ArrayList<BLangExpression> restArgs = new ArrayList<>();
        if (originalMemberFuncSymbol.restParam != null) {
            BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
            func.restParam = restParam;
            BVarSymbol restSym = originalMemberFuncSymbol.restParam;
            restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
            restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol,
                    pos, VIRTUAL);
            restParam.pos = pos;
            restParam.setBType(restSym.type);
            funcSymbol.restParam = restParam.symbol;
            funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);

            BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
            BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
            restArgExpr.expr = restArg;
            restArgExpr.pos = pos;
restArgExpr.setBType(restSym.type);
            restArgExpr.expectedType = restArgExpr.getBType();
            restArgs.add(restArgExpr);
        }

        // Delegate body: return the result of invoking the original method on the
        // captured receiver with the mirrored arguments.
        BLangIdentifier field = fieldAccessExpr.field;
        BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
        retStmt.expr = createObjectMethodInvocation(
                receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
        ((BLangBlockFunctionBody) func.body).addStatement(retStmt);

        BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
        lambdaFunction.function = func;
        lambdaFunction.capturedClosureEnv = env.createClone();
        env.enclPkg.functions.add(func);
        env.enclPkg.topLevelNodes.add(func);
        lambdaFunction.parent = env.enclInvokable;
        lambdaFunction.setBType(func.getBType());

        if (intermediateObjDef == null) {
            return rewrite(lambdaFunction, env);
        } else {
            // Bind the receiver temp first, then yield the lambda.
            BLangStatementExpression expr = createStatementExpression(intermediateObjDef,
                    rewrite(lambdaFunction, env));
            expr.setBType(lambdaFunction.getBType());
            return rewrite(expr, env);
        }
    }

    // Builds an invocation node for calling the given attached method on the receiver.
    private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                         BSymbol invocableSymbol,
                                                         List<BLangExpression> requiredArgs,
                                                         List<BLangExpression> restArgs) {
        BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
        invocationNode.name = field;
        invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

        invocationNode.expr = receiver;
        invocationNode.symbol = invocableSymbol;
        invocationNode.setBType(((BInvokableType) invocableSymbol.type).retType);
        invocationNode.requiredArgs = requiredArgs;
        invocationNode.restArgs = restArgs;
        return invocationNode;
    }

    // Desugars lax access on a map of XML (m.k) into a statement expression that reads
    // the key and, when the key is absent, produces an InvalidKey error value instead.
    private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
        BLangStatementExpression statementExpression = new BLangStatementExpression();
        BLangBlockStmt block = new BLangBlockStmt();
        statementExpression.stmt = block;
        // Overall result is either the accessed value or an error.
        BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
        Location pos = fieldAccessExpr.pos;
        BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
        block.addStatement(result);
        BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
        resultRef.setBType(fieldAccessType);
        statementExpression.setBType(fieldAccessType);

        // Raw map read yields value-or-nil.
        BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
                fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
        BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
        BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
        mapAccessExpr.setBType(xmlOrNil);
        BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
        BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
        block.addStatement(mapResult);

        // if (mapAccess is ()) { result = error } else { result = mapAccess }
        BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);

        BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);

        ifStmt.expr = isLikeNilExpr;
        BLangBlockStmt resultNilBody = new BLangBlockStmt();
        ifStmt.body = resultNilBody;
        BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
        ifStmt.elseStmt = resultHasValueBody;

        // error("{...map.lang.lib}InvalidKey", key = <field name>)
        BLangErrorConstructorExpr errorConstructorExpr =
                (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
        BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
                names.fromString(""), names.fromString("error"));
        errorConstructorExpr.setBType(symbol.type);

        List<BLangExpression> positionalArgs = new ArrayList<>();
        List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
        positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
        BLangNamedArgsExpression message = new BLangNamedArgsExpression();
        message.name = ASTBuilderUtil.createIdentifier(pos, "key");
        message.expr = createStringLiteral(pos,
fieldAccessExpr.field.value);
        namedArgs.add(message);
        errorConstructorExpr.positionalArgs = positionalArgs;
        errorConstructorExpr.namedArgs = namedArgs;

        BLangSimpleVariableDef errorDef =
                createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
        resultNilBody.addStatement(errorDef);

        BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
        BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
        errorVarAssignment.varRef = resultRef;
        errorVarAssignment.expr = errorRef;

        BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
                pos, resultHasValueBody);
        mapResultAssignment.varRef = resultRef;
        mapResultAssignment.expr = mapResultRef;

        statementExpression.expr = resultRef;
        return statementExpression;
    }

    // Rewrites lax field access on XML into the matching lang.xml internal call:
    // "_" accesses the element name; anything else reads an attribute (with the field
    // name expanded to "{namespaceURI}local" for ns-prefixed access).
    private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
        ArrayList<BLangExpression> args = new ArrayList<>();

        String fieldName = fieldAccessExpr.field.value;
        if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
            BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                    (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
            fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
        }

        // Handle element name access.
        if (fieldName.equals("_")) {
            return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                    fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
        }

        BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
        args.add(attributeNameLiteral);
        args.add(isOptionalAccessToLiteral(fieldAccessExpr));

        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr,
                args, new ArrayList<>());
    }

    // Boolean literal carrying whether the access uses the optional (?.) form.
    private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
        return rewrite(
                createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()),
                env);
    }

    // Expanded XML qualified-name form: "{nsURI}localName".
    private String createExpandedQName(String nsURI, String localName) {
        return "{" + nsURI + "}" + localName;
    }

    // Rewrites an index-based access (a[k]) into the concrete access node determined
    // by the accessed value's type: map, mapping, list, XML, string, or table access.
    @Override
    public void visit(BLangIndexBasedAccess indexAccessExpr) {
        if (safeNavigate(indexAccessExpr)) {
            result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
            return;
        }

        BLangIndexBasedAccess targetVarRef = indexAccessExpr;
        indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);

        BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
        indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
        if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
            indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
        }

        if (varRefType.tag == TypeTags.MAP) {
            targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                    indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
        } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
            targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                    indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
        } else if (types.isSubTypeOfList(varRefType)) {
            targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                    indexAccessExpr.indexExpr);
        } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
            targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                    indexAccessExpr.indexExpr);
        } else if (types.isAssignable(varRefType, symTable.stringType)) {
            indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
            targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                    indexAccessExpr.indexExpr);
        } else if (varRefType.tag == TypeTags.TABLE) {
            // Multi-key table index becomes a tuple literal of the key expressions.
            if (targetVarRef.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY) {
                BLangTupleLiteral listConstructorExpr = new BLangTupleLiteral();
listConstructorExpr.exprs = ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs;
                List<BType> memberTypes = new ArrayList<>();
                ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs.
                        forEach(expression -> memberTypes.add(expression.getBType()));
                listConstructorExpr.setBType(new BTupleType(memberTypes));
                indexAccessExpr.indexExpr = listConstructorExpr;
            }
            targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                    indexAccessExpr.indexExpr);
        }

        targetVarRef.isLValue = indexAccessExpr.isLValue;
        targetVarRef.setBType(indexAccessExpr.getBType());
        result = targetVarRef;
    }

    @Override
    public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
        rewriteExprs(tableMultiKeyExpr.multiKeyIndexExprs);
        result = tableMultiKeyExpr;
    }

    @Override
    public void visit(BLangInvocation iExpr) {
        rewriteInvocation(iExpr, false);
    }

    // Normalizes an error constructor: ensures a cause argument exists as the second
    // positional arg (nil if absent), converts it to the error type, and builds the
    // detail record from the named args (made readonly via clone).
    @Override
    public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
        if (errorConstructorExpr.positionalArgs.size() == 1) {
            errorConstructorExpr.positionalArgs.add(createNilLiteral());
        }
        errorConstructorExpr.positionalArgs.set(1,
                addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
        rewriteExprs(errorConstructorExpr.positionalArgs);

        BLangExpression errorDetail;
        BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
                ((BErrorType) errorConstructorExpr.getBType()).detailType);
        if (errorConstructorExpr.namedArgs.isEmpty()) {
            errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
        } else {
            for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
                BLangRecordLiteral.BLangRecordKeyValueField member =
                        new BLangRecordLiteral.BLangRecordKeyValueField();
                member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                        symTable.stringType, namedArg.name.value));

                if (recordLiteral.getBType().tag == TypeTags.RECORD) {
                    member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
                } else {
                    member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
                }
                recordLiteral.fields.add(member);
            }
            errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                    ((BErrorType) errorConstructorExpr.getBType()).detailType);
        }
        errorConstructorExpr.errorDetail = errorDetail;
        result = errorConstructorExpr;
    }

    @Override
    public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
        // A non-async action invoked inside a transaction needs the transaction
        // coordinator started first (done only once).
        if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {
            transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
        }
        rewriteInvocation(actionInvocation, actionInvocation.async);
    }

    // Common desugaring for invocations: records locked globals, reorders and rewrites
    // arguments, handles function-pointer calls, resolves parameterized return types,
    // and (below) converts object/record method calls to attached-function invocations.
    private void rewriteInvocation(BLangInvocation invocation, boolean async) {
        BLangInvocation invRef = invocation;

        if (!enclLocks.isEmpty()) {
            BLangLockStmt lock = enclLocks.peek();
            lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
        }

        // Named/defaultable arguments are reordered to positional form before rewriting.
        reorderArguments(invocation);

        invocation.requiredArgs = rewriteExprs(invocation.requiredArgs);
        fixStreamTypeCastsInInvocationParams(invocation);
        fixNonRestArgTypeCastInTypeParamInvocation(invocation);

        invocation.restArgs = rewriteExprs(invocation.restArgs);

        annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
                invocation.symbol.pkgID, invocation.symbol.owner, env);

        if (invocation.functionPointerInvocation) {
            visitFunctionPointerInvocation(invocation);
            return;
        }
        invocation.expr = rewriteExpr(invocation.expr);
        result = invRef;

        BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
        if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
            BType retType = unifier.build(invSym.retType);
            invocation.setBType(invocation.async ?
new BFutureType(TypeTags.FUTURE, retType, null) : retType); } if (invocation.expr == null) { fixTypeCastInTypeParamInvocation(invocation, invRef); if (invocation.exprSymbol == null) { return; } invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol); invocation.expr = rewriteExpr(invocation.expr); } switch (invocation.expr.getBType().tag) { case TypeTags.OBJECT: case TypeTags.RECORD: if (!invocation.langLibInvocation) { List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs); argExprs.add(0, invocation.expr); BLangAttachedFunctionInvocation attachedFunctionInvocation = new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs, invocation.symbol, invocation.getBType(), invocation.expr, async); attachedFunctionInvocation.name = invocation.name; attachedFunctionInvocation.annAttachments = invocation.annAttachments; result = invRef = attachedFunctionInvocation; } break; } fixTypeCastInTypeParamInvocation(invocation, invRef); } private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) { if (!iExpr.langLibInvocation) { return; } List<BLangExpression> requiredArgs = iExpr.requiredArgs; List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params; for (int i = 0; i < requiredArgs.size(); i++) { requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type)); } } /* This function is a workaround and need improvement * Notes for improvement : * 1. Both arguments are same. * 2. Due to current type param logic we put type param flag on the original type. * 3. Error type having Cloneable type with type param flag, change expression type by this code. * 4. 
 * using error type is a problem as Cloneable type is an typeparm eg: ExprBodiedFunctionTest
 * added never to CloneableType type param
 * @typeParam type
 * CloneableType Cloneable|never;
 * */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
    if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
        return;
    }
    // Retype the generated invocation to the invokable's return type, then cast the
    // whole expression back to the originally inferred type (async calls keep theirs).
    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        genIExpr.setBType(returnTypeOfInvokable);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}

// Casts stream-typed arguments to the exact declared parameter stream type.
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (!params.isEmpty()) {
        for (int i = 0; i < requiredArgs.size(); i++) {
            BVarSymbol param = params.get(i);
            if (param.type.tag == TypeTags.STREAM) {
                requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), param.type));
            }
        }
    }
}

// Builds a () literal of nil type.
private BLangLiteral createNilLiteral() {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = null;
    literal.setBType(symTable.nilType);
    return literal;
}

// new-expression lowering: streams become a stream-construct invocation; objects
// become a statement expression that runs the generated init and propagates errors.
public void visit(BLangTypeInit typeInitExpr) {
    if (typeInitExpr.getBType().tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}

// Lowers `new T(...)` for objects into:
//   T $obj$ = <alloc>; var $temp$ = $obj$.$init$(...);
//   $result$ = ($temp$ is error) ? $temp$ : $obj$;
// When the init returns nil, no error branch is generated.
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitExpr.initInvocation.symbol =
            ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    if (typeInitExpr.initInvocation.getBType().tag == TypeTags.NIL) {
        // init cannot fail: just run it and yield the object reference.
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitExpr.initInvocation;
        typeInitExpr.initInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$",
            typeInitExpr.initInvocation.getBType(), typeInitExpr.initInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos,
            initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}

// Lowers `new stream<C, E>(...)` into a lang.internal construct-stream invocation
// carrying typedescs for the constraint and completion types plus the optional
// first constructor argument (the iterator/source).
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
    BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
    constraintTdExpr.resolvedType = constraintType;
    constraintTdExpr.setBType(constraintTdType);
    BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
    BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
    completionTdExpr.resolvedType = completionType;
    completionTdExpr.setBType(completionTdType);
    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));
    if (!typeInitExpr.argsExpr.isEmpty()) {
        args.add(typeInitExpr.argsExpr.get(0));
    }
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, args, symResolver);
    streamConstructInvocation.setBType(
            new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return streamConstructInvocation;
}

// Defines a synthetic local variable (reusing an in-scope symbol of the same name
// when present). Signature continues on the next chunk.
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, Location
location) {
    // -- body of createVarDef; its signature opens on the previous chunk --
    BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    // Mint a fresh symbol only when the name is not already bound in scope.
    if (objSym == null || objSym == symTable.notFoundSymbol) {
        objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) objSym);
    BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(location);
    objVarDef.var = objVar;
    objVarDef.setBType(objVar.getBType());
    return objVarDef;
}

// Extracts the object member from a plain object type or an object-bearing union.
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    } else if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().stream()
                .filter(t -> t.tag == TypeTags.OBJECT)
                .findFirst()
                .orElse(symTable.noType);
    }
    throw new IllegalStateException("None object type '" + type.toString() +
            "' found in object init context");
}

// AST node for the builtin `error` type.
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorType);
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}

// AST node for `error|()`. NOTE(review): unlike getErrorTypeNode, no pos is set here
// — confirm whether that is intentional.
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    return errorTypeNode;
}

@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if () {
     *    $result$ = thenExpr;
     * } else {
     *    $result$ = elseExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}

// wait e1|e2|... : flatten the alternative-wait binary tree into an expression list.
@Override
public void visit(BLangWaitExpr waitExpr) {
    if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
    }
    result = waitExpr;
}

// Depth-first collection of the leaves of a wait-alternative binary tree.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}

// Recurse into nested alternatives; rewrite and collect leaf expressions.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() == NodeKind.BINARY_EXPR) {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
    } else {
        expr = rewriteExpr(expr);
        exprs.add(expr);
    }
}

// wait {k1: f1, ...} : rewrite each entry and rebuild the node as a wait literal.
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr != null) {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        } else {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        }
    });
    BLangExpression expr = new
BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    // -- completes the wait-for-all literal started on the previous chunk --
    expr.pos = waitExpr.pos;
    result = rewriteExpr(expr);
}

// trap expr : cast the trapped value to the trap expression's (error-including) type.
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    if (trapExpr.expr.getBType().tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}

@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Nil-lifting: nullable operands route through an if/else statement expression.
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    // a ..< b / a ... b become an int-range construction.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
        return;
    }
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = binaryExpr.rhsExpr.getBType().tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.getBType().tag;
    // Equality between byte and int operands: widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // string + xml concatenation: wrap the string side as an xml text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    // Numeric widening: decimal dominates, then float; remaining mixes fall through
    // to the operator-specific cast helpers below.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}

private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     * if (x is () or y is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = x + y;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    BType nonNilType = exprBType.getMemberTypes().iterator().next();
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
    // Bitwise operators keep each operand's own nil-stripped type; arithmetic/shift
    // operands are cast to the expression's non-nil member type.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    BLangSimpleVariableDef tempVarDef = createVarDef("result", binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangTypeTestExpr typeTestExprOne =
            createTypeCheckExpr(binaryExpr.pos, binaryExpr.lhsExpr, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo =
createTypeCheckExpr(binaryExpr.pos, binaryExpr.rhsExpr, getNillTypeNode());
    // -- continues the nullable-binary desugaring started on the previous chunk:
    // if (lhs is () || rhs is ()) { result = (); } else { result = lhs <op> rhs; }
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, binaryExpr.lhsExpr,
            binaryExpr.rhsExpr, nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(newBinaryExpr.lhsExpr, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(newBinaryExpr.rhsExpr, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}

// True for arithmetic/shift/bitwise binary expressions with a possibly-nil operand.
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    if ((binaryExpr.lhsExpr.getBType() != null && binaryExpr.rhsExpr.getBType() != null) &&
            (binaryExpr.rhsExpr.getBType().isNullable() ||
                    binaryExpr.lhsExpr.getBType().isNullable())) {
        switch (binaryExpr.getOperatorKind()) {
            case ADD:
            case SUB:
            case MUL:
            case DIV:
            case MOD:
            case BITWISE_LEFT_SHIFT:
            case BITWISE_RIGHT_SHIFT:
            case BITWISE_UNSIGNED_RIGHT_SHIFT:
            case BITWISE_AND:
            case BITWISE_OR:
            case BITWISE_XOR:
                return true;
        }
    }
    return false;
}

// Inserts casts so both arithmetic operands share a type; mixed string/xml
// operands get the string side wrapped as xml text.
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // NOTE(review): rhsExpr.pos is used here although the lhs is being wrapped —
            // looks like a copy/paste slip; confirm against upstream before changing.
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}

// Shift operators: any operand that is neither int-like nor byte is cast to int.
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    if (isLhsIntegerType || lhsExprTypeTag == TypeTags.BYTE) {
        if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
}

// Comparison operators: normalise mixed int/byte operands to int and mixed
// string-subtype operands to string.
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&
            rhsExprTypeTag == TypeTags.BYTE)) {
        return;
    }
    if (isLhsIntegerType && !isRhsIntegerType) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    if (!isLhsIntegerType && isRhsIntegerType) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);
    if (isLhsStringType && isRhsStringType) {
        return;
    }
    if (isLhsStringType && !isRhsStringType) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
        return;
    }
    if (!isLhsStringType && isRhsStringType) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
    }
}

// Replaces a range expression with a lang.internal createIntRange invocation.
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location, symbol,
            new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
    createIntRangeInvocation.setBType(symTable.intRangeType);
    return createIntRangeInvocation;
}

private void
checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    // -- body of the helper whose `private void` opens on the previous chunk --
    // When the expression's expected type is int, widen any byte operand to int.
    if (binaryExpr.expectedType == null) {
        return;
    }
    int rhsExprTypeTag = binaryExpr.rhsExpr.getBType().tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.getBType().tag;
    if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) {
        return;
    }
    int resultTypeTag = binaryExpr.expectedType.tag;
    if (resultTypeTag == TypeTags.INT) {
        if (rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
        }
        if (lhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
        }
    }
}

/**
 * This method checks whether given binary expression is related to shift operation.
 * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type.
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;
}

// lhs ?: rhs — desugars via a match expression with a null pattern for the lhs.
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
            rewriteExpr(elvisExpr.rhsExpr)));
    matchExpr.setBType(elvisExpr.getBType());
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}

@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Nilable operand: lift via an if/else statement expression.
    if (isNullableUnaryExpr(unaryExpr)) {
        BLangStatementExpression statementExpression = createStmtExprForNilableUnaryExpr(unaryExpr);
        result = rewrite(statementExpression, env);
        return;
    }
    if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}

/**
 * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.
 * Example : ~a -> a ^ -1;
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.opKind = OperatorKind.BITWISE_XOR;
    binaryExpr.lhsExpr = unaryExpr.expr;
    if (TypeTags.BYTE == unaryExpr.getBType().tag) {
        // byte complement xors against 0xff (8-bit mask).
        binaryExpr.setBType(symTable.byteType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        // int complement xors against -1 (all bits set).
        binaryExpr.setBType(symTable.intType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(binaryExpr);
}

private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    BLangSimpleVariableDef tempVarDef = createVarDef("$result", unaryExpr.getBType(),
            createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr, getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr, nilLiftType,
            unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}

// True for +, -, ~ unary expressions whose static type may include nil.
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    if (unaryExpr.getBType() != null && unaryExpr.getBType().isNullable()) {
        switch (unaryExpr.operator) {
            case ADD:
            case SUB:
            case BITWISE_COMPLEMENT:
                return true;
        }
    }
    return false;
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion node with no type node but with annotation attachments is only an
    // annotation carrier; drop it and keep the rewritten inner expression.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // NOTE(review): targetType is read but never used below — confirm whether it is dead.
    BType targetType = conversionExpr.targetType;
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}

@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda once at package level; the node itself needs no further lowering here.
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}

/**
 * Desugars an arrow function (x => expr) into a full lambda function with its own
 * function symbol, parameter symbols and invokable type, then registers it in the package.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;

    // Return type is taken from the arrow body expression's inferred type.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    // Wrap the single body expression into a block body.
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));

    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);

    // Create and define the function symbol for the generated lambda.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value), new Name(funcNode.name.originalValue),
            env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType, env.enclEnv.enclVarSym, true,
            bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);

    // Re-home each parameter symbol under the new function scope (peek is used for
    // this side effect) and collect the symbols for the invokable type.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());

    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();

    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));

    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Closures captured from the surrounding environment are resolved later from this env.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}

// Wires a freshly created function symbol and scope into the invokable node and its env.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}

@Override
public void visit(BLangXMLQName xmlQName) {
    // Nothing to lower for a qualified name.
    result = xmlQName;
}

@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}

/**
 * Rewrites an XML element literal and collects its namespace-declaration attributes
 * into inline namespace nodes (package-level vs local depending on the owning scope).
 */
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        // Only xmlns declarations become inline namespaces; ordinary attributes are kept as-is.
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    result = xmlElementLiteral;
}

@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    // NOTE(review): the rewritten items are not written back; presumably rewriteExpr
    // mutates the nodes in place — confirm.
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}

@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the interpolated text fragments into a single string-concat expression.
    xmlTextLiteral.concatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}

@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}

@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}

@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}

@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is just a chain of concatenations after desugaring.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}

/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 *     string name = "Pubudu";
 *     'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 *     type RawTemplate$Impl$0 object {
 *         public string[] strings = ["Hello ", "!"];
 *         public (any|error)[] insertions;
 *
 *         function init((any|error)[] insertions) {
 *             self.insertions = insertions;
 *         }
 *     };
 *
 *     'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) rawTemplateLiteral.getBType();
    // Generate the concrete object class for this literal's abstract template type.
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();

    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList =
            ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;

    // new RawTemplate$Impl$N([<insertions>])
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}

/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
* A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 *     type $anonType$0 object {
 *         public string[] strings = ["Hello ", "!"];
 *         public (any|error)[] insertions;
 *
 *         function init((any|error)[] insertions) {
 *             self.insertions = insertions;
 *         }
 *     };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings,
                                                                 BObjectType objectType, Location pos) {
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    // Fresh, unique class name for this literal occurrence.
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));

    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
            env.enclPkg.packageID, null, env.enclPkg.symbol, pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;

    // The generated class shares the abstract type's fields and type-id set.
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);

    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);

    // Initialize the "strings" field with the literal's constant string parts.
    // NOTE(review): assumes "strings" is field index 0 in the generated class — confirm
    // against TypeDefBuilderHelper.createClassDef's field ordering.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;

    // init(insertions) assigns the insertions param to the field; register at package level.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);

    // The compiler-generated $init$ also needs to be created and analyzed.
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
            tempGeneratedInitFunction.symbol.scope, env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);

    return rewrite(classDef, env);
}

/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                    names, Names.USER_DEFINED_INIT_SUFFIX, symTable, classDefn.getBType());
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
            (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);

    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    for (BLangSimpleVariable field : classDefn.fields) {
        // Fields with explicit defaults need no init parameter.
        if (field.expr != null) {
            continue;
        }
        // One final parameter per defaultless field, defined in the init function's scope...
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID,
                fieldSym.type, initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);

        // ...and a "self.<field> = <param>;" assignment in the body.
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym,
                field.getBType(), initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }

    return initFunction;
}

@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Values crossing worker boundaries are cloned (no shared mutable state between workers).
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr),
            workerSendNode.expr.getBType());
    result = workerSendNode;
}

@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
    result = syncSendExpr;
}

@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    result = workerReceiveNode;
}

@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Flush targets are the distinct workers referenced by the cached send statements.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}

@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // "transactional" lowers to a call into the internal transaction module.
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
    result = ASTBuilderUtil
            .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol,
                    Collections.emptyList(), Collections.emptyList(), symResolver);
}

@Override
public void visit(BLangCommitExpr commitExpr) {
    // Commit lowering is delegated entirely to the transaction desugar.
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result = rewriteExpr(stmtExpr);
}

@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    // Mark desugared to avoid re-processing on the second rewrite below.
    xmlAttributeAccessExpr.desugared = true;
    // LValue or indexed accesses are handled downstream; a bare read is rewritten once more.
    if (xmlAttributeAccessExpr.isLValue || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}

@Override
public void visit(BLangFail failNode) {
    // Inside an on-fail region, `fail` routes to the on-fail handler; otherwise it
    // becomes a plain return of the error value.
    if (this.onFailClause != null) {
        if (this.onFailClause.bodyContainsFail) {
            result = rewriteNestedOnFail(this.onFailClause, failNode);
        } else {
            result = createOnFailInvocation(onFailClause, failNode);
        }
    } else {
        BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        stmt.desugared = true;
        result = stmt;
    }
}

// The following var-ref / access nodes are already in their lowered form; pass through.
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}

@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}

@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}

@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}

@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}

@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
    result = tableKeyAccessExpr;
}

@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}

@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}

@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}

@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // x.<elem> lowers to a langlib call: getElements(x, <expanded filter names>).
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos,
            XML_INTERNAL_GET_ELEMENTS, xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}

/**
 * Expands element filters into string-literal arguments: each filter name is qualified
 * with its resolved namespace URI (or the default namespace when the prefix is unbound
 * and the name is not the wildcard "*").
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;

    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}

/**
 * Builds an invocation of an internal xml langlib function on the given expression.
 * The receiver expression is also passed as the first required argument.
 */
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);

    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

    invocationNode.expr = invokeOnExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));

    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);

    invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
    invocationNode.langLibInvocation = true;
    return invocationNode;
}

// XML navigation (x/<e>, x/*, x/**/<e>) lowers to internal langlib calls; see body below.
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);

    if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
        // x/**/<e> -> selectDescendants(x, filters)
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
        // x/* -> children(x)
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_CHILDREN, xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
        result = rewriteExpr(invocationNode);
    } else {
        // x/<e>[i] -> getFilteredChildrenFlat(x, i, filters); -1 means "no index given".
        BLangExpression childIndexExpr;
        if (xmlNavigation.childIndex == null) {
            childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
        } else {
            childIndexExpr = xmlNavigation.childIndex;
        }
        ArrayList<BLangExpression> args = new ArrayList<>();
        args.add(rewriteExpr(childIndexExpr));
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
        result = rewriteExpr(invocationNode);
    }
}

@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}

@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already lowered form; pass through.
    result = fpInvocation;
}

@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}

@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Exclusive bounds are shifted to their inclusive equivalents before rewriting.
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}

@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // The wrapper node is dropped; only the underlying expression survives.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}

@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // Same: the named-arg wrapper is dropped after rewriting its value.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}

/**
 * Desugars a match expression into a block statement-expression: a temp result variable,
 * a match statement whose pattern bodies each assign into the temp, and the temp as value.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Ensure there is a catch-all clause so the temp var is always assigned.
    addMatchExprDefaultCase(bLangMatchExpression);

    // Temp variable holding the selected clause's value.
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName,
                    bLangMatchExpression.getBType(), null,
                    new BVarSymbol(0, names.fromString(matchTempResultVarName),
                            this.env.scope.owner.pkgID, bLangMatchExpression.getBType(),
                            this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;

    BLangBlockStmt stmts =
            ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();

    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);

        // Each clause becomes: tempResultVar = <clause expr, converted to the result type>;
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.getBType());
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos,
                pattern.variable, patternBody));
    }

    // Append the match statement, then yield the temp result variable.
    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos,
            bLangMatchExpression.expr, patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
    statementExpr.setBType(bLangMatchExpression.getBType());
    result = rewriteExpr(statementExpr);
}

@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // check <expr>: on error, return it to the caller.
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}

@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // checkpanic <expr>: on error, panic.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}

/**
 * Shared lowering for check/checkpanic: introduces a temp variable, a match statement
 * with a success pattern (assigns the value) and an error pattern (returns or panics),
 * and yields the temp variable as the expression value.
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    // Temp variable to hold the non-error result.
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.getBType(), null,
                    new BVarSymbol(0, names.fromString(checkedExprVarName), this.env.scope.owner.pkgID,
                            checkedExpr.getBType(), this.env.scope.owner, checkedExpr.pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;

    BLangMatchTypedBindingPatternClause patternSuccessCase =
            getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                    checkedExprVar.symbol, null);
    BLangMatchTypedBindingPatternClause patternErrorCase =
            getSafeAssignErrorPattern(checkedExpr.pos, this.env.enclInvokable.symbol,
                    checkedExpr.equivalentErrorTypeList, isCheckPanic);

    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
            new ArrayList<BLangMatchTypedBindingPatternClause>() {{
                add(patternSuccessCase);
                add(patternErrorCase);
            }});

    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
            new ArrayList<BLangStatement>() {{
                add(checkedExprVarDef);
                add(matchStmt);
            }});

    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.setBType(checkedExpr.getBType());
    result = rewriteExpr(statementExpr);
}

@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor lowers to instantiating the generated service class.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}

@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    // Value-typed operands are boxed to any before the runtime type test.
    if (types.isValueType(expr.getBType())) {
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    if (typeTestExpr.isNegation) {
        // x !is T  ->  !(x is T)
        BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                typeTestExpr.expr, typeTestExpr.typeNode);
        BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        bLangGroupExpr.expression = bLangTypeTestExpr;
        bLangGroupExpr.setBType(typeTestExpr.getBType());
        BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                typeTestExpr.getBType(), OperatorKind.NOT, null);
        result = rewriteExpr(unaryExpr);
        return;
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}

@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Annotation access lowers to a synthetic ANNOT_ACCESS binary op of (expr, annot-alias-string).
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(), binaryExpr.rhsExpr.getBType()),
                    annotAccessExpr.getBType(), null), null,
            symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}

@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}

@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}

@Override
public void visit(BLangQueryExpr queryExpr) {
    // Query lowering is delegated to the query desugar.
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangQueryAction queryAction) {
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}

@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    // Simple-typed constants (tags up to BOOLEAN, plus NIL) are replaced by a literal
    // built from the resolved constant value.
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // A non-nil simple constant must have a resolved value by this phase.
        if (constSymbol.literalType.tag != TypeTags.NIL && (constSymbol.value == null ||
                constSymbol.value.value == null)) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}

@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    result = ignoreExpr;
}

@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
    result = dynamicParamExpr;
}

@Override
public void visit(BLangConstRef constantRef) {
    // Constant references collapse to literals of the resolved value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}

/**
 * Builds "$iterator$ = <collection>.iterator();" as a variable definition, used by
 * foreach/query lowering.
 */
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"),
            this.env.scope.owner.pkgID, iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}

/**
 * Builds "$result$ = $iterator$.next();" as a variable definition with the nilable
 * iterator-result type.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}

/**
 * Builds "$result$ = $iterator$.next();" as an assignment; the receiver type is
 * nil-lifted before the call.
 */
BLangAssignment getIteratorNextAssignment(Location pos, BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    nextInvocation.expr.setBType(types.getSafeType(nextInvocation.expr.getBType(), true, false));
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}

// Builds the "$iterator$.next()" invocation node against the iterator object's attached next().
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}

// Finds the attached "next" function of an iterator object type; null if absent.
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
        if (bAttachedFunction.funcName.value.equals("next")) {
            return bAttachedFunction;
        }
    }
    return null;
}

// Convenience: access "$result$.value".
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType, BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}

// Builds a field access "<resultSymbol>.<fieldName>" of the given type.
// NOTE: method body continues beyond this chunk.
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol); BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName); BLangFieldBasedAccess fieldBasedAccessExpression = ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier); fieldBasedAccessExpression.pos = pos; fieldBasedAccessExpression.setBType(varType); fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType(); return fieldBasedAccessExpression; } private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) { BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode(); BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode(); returnNode.pos = bLangArrowFunction.body.expr.pos; returnNode.setExpression(bLangArrowFunction.body.expr); blockNode.addStatement(returnNode); return blockNode; } private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol; invocationNode.setBType(retType); invocationNode.requiredArgs = args; return invocationNode; } private BLangInvocation createLangLibInvocationNode(String functionName, BLangExpression onExpr, List<BLangExpression> args, BType retType, Location pos) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); name.pos = pos; invocationNode.name = name; 
invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.expr = onExpr; invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName)); ArrayList<BLangExpression> requiredArgs = new ArrayList<>(); requiredArgs.add(onExpr); requiredArgs.addAll(args); invocationNode.requiredArgs = requiredArgs; invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType); invocationNode.langLibInvocation = true; return invocationNode; } private BLangInvocation createLangLibInvocationNode(String functionName, List<BLangExpression> args, BType retType, Location pos) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); name.pos = pos; invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol, names.fromString(functionName), env); ArrayList<BLangExpression> requiredArgs = new ArrayList<>(); requiredArgs.addAll(args); invocationNode.requiredArgs = requiredArgs; invocationNode.setBType(retType != null ? 
retType : ((BInvokableSymbol) invocationNode.symbol).retType); invocationNode.langLibInvocation = true; return invocationNode; } private BLangArrayLiteral createArrayLiteralExprNode() { BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); expr.exprs = new ArrayList<>(); expr.setBType(new BArrayType(symTable.anyType)); return expr; } private void visitFunctionPointerInvocation(BLangInvocation iExpr) { BLangValueExpression expr; if (iExpr.expr == null) { expr = new BLangSimpleVarRef(); } else { BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess(); fieldBasedAccess.expr = iExpr.expr; fieldBasedAccess.field = iExpr.name; expr = fieldBasedAccess; } expr.symbol = iExpr.symbol; expr.setBType(iExpr.symbol.type); BLangExpression rewritten = rewriteExpr(expr); result = new BFunctionPointerInvocation(iExpr, rewritten); } private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) { if (types.isValueType(expr.getBType())) { return expr; } if (expr.getBType().tag == TypeTags.ERROR) { return expr; } BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos); return addConversionExprIfRequired(cloneInvok, lhsType); } private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) { if (types.isValueType(expr.getBType())) { return expr; } if (expr.getBType().tag == TypeTags.ERROR) { return expr; } BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), expr.getBType(), expr.pos); return addConversionExprIfRequired(cloneInvok, lhsType); } @SuppressWarnings("unchecked") <E extends BLangNode> E rewrite(E node, SymbolEnv env) { if (node == null) { return null; } if (node.desugared) { return node; } SymbolEnv previousEnv = this.env; this.env = env; node.accept(this); BLangNode resultNode = this.result; this.result = null; resultNode.desugared = true; this.env = previousEnv; return (E) resultNode; } 
@SuppressWarnings("unchecked")
    <E extends BLangExpression> E rewriteExpr(E node) {
        // Null and already-desugared expressions pass through untouched, so a
        // subtree is never desugared twice.
        if (node == null) {
            return null;
        }
        if (node.desugared) {
            return node;
        }
        BLangExpression expr = node;
        // If an implicit conversion has been attached to this node, desugar the
        // conversion wrapper instead; detach it first so it is not re-applied
        // when the result is processed.
        if (node.impConversionExpr != null) {
            expr = node.impConversionExpr;
            node.impConversionExpr = null;
        }
        // Visitor protocol: accept() dispatches into this class, the visit
        // method stores its output in this.result, which is consumed (and
        // cleared) here.
        expr.accept(this);
        BLangNode resultNode = this.result;
        this.result = null;
        resultNode.desugared = true;
        return (E) resultNode;
    }

    /**
     * Desugars a single statement, maintaining the doubly-linked chain of
     * {@code BLangStatementLink}s used to track statement nesting.
     *
     * @param statement statement to desugar; may be {@code null}
     * @param env       symbol environment to desugar under
     * @return the desugared statement (same node or a replacement)
     */
    @SuppressWarnings("unchecked")
    <E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
        if (statement == null) {
            return null;
        }
        // Push a new link onto the chain for the duration of this rewrite.
        BLangStatementLink link = new BLangStatementLink();
        link.parent = currentLink;
        currentLink = link;
        BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
        // Record the rewritten statement on the link (and vice versa), then pop.
        link.statement = stmt;
        stmt.statementLink = link;
        currentLink = link.parent;
        return (E) stmt;
    }

    // Desugars every statement in the list in place and returns the same list.
    private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
        for (int i = 0; i < nodeList.size(); i++) {
            nodeList.set(i, rewrite(nodeList.get(i), env));
        }
        return nodeList;
    }

    // Desugars every node in the list in place and returns the same list.
    private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
        for (int i = 0; i < nodeList.size(); i++) {
            nodeList.set(i, rewrite(nodeList.get(i), env));
        }
        return nodeList;
    }

    // Desugars every expression in the list in place and returns the same list.
    private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
        for (int i = 0; i < nodeList.size(); i++) {
            nodeList.set(i, rewriteExpr(nodeList.get(i)));
        }
        return nodeList;
    }

    // Builds a string literal node of type string at the given position.
    private BLangLiteral createStringLiteral(Location pos, String value) {
        BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
        stringLit.pos = pos;
        return stringLit;
    }

    // Builds an int literal node; note no position is set on this one.
    private BLangLiteral createIntLiteral(long value) {
        BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
        literal.value = value;
        literal.setBType(symTable.intType);
        return literal;
    }

    // Builds a byte literal; the signed Java byte is widened to its unsigned
    // int value, matching Ballerina's 0-255 byte range.
    private BLangLiteral createByteLiteral(Location pos, Byte value) {
        BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
        byteLiteral.pos = pos;
        return byteLiteral;
}

    /**
     * Wraps {@code expr} in an internal type-conversion node targeting
     * {@code targetType}; returns the expression unchanged when the static
     * types are already the same.
     */
    private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
        if (types.isSameType(expr.getBType(), targetType)) {
            return expr;
        }
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.pos = expr.pos;
        conversionExpr.expr = expr;
        conversionExpr.setBType(targetType);
        conversionExpr.targetType = targetType;
        // Marked internal: this cast was synthesized by the desugarer, not
        // written by the user.
        conversionExpr.internal = true;
        return conversionExpr;
    }

    // Recursively unwraps array types to their innermost element type;
    // non-array types are returned as-is.
    private BType getElementType(BType type) {
        if (type.tag != TypeTags.ARRAY) {
            return type;
        }
        return getElementType(((BArrayType) type).getElementType());
    }

    /**
     * Appends an implicit nil return to a block function body when the
     * function's return type is nullable and the body does not already end
     * with a return statement.
     *
     * @param invokableNode the function/method to patch
     */
    private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
        // Native functions and non-block bodies (e.g. expression bodies) are
        // left alone.
        if (Symbols.isNative(invokableNode.symbol) ||
                (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
            return;
        }
        BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
        // Only patch worker-free functions whose return type admits nil and
        // whose last statement is not already a return.
        if (invokableNode.workers.size() == 0 &&
                invokableNode.symbol.type.getReturnType().isNullable() &&
                (funcBody.stmts.size() < 1 ||
                        funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
            BLangReturn returnStmt;
            if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
                // Compiler-generated init functions get a positionless return.
                returnStmt = ASTBuilderUtil.createNilReturnStmt(null, symTable.nilType);
            } else {
                // Synthesize a zero-width location anchored at the function's
                // end line so diagnostics/debugging map the implicit return
                // to the close of the function.
                Location invPos = invokableNode.pos;
                Location returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                        invPos.lineRange().endLine().line(),
                        invPos.lineRange().endLine().line(),
                        invPos.lineRange().startLine().offset(),
                        invPos.lineRange().startLine().offset(), 0, 0);
                returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
            }
            funcBody.addStatement(returnStmt);
        }
    }

    /**
     * Reorder the invocation arguments to match the original function signature.
* * @param iExpr Function invocation expressions to reorder the arguments */ private void reorderArguments(BLangInvocation iExpr) { BSymbol symbol = iExpr.symbol; if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) { return; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol; List<BLangExpression> restArgs = iExpr.restArgs; int originalRequiredArgCount = iExpr.requiredArgs.size(); BLangSimpleVarRef varargRef = null; BLangBlockStmt blockStmt = null; BType varargVarType = null; int restArgCount = restArgs.size(); if (restArgCount > 0 && restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR && originalRequiredArgCount < invokableSymbol.params.size()) { BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr; Location varargExpPos = expr.pos; varargVarType = expr.getBType(); String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++; BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID, varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL); varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol); BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos); varDef.var = var; varDef.setBType(varargVarType); blockStmt = createBlockStmt(varargExpPos); blockStmt.stmts.add(varDef); } if (!invokableSymbol.params.isEmpty()) { reorderNamedArgs(iExpr, invokableSymbol, varargRef); } if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) { if (invokableSymbol.restParam == null) { return; } BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); List<BLangExpression> exprs = new ArrayList<>(); BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type; BType elemType = arrayType.eType; for 
(BLangExpression restArg : restArgs) { exprs.add(addConversionExprIfRequired(restArg, elemType)); } arrayLiteral.exprs = exprs; arrayLiteral.setBType(arrayType); if (restArgCount != 0) { iExpr.restArgs = new ArrayList<>(); } iExpr.restArgs.add(arrayLiteral); return; } if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) { if (iExpr.requiredArgs.size() == originalRequiredArgCount) { return; } BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0); BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg); stmtExpression.setBType(firstNonRestArg.getBType()); iExpr.requiredArgs.add(0, stmtExpression); if (invokableSymbol.restParam == null) { restArgs.remove(0); return; } BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0); BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type; if (restArgsExpression.getBType().tag == TypeTags.RECORD) { BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType); restArgs.add(expr); return; } Location pos = restArgsExpression.pos; BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode(); newArrayLiteral.setBType(restParamType); String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++; BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType, this.env.scope.owner, pos, VIRTUAL); BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos); varDef.var = var; varDef.setBType(restParamType); BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount); BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef); BLangInvocation intRangeInvocation = replaceWithIntRange(pos, 
startIndex, getModifiedIntRangeEndExpr(lengthInvocation)); BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode(); foreach.pos = pos; foreach.collection = intRangeInvocation; types.setForeachTypedBindingPatternType(foreach); final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType); foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name), this.env.scope.owner.pkgID, foreachVariable.getBType(), this.env.scope.owner, pos, VIRTUAL); BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol); foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable); foreach.isDeclaredWithVar = true; BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos); BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef); if (varargVarType.tag == TypeTags.ARRAY) { BArrayType arrayType = (BArrayType) varargVarType; if (arrayType.state == BArrayState.CLOSED && arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) { valueExpr.setBType(restParamType.eType); } else { valueExpr.setBType(arrayType.eType); } } else { valueExpr.setBType(symTable.anyOrErrorType); } BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType); BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody); BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef, List.of(pushExpr), restParamType, pos); pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1)); expressionStmt.expr = pushInvocation; foreach.body = foreachBody; BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos); newArrayBlockStmt.addStatement(varDef); newArrayBlockStmt.addStatement(foreach); BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef); newArrayStmtExpression.setBType(restParamType); 
restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType)); return; } BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type; BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteral.setBType(restParamType); BType elemType = restParamType.eType; Location pos = restArgs.get(0).pos; List<BLangExpression> exprs = new ArrayList<>(); for (int i = 0; i < restArgCount - 1; i++) { exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType)); } arrayLiteral.exprs = exprs; BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode(); pushRestArgsExpr.pos = pos; pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1); String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++; BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType, this.env.scope.owner, pos, VIRTUAL); BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos); varDef.var = var; varDef.setBType(restParamType); BLangBlockStmt pushBlockStmt = createBlockStmt(pos); pushBlockStmt.stmts.add(varDef); BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt); BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef, new ArrayList<BLangExpression>() {{ add(pushRestArgsExpr); }}, restParamType, pos); pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1)); expressionStmt.expr = pushInvocation; BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef); stmtExpression.setBType(restParamType); iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }}; } private void reorderNamedArgs(BLangInvocation iExpr, 
BInvokableSymbol invokableSymbol, BLangExpression varargRef) { List<BLangExpression> args = new ArrayList<>(); Map<String, BLangExpression> namedArgs = new LinkedHashMap<>(); iExpr.requiredArgs.stream() .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR) .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr)); List<BVarSymbol> params = invokableSymbol.params; List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>(); BLangRecordLiteral incRecordParamAllowAdditionalFields = null; int varargIndex = 0; BType varargType = null; boolean tupleTypedVararg = false; if (varargRef != null) { varargType = varargRef.getBType(); tupleTypedVararg = varargType.tag == TypeTags.TUPLE; } for (int i = 0; i < params.size(); i++) { BVarSymbol param = params.get(i); if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) { args.add(iExpr.requiredArgs.get(i)); } else if (namedArgs.containsKey(param.name.value)) { args.add(namedArgs.remove(param.name.value)); } else if (param.getFlags().contains(Flag.INCLUDED)) { BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); BType paramType = param.type; recordLiteral.setBType(paramType); args.add(recordLiteral); incRecordLiterals.add(recordLiteral); if (((BRecordType) paramType).restFieldType != symTable.noType) { incRecordParamAllowAdditionalFields = recordLiteral; } } else if (varargRef == null) { BLangExpression expr = new BLangIgnoreExpr(); expr.setBType(param.type); args.add(expr); } else { if (varargRef.getBType().tag == TypeTags.RECORD) { if (param.isDefaultable) { BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef, List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos); BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value)); BLangIndexBasedAccess memberAccessExpr = ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, 
indexExpr); BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type); BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type, hasKeyInvocation, memberAccessExpr, ignoreExpr); args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr)); } else { BLangFieldBasedAccess fieldBasedAccessExpression = ASTBuilderUtil.createFieldAccessExpr(varargRef, ASTBuilderUtil.createIdentifier(param.pos, param.name.value)); fieldBasedAccessExpression.setBType(param.type); args.add(fieldBasedAccessExpression); } } else { BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex)); BType memberAccessExprType = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType; args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType, varargRef, indexExpr), param.type)); varargIndex++; } } } if (namedArgs.size() > 0) { setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields); } iExpr.requiredArgs = args; } private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs, List<BLangRecordLiteral> incRecordLiterals, BLangRecordLiteral incRecordParamAllowAdditionalFields) { for (String name : namedArgs.keySet()) { boolean isAdditionalField = true; BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name); for (BLangRecordLiteral recordLiteral : incRecordLiterals) { LinkedHashMap<String, BField> fields = ((BRecordType) recordLiteral.getBType()).fields; if (fields.containsKey(name) && fields.get(name).type.tag != TypeTags.NEVER) { isAdditionalField = false; createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr); break; } } if (isAdditionalField) { createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr); } } } private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral, 
BLangNamedArgsExpression expr) { BLangSimpleVarRef varRef = new BLangSimpleVarRef(); varRef.variableName = expr.name; BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil. createBLangRecordKeyValue(varRef, expr.expr); recordLiteral.fields.add(recordKeyValueField); } private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(Location location, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) { BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType; Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ? ((BUnionType) enclosingFuncReturnType).getMemberTypes() : new LinkedHashSet<BType>() {{ add(enclosingFuncReturnType); }}; boolean returnOnError = equivalentErrorTypes.stream() .allMatch(errorType -> returnTypeSet.stream() .anyMatch(retType -> types.isAssignable(errorType, retType))); String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure"; BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(patternFailureCaseVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner, location, VIRTUAL)); BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(location, patternFailureCaseVar.symbol); BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode(); patternBlockFailureCase.pos = location; if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) { BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode(); failStmt.pos = location; failStmt.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(failStmt); if (returnOnError && this.shouldReturnErrors) { BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(patternFailureCaseVarRef, env)); errorReturn.desugared = true; failStmt.exprStmt = errorReturn; } } 
else { BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = location; panicNode.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(panicNode); } return ASTBuilderUtil.createMatchStatementPattern(location, patternFailureCaseVar, patternBlockFailureCase); } private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(Location location, BType lhsType, boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) { String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match"; BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(location, patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0, names.fromString(patternSuccessCaseVarName), this.env.scope.owner.pkgID, lhsType, this.env.scope.owner, location, VIRTUAL)); BLangExpression varRefExpr; if (isVarDef) { varRefExpr = ASTBuilderUtil.createVariableRef(location, varSymbol); } else { varRefExpr = lhsExpr; } BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(location, patternSuccessCaseVar.symbol); BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(location, varRefExpr, patternSuccessCaseVarRef, false); BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(location, new ArrayList<BLangStatement>() {{ add(assignmentStmtSuccessCase); }}); return ASTBuilderUtil.createMatchStatementPattern(location, patternSuccessCaseVar, patternBlockSuccessCase); } private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) { List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses; BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar); BLangIf currentIfNode = parentIfNode; for (int i = 1; i < patterns.size(); i++) { BLangMatchBindingPatternClause patternClause = patterns.get(i); if (i == patterns.size() - 1 && patternClause.isLastPattern) { currentIfNode.elseStmt = 
getMatchPatternElseBody(patternClause, matchExprVar);
        } else {
            // Chain subsequent pattern clauses as the else-branch of the previous if.
            currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
            currentIfNode = (BLangIf) currentIfNode.elseStmt;
        }
    }
    return parentIfNode;
}

/**
 * Generate an if-else statement from the given match statement.
 *
 * @param pattern match pattern statement node
 * @param matchExprVar variable node of the match expression
 * @return if else statement node
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
    BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
    if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
    }

    // For structured patterns, the expected type is derived from the binding pattern itself.
    BType expectedType = matchExprVar.getBType();
    if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
        BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
        expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
    }

    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        // Cast the matched value to the pattern's type, then bind the pattern
        // variables to a reference of that casted value.
        BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;

        // Create the variable definition for the binding pattern, per binding-pattern kind.
        BLangStatement varDefStmt;
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }

        if (structuredPattern.typeGuardExpr != null) {
            // With a type guard, fold the variable definitions and the guard into a
            // statement expression and AND it with the pattern condition.
            BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
            blockStmt.addStatement(varDef);
            blockStmt.addStatement(varDefStmt);
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                    structuredPattern.typeGuardExpr);
            stmtExpr.setBType(symTable.booleanType);

            ifCondition = ASTBuilderUtil
                    .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                            (BOperatorSymbol) symResolver
                                    .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                            symTable.booleanType));
        } else {
            // No type guard: prepend the definitions to the pattern body.
            structuredPattern.body.stmts.add(0, varDef);
            structuredPattern.body.stmts.add(1, varDefStmt);
        }
    }

    return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}

// Builds the body of a typed-binding pattern clause: prepends a variable definition
// that binds the pattern variable to the (converted) matched value. The `_` variable
// (Names.IGNORE) gets no binding.
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                           BLangSimpleVariable matchExprVar) {
    BLangBlockStmt body;

    BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern;
    // Add the variable definition to the body of the pattern clause
    if (patternClause.variable.name.value.equals(Names.IGNORE.value)) {
        return patternClause.body;
    }

    // create TypeName i = <TypeName> _$$_
    BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos,
            matchExprVar.symbol);
    BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef,
            patternClause.variable.getBType());
    BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "",
            patternClause.variable.getBType(), patternVarExpr, patternClause.variable.symbol);
    BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar);
    patternClause.body.stmts.add(0, patternVarDef);
    body = patternClause.body;

    return body;
}

// Builds the body used when a structured pattern clause acts as the trailing else
// branch: binds the pattern variables directly to the matched value (no cast).
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {
    BLangBlockStmt body = pattern.body;

    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);

        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;

        BLangStatement varDefStmt;
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }

        structuredPattern.body.stmts.add(0, varDefStmt);
        body = structuredPattern.body;
    }

    return body;
}

// Wraps `expr` in an implicit type-conversion node targeting `lhsType` when the
// types differ; returns `expr` unchanged for the same type or for a handful of
// combinations that need no conversion (json<-nil, nil<-nullable, array<-tuple).
// If the type checker already attached an implicit cast, that cast is reused.
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }

    BType rhsType = expr.getBType();
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }

    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        BLangExpression impConversionExpr = expr.impConversionExpr;
        expr.impConversionExpr = null;
        return impConversionExpr;
    }

    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }

    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }

    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }

    // Create a conversion expression; checkTypes=false because types were resolved above.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.setBType(lhsType);
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    conversionExpr.internal = true;
    return conversionExpr;
}

// Builds the boolean condition that decides whether a match clause applies.
// For a union pattern type, the per-member conditions are OR-ed together.
private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                 BVarSymbol varSymbol) {
    BType patternType;

    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            BLangMatchStaticBindingPatternClause staticPattern =
                    (BLangMatchStaticBindingPatternClause) patternClause;
            patternType = staticPattern.literal.getBType();
            break;
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) patternClause;
            patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
            break;
        default:
            BLangMatchTypedBindingPatternClause simplePattern =
                    (BLangMatchTypedBindingPatternClause) patternClause;
            patternType = simplePattern.variable.getBType();
            break;
    }

    BLangExpression binaryExpr;
    BType[] memberTypes;
    if (patternType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) patternType;
        memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
    } else {
        memberTypes = new BType[1];
        memberTypes[0] = patternType;
    }

    if (memberTypes.length == 1) {
        binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
    } else {
        // Fold the member conditions together with OR, two at a time.
        BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                        lhsExpr.getBType(), rhsExpr.getBType()));
        for (int i = 2; i < memberTypes.length; i++) {
            lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
            rhsExpr = binaryExpr;
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.getBType(), rhsExpr.getBType()));
        }
    }
    return binaryExpr;
}

// Derives the BType implied by a binding pattern. For record and error patterns
// this SYNTHESIZES new anonymous types and registers them (scope.define,
// TypeDefBuilderHelper.addTypeDefinition) — i.e. it has package-level side effects
// and bumps the recordCount/errorCount counters.
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }

    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        // Synthesize an anonymous record type for this binding pattern.
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0,
                        names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                        env.enclPkg.symbol.pkgID, null,
                        env.scope.owner, recordVariable.pos, VIRTUAL);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);

        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();

        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                    env.enclPkg.symbol.pkgID, fieldType, recordSymbol,
                    bindingPatternVariable.pos, VIRTUAL);

            // Field symbol rather than variables are used at codegen phase; the
            // variable list here is only used for the record type node.
            fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType,
                    null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }

        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;

        // If rest param exists, its rest field type is carried over; otherwise anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BRecordType) ((BLangSimpleVariable) recordVariable.restParam).getBType()).restFieldType :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;

        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                recordVarType, bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);

        return recordVarType;
    }

    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null, errorVariable.pos, VIRTUAL);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            // Only a rest-detail binding: reuse the generic detail type.
            detailType = symTable.detailType;
        } else {
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                    errorVariable.pos);

            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            recordTypeNode.initFunction = TypeDefBuilderHelper
                    .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
        errorTypeSymbol.type = errorType;

        TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);

        return errorType;
    }

    return bindingPatternVariable.getBType();
}

// Builds the record-type AST node describing an error variable's detail record.
// Detail entries without a resolved symbol get a synthesized "<key>$" symbol.
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> fieldList = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
        BVarSymbol symbol = field.valueBindingPattern.symbol;
        if (symbol == null) {
            symbol = new BVarSymbol(Flags.PUBLIC, names.fromString(field.key.value + "$"),
                    this.env.enclPkg.packageID, symTable.pureType, null,
                    field.valueBindingPattern.pos, VIRTUAL);
        }
        BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
                field.valueBindingPattern.pos,
                symbol.name.value, field.valueBindingPattern.getBType(),
                field.valueBindingPattern.expr, symbol);
        fieldList.add(fieldVar);
    }
    return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}

// Builds an anonymous record type holding one field per error-detail binding entry.
// The record is sealed only when there is no rest-detail binding.
// NOTE(review): parameter `errorNo` is unused in the visible body — confirm intent.
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo, Location pos) {
    BRecordType detailRecordType = createAnonRecordType(pos);

    if (restDetail == null) {
        detailRecordType.sealed = true;
    }

    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
        Name fieldName = names.fromIdNode(detailEntry.key);
        BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
        BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,
                detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);
        detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));
        detailRecordType.tsymbol.scope.define(fieldName, fieldSym);
    }

    return detailRecordType;
}

// Creates an empty anonymous record type (anydata rest field) with an init function
// registered in its scope.
private BRecordType createAnonRecordType(Location pos) {
    BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
            env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
    detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
    detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
    detailRecordTypeSymbol.scope.define(
            names.fromString(detailRecordTypeSymbol.name.value + "."
                    + detailRecordTypeSymbol.initializerFunc.funcName.value),
            detailRecordTypeSymbol.initializerFunc.symbol);

    BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
    detailRecordType.restFieldType = symTable.anydataType;
    return detailRecordType;
}

// Creates the no-arg, nil-returning init function attached to synthesized record types.
private BAttachedFunction createRecordInitFunc() {
    BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID,
            bInvokableType, env.scope.owner, false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType,
            symTable.builtinPos);
}

// Wraps a BErrorType in an error-type AST node.
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(errorType);
    return errorTypeNode;
}

// Builds the per-member-type condition for a match clause:
// static pattern -> equality, structured pattern -> is-like test,
// nil type -> `== ()`, anything else -> is-assignable test.
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol, BType patternType) {
    Location pos = patternClause.pos;

    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

    if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
        BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
        return createBinaryExpression(pos, varRef, pattern.literal);
    }

    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
        return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
    }

    if (patternType == symTable.nilType) {
        BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
        return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
                OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                        symTable.anyType, symTable.nilType));
    } else {
        return createIsAssignableExpression(pos, varSymbol, patternType);
    }
}

// Builds the equality test between the matched value and a static pattern literal.
// Recurses into grouped and binary (OR) literal expressions; the `_` variable ref
// becomes an `is any` type test.
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
                                               BLangExpression expression) {
    BLangBinaryExpr binaryExpr;
    if (NodeKind.GROUP_EXPR == expression.getKind()) {
        return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
    }
    if (NodeKind.BINARY_EXPR == expression.getKind()) {
        binaryExpr = (BLangBinaryExpr) expression;
        BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
        BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver
                        .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
        BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyType.setBType(symTable.anyType);
        anyType.typeKind = TypeKind.ANY;
        return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
    } else {
        binaryExpr = ASTBuilderUtil
                .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
                expression.getBType());
        if (opSymbol == symTable.notFoundSymbol) {
            // No direct equality operator between the two types: fall back to
            // anydata-based equality resolution.
            opSymbol = symResolver
                    .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType,
                            expression.getBType(), binaryExpr, env);
        }
        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    return binaryExpr;
}

// Builds an `is assignable` test expression for the given variable against `patternType`.
private BLangIsAssignableExpr createIsAssignableExpression(Location pos,
                                                           BVarSymbol varSymbol,
                                                           BType patternType) {
    //  _$$_ isassignable patternType
    // Create a variable reference for _$$_
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

    // Binary operator for equality
    return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names,
            symTable.builtinPos);
}
// Builds an `is like` test expression for the given value against `type`.
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}

// Converts a variable (with an initializer) into the equivalent assignment statement
// `varName = initExpr`, reusing the variable's position, symbol and type.
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = variable.pos;
    varRef.variableName = variable.name;
    varRef.symbol = variable.symbol;
    varRef.setBType(variable.getBType());

    BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignmentStmt.expr = variable.expr;
    assignmentStmt.pos = variable.pos;
    assignmentStmt.setVariable(varRef);
    return assignmentStmt;
}

// Convenience overload: derives the field details from the variable itself.
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol selfSymbol) {
    return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
            variable.name);
}

// Builds (and desugars, via rewrite in the function's env) `self.<fieldName> = <expr>`
// for use inside an object/record init function. The access is flagged as a
// store-on-creation write.
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                BLangIdentifier fieldName) {
    BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);
    fieldAccess.symbol = fieldSymbol;
    fieldAccess.setBType(fieldType);
    fieldAccess.isStoreOnCreation = true;

    BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignmentStmt.expr = expr;
    assignmentStmt.pos = function.pos;
    assignmentStmt.setVariable(fieldAccess);

    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignmentStmt, initFuncEnv);
}

// Appends a pass-through default clause to a match expression covering every member
// type of the matched expression that no existing clause can accept. Does nothing
// when all types are already matched.
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes;
    List<BType> unmatchedTypes = new ArrayList<>();

    if (bLangMatchExpression.expr.getBType().tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) bLangMatchExpression.expr.getBType();
        exprTypes = new ArrayList<>(unionType.getMemberTypes());
    } else {
        exprTypes = Lists.of(bLangMatchExpression.getBType());
    }

    // Collect the member types not assignable to any existing pattern variable.
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            if (this.types.isAssignable(type, pattern.variable.getBType())) {
                assignable = true;
                break;
            }
        }

        if (!assignable) {
            unmatchedTypes.add(type);
        }
    }

    if (unmatchedTypes.isEmpty()) {
        return;
    }

    BType defaultPatternType;
    if (unmatchedTypes.size() == 1) {
        defaultPatternType = unmatchedTypes.get(0);
    } else {
        defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
    }

    String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
    BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            patternCaseVarName, defaultPatternType, null, new BVarSymbol(0,
                    names.fromString(patternCaseVarName),
                    this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner,
                    bLangMatchExpression.pos, VIRTUAL));

    BLangMatchExprPatternClause defaultPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    defaultPattern.variable = patternMatchCaseVar;
    defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos,
            patternMatchCaseVar.symbol);
    defaultPattern.pos = bLangMatchExpression.pos;
    bLangMatchExpression.patternClauses.add(defaultPattern);
}

// Returns true if this access expression (or any access expression it is rooted on)
// uses safe navigation (?. / error-lifting) and is a readable (non-LValue) access.
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.isLValue || accessExpr.expr == null) {
        return false;
    }

    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }

    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return safeNavigate((BLangAccessExpression) accessExpr.expr);
    }

    return false;
}

// Rewrites a safe-navigation access chain into a statement expression:
// { temp_result var def; nested match statements; } followed by a read of temp_result.
// Resets the traversal state fields (matchStmtStack, accessExprStack, successPattern,
// safeNavigationAssignment) afterwards, so this method is not reentrant.
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
    BType originalExprType = accessExpr.getBType();
    // Create a temp variable to hold the intermediate result of the acces expression.
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
                    new BVarSymbol(0, names.fromString(matchTempResultVarName),
                            this.env.scope.owner.pkgID, accessExpr.getBType(),
                            this.env.scope.owner, accessExpr.pos, VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

    // Create a chain of match statements
    handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);

    // Create a statement-expression including the match statement
    BLangMatch matcEXpr = this.matchStmtStack.firstElement();
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
    stmtExpression.setBType(originalExprType);

    // Reset the variables
    this.matchStmtStack = new Stack<>();
    this.accessExprStack = new Stack<>();
    this.successPattern = null;
    this.safeNavigationAssignment = null;
    return stmtExpression;
}

// Recursively desugars one link of a safe-navigation chain into a match statement
// with nil/error patterns (as flagged on the access) plus a success pattern that
// performs the actual field/index access. Works bottom-up: recurses into the root
// expression first, then chains via pushToMatchStatementStack.
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type,
                                  BLangSimpleVariable tempResultVar) {
    if (accessExpr.expr == null) {
        return;
    }

    // If the parent of current expr is the root, terminate
    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
    }

    if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
        // Plain access: restore the original (non-lifted) type; XML access keeps the
        // possible error in its type.
        BType originalType = accessExpr.originalType;
        if (TypeTags.isXMLTypeTag(originalType.tag)) {
            accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
        } else {
            accessExpr.setBType(originalType);
        }
        if (this.safeNavigationAssignment != null) {
            this.safeNavigationAssignment.expr =
                    addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
        }
        return;
    }

    /*
     * If the field access is a safe navigation, create a match expression.
     * Then chain the current expression as the success-pattern of the parent
     * match expr, if available.
     * eg:
     * x but {                              <--- parent match expr
     *   error e => e,
     *   T t => t.y but {                   <--- current expr
     *            error e => e,
     *            R r => r.z
     *          }
     * }
     */
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr,
            new ArrayList<>());

    boolean isAllTypesRecords = false;
    LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
    if (accessExpr.expr.getBType().tag == TypeTags.UNION) {
        memTypes = new LinkedHashSet<>(((BUnionType) accessExpr.expr.getBType()).getMemberTypes());
        isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
    }

    // Add pattern to lift nil
    if (accessExpr.nilSafeNavigation) {
        matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
        matchStmt.setBType(type);
        memTypes.remove(symTable.nilType);
    }

    // Add pattern to lift error, if applicable
    if (accessExpr.errorSafeNavigation) {
        matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
        matchStmt.setBType(type);
        matchStmt.pos = accessExpr.pos;
        memTypes.remove(symTable.errorType);
    }

    BLangMatchTypedBindingPatternClause successPattern = null;
    Name field = getFieldName(accessExpr);
    if (field == Names.EMPTY) {
        successPattern = getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
                accessExpr.errorSafeNavigation);
        matchStmt.patternClauses.add(successPattern);
        pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
        return;
    }

    if (isAllTypesRecords) {
        // Add one success pattern per record member that can carry the field; a
        // match-all-and-return-nil pattern covers the rest.
        for (BType memberType : memTypes) {
            BRecordType recordType = (BRecordType) memberType;
            if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
                successPattern =
                        getSuccessPattern(memberType, accessExpr, tempResultVar,
                                accessExpr.errorSafeNavigation);
                matchStmt.patternClauses.add(successPattern);
            }
        }
        matchStmt.patternClauses.add(getMatchAllAndNilReturnPattern(accessExpr, tempResultVar));
        pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
        return;
    }

    // Add pattern to get the successful access expression
    successPattern = getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
            accessExpr.errorSafeNavigation);
    matchStmt.patternClauses.add(successPattern);
    pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
}

// Pushes the new match statement onto the traversal stack and nests it inside the
// previously recorded success pattern (if any), then records the new success pattern.
private void pushToMatchStatementStack(BLangMatch matchStmt, BLangAccessExpression accessExpr,
                                       BLangMatchTypedBindingPatternClause successPattern) {
    this.matchStmtStack.push(matchStmt);
    if (this.successPattern != null) {
        this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
    }
    this.successPattern = successPattern;
}

// Extracts the accessed field name: the identifier for field access, or the literal
// string for a literal index access; Names.EMPTY otherwise.
private Name getFieldName(BLangAccessExpression accessExpr) {
    Name field = Names.EMPTY;
    if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        field = new Name(((BLangFieldBasedAccess) accessExpr).field.value);
    } else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        if (indexBasedExpression.getKind() == NodeKind.LITERAL) {
            field = new Name(((BLangLiteral) indexBasedExpression).value.toString());
        }
    }
    return field;
}

// True when every member of the union is a record, error, or nil type.
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    for (BType memType : memTypes) {
        int typeTag = memType.tag;
        if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) {
            return false;
        }
    }
    return true;
}

// Builds the `error e => temp_result = e` clause used to lift errors out of a
// safe-navigation match.
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BLangSimpleVariable errorPatternVar =
            ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName, symTable.errorType, null,
                    new BVarSymbol(0, names.fromString(errorPatternVarName),
                            this.env.scope.owner.pkgID, symTable.errorType,
                            this.env.scope.owner, expr.pos, VIRTUAL));

    // Create assignment to temp result
    BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol);
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));

    // Create the pattern
    // R b => a = b;
    BLangMatchTypedBindingPatternClause errorPattern = ASTBuilderUtil
            .createMatchStatementPattern(expr.pos, errorPatternVar, patternBody);
    return errorPattern;
}

// Builds a nil pattern clause for a match EXPRESSION whose body is the given expression.
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(Location pos,
                                                                       BLangExpression expr) {
    String nullPatternVarName = IGNORE.toString();
    BLangSimpleVariable errorPatternVar =
            ASTBuilderUtil.createVariable(pos, nullPatternVarName, symTable.nilType, null,
                    new BVarSymbol(0, names.fromString(nullPatternVarName),
                            this.env.scope.owner.pkgID, symTable.nilType,
                            this.env.scope.owner, pos, VIRTUAL));

    BLangMatchExprPatternClause nullPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nullPattern.variable = errorPatternVar;
    nullPattern.expr = expr;
    nullPattern.pos = pos;
    return nullPattern;
}

// Builds the `() n => temp_result = n` clause used to lift nil out of a
// safe-navigation match.
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BLangSimpleVariable nullPatternVar =
            ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName, symTable.nilType, null,
                    new BVarSymbol(0, names.fromString(nullPatternVarName),
                            this.env.scope.owner.pkgID, symTable.nilType,
                            this.env.scope.owner, expr.pos, VIRTUAL));

    // Create assignment to temp result
    BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol);
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));

    // Create the pattern
    // R b => a = b;
    BLangMatchTypedBindingPatternClause nullPattern = ASTBuilderUtil
            .createMatchStatementPattern(expr.pos, nullPatternVar, patternBody);
    return nullPattern;
}

// Builds the catch-all `_ => temp_result = ()` static clause used when a record
// union's remaining members cannot carry the accessed field.
private BLangMatchStaticBindingPatternClause getMatchAllAndNilReturnPattern(
        BLangExpression expr, BLangSimpleVariable tempResultVar) {

    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef,
            createLiteral(expr.pos, symTable.nilType, Names.NIL_VALUE), false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));

    BLangMatchStaticBindingPatternClause matchAllPattern =
            (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern();
    String matchAllVarName = "_";
    matchAllPattern.literal =
            ASTBuilderUtil.createVariableRef(expr.pos, new BVarSymbol(0,
                    names.fromString(matchAllVarName),
                    this.env.scope.owner.pkgID, symTable.anyType,
                    this.env.scope.owner, expr.pos, VIRTUAL));
    matchAllPattern.body = patternBody;
    return matchAllPattern;
}

// Builds the success clause `T t => temp_result = t.<field>` of a safe-navigation
// match: binds a fresh variable of the safe (nil/error-lifted) type, clones the
// access rooted on it, and assigns the access result to temp_result. Records the
// assignment in this.safeNavigationAssignment for a later conversion fix-up.
private BLangMatchTypedBindingPatternClause getSuccessPattern(BType type, BLangAccessExpression accessExpr,
                                                              BLangSimpleVariable tempResultVar,
                                                              boolean liftError) {
    type = types.getSafeType(type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";

    BVarSymbol successPatternSymbol;
    if (type.tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0,
                names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type,
                this.env.scope.owner, accessExpr.pos, VIRTUAL);
    } else {
        successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner,
                accessExpr.pos, VIRTUAL);
    }

    BLangSimpleVariable successPatternVar =
            ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName, type, null,
                    successPatternSymbol);

    // Create x.foo, by replacing the varRef expr of the current expression, with the new temp var ref
    BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
    }
    if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
        ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
                ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
    }

    tempAccessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
    tempAccessExpr.errorSafeNavigation = false;
    tempAccessExpr.nilSafeNavigation = false;
    accessExpr.cloneRef = null;

    // XML member access expressions always return XML|error.
    if (TypeTags.isXMLTypeTag(tempAccessExpr.expr.getBType().tag)) {
        tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                symTable.nilType));
    } else {
        tempAccessExpr.setBType(accessExpr.originalType);
    }
    tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;

    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr,
            tempResultVarRef.getBType());
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));

    // Create the pattern
    // R b => a = x.foo;
    BLangMatchTypedBindingPatternClause successPattern =
            ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
    this.safeNavigationAssignment = assignmentStmt;
    return successPattern;
}

// Returns true if any link of the LHS access chain is rooted on a nullable value.
private boolean safeNavigateLHS(BLangExpression expr) {
    if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR
            && expr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }

    BLangExpression varRef = ((BLangAccessExpression) expr).expr;
    if (varRef.getBType().isNullable()) {
        return true;
    }

    return safeNavigateLHS(varRef);
}

// Rewrites an assignment whose LHS contains nullable access links into a block:
// guard statements for each nullable link (createLHSSafeNavigation) followed by the
// actual assignment on a cloned LHS.
// NOTE(review): parameter `safeAssignment` is unused in the visible body — confirm intent.
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr,
                                                       BLangExpression rhsExpr,
                                                       boolean safeAssignment) {
    this.accessExprStack = new Stack<>();
    List<BLangStatement> stmts = new ArrayList<>();
    createLHSSafeNavigation(stmts, accessExpr.expr);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
            cloneExpression(accessExpr), rhsExpr);
    stmts.add(assignment);
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}

// Emits, for each nullable link of an LHS access chain (walking from the root):
// - a temp variable for intermediate invocation results, and
// - an `if (link is ()) { ... }` guard that either initializes a defaultable mapping
//   in place or panics with a NullReferenceException-style error.
// Uses this.accessExprStack to thread the (possibly replaced) sub-expressions back
// up the recursion.
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
    NodeKind kind = expr.getKind();
    boolean root = false;
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
        createLHSSafeNavigation(stmts, accessExpr.expr);
        accessExpr.expr = accessExprStack.pop();
    } else {
        root = true;
    }

    // If the expression is an invocation, then create a temp var to store the invocation value,
    // so that the invocation will happen only one time
    if (expr.getKind() == NodeKind.INVOCATION) {
        BLangInvocation invocation = (BLangInvocation) expr;
        BVarSymbol interMediateSymbol = new BVarSymbol(0,
                names.fromString(GEN_VAR_PREFIX.value + "i_intermediate"),
                this.env.scope.owner.pkgID, invocation.getBType(),
                this.env.scope.owner, expr.pos, VIRTUAL);
        BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
                interMediateSymbol.name.value, invocation.getBType(), invocation, interMediateSymbol);
        BLangSimpleVariableDef intermediateVariableDefinition =
                ASTBuilderUtil.createVariableDef(invocation.pos, intermediateVariable);
        stmts.add(intermediateVariableDefinition);

        expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
    }

    if (expr.getBType().isNullable()) {
        BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
        isNillTest.setBType(symTable.booleanType);

        BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);

        // create a copy of the expr for the then-branch and strip nil from its type
        expr = cloneExpression(expr);
        expr.setBType(types.getSafeType(expr.getBType(), true, false));

        if (isDefaultableMappingType(expr.getBType()) && !root) { // TODO for records, type should be defaultable as well
            // This will properly get desugared later to a json literal
            BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            jsonLiteral.setBType(expr.getBType());
            jsonLiteral.pos = expr.pos;
            BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
                    expr, jsonLiteral);
            thenStmt.addStatement(assignment);
        } else {
            BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
            literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
            literal.setBType(symTable.stringType);

            BLangErrorConstructorExpr errorConstructorExpr =
                    (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
            BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
                    names.fromString(""), names.fromString("error"));
            errorConstructorExpr.setBType(symbol.type);
            errorConstructorExpr.pos = expr.pos;
            List<BLangExpression> positionalArgs = new ArrayList<>();
            positionalArgs.add(literal);
            errorConstructorExpr.positionalArgs = positionalArgs;

            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.expr = errorConstructorExpr;
            panicNode.pos = expr.pos;
            thenStmt.addStatement(panicNode);
        }

        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
        stmts.add(ifelse);
    }

    accessExprStack.push(expr);
}

// Builds the `()` (nil) value-type AST node used in `is ()` tests.
BLangValueType getNillTypeNode() {
    BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nillTypeNode.typeKind = TypeKind.NIL;
    nillTypeNode.setBType(symTable.nilType);
    return nillTypeNode;
}

// Shallow-clones a value expression by kind (continues beyond this view).
private BLangValueExpression cloneExpression(BLangExpression expr) {
    switch (expr.getKind()) {
        case SIMPLE_VARIABLE_REF:
            return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
            return cloneAccessExpr((BLangAccessExpression) expr);
default: throw new IllegalStateException(); } } private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) { if (originalAccessExpr.expr == null) { return originalAccessExpr; } BLangExpression varRef; NodeKind kind = originalAccessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr); } else { varRef = cloneExpression(originalAccessExpr.expr); } varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false)); BLangAccessExpression accessExpr; switch (originalAccessExpr.getKind()) { case FIELD_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef, ((BLangFieldBasedAccess) originalAccessExpr).field); break; case INDEX_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef, ((BLangIndexBasedAccess) originalAccessExpr).indexExpr); break; default: throw new IllegalStateException(); } accessExpr.originalType = originalAccessExpr.originalType; accessExpr.pos = originalAccessExpr.pos; accessExpr.isLValue = originalAccessExpr.isLValue; accessExpr.symbol = originalAccessExpr.symbol; accessExpr.errorSafeNavigation = false; accessExpr.nilSafeNavigation = false; accessExpr.setBType(originalAccessExpr.originalType); return accessExpr; } private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.intType, symTable.intType)); } private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, 
symTable.intType, OperatorKind.SUB, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB, symTable.intType, symTable.intType)); } private BLangLiteral getBooleanLiteral(boolean value) { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = value; literal.setBType(symTable.booleanType); literal.pos = symTable.builtinPos; return literal; } private boolean isDefaultableMappingType(BType type) { switch (types.getSafeType(type, true, false).tag) { case TypeTags.JSON: case TypeTags.MAP: case TypeTags.RECORD: return true; default: return false; } } private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) { BType returnType = symTable.nilType; BLangFunction userDefinedInitMethod = classDefinition.initFunction; if (userDefinedInitMethod != null) { returnType = userDefinedInitMethod.getBType().getReturnType(); } BLangFunction initFunction = TypeDefBuilderHelper.createInitFunctionForStructureType(classDefinition.pos, classDefinition.symbol, env, names, GENERATED_INIT_SUFFIX, classDefinition.getBType(), returnType); BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol); typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol, (BInvokableType) initFunction.getBType(), classDefinition.pos); classDefinition.generatedInitFunction = initFunction; initFunction.returnTypeNode.setBType(returnType); return rewrite(initFunction, env); } private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) { /* * Desugar (lhsExpr && rhsExpr) to following if-else: * * logical AND: * ------------- * T $result$; * if (lhsExpr) { * $result$ = rhsExpr; * } else { * $result$ = false; * } * * logical OR: * ------------- * T $result$; * if (lhsExpr) { * $result$ = true; * } else { * $result$ = rhsExpr; * } * */ BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null, 
symTable.builtinPos); BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol); BLangExpression thenResult; if (binaryExpr.opKind == OperatorKind.AND) { thenResult = binaryExpr.rhsExpr; } else { thenResult = getBooleanLiteral(true); } BLangAssignment thenAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult); thenBody.addStatement(thenAssignment); BLangExpression elseResult; BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol); if (binaryExpr.opKind == OperatorKind.AND) { elseResult = getBooleanLiteral(false); } else { elseResult = binaryExpr.rhsExpr; } BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult); elseBody.addStatement(elseAssignment); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse)); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.setBType(binaryExpr.getBType()); result = rewriteExpr(stmtExpr); } /** * Split packahe init function into several smaller functions. 
* * @param packageNode package node * @param env symbol environment * @return initial init function but trimmed in size */ private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) { int methodSize = INIT_METHOD_SPLIT_SIZE; BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body; if (!isJvmTarget) { return packageNode.initFunction; } BLangFunction initFunction = packageNode.initFunction; List<BLangFunction> generatedFunctions = new ArrayList<>(); List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts); funcBody.stmts.clear(); BLangFunction newFunc = initFunction; BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body; int varDefIndex = 0; for (int i = 0; i < stmts.size(); i++) { BLangStatement statement = stmts.get(i); if (statement.getKind() == NodeKind.VARIABLE_DEF) { break; } varDefIndex++; if (i > 0 && (i % methodSize == 0 || isAssignmentWithInitOrRecordLiteralExpr(statement))) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } List<BLangStatement> chunkStmts = new ArrayList<>(); for (int i = varDefIndex; i < stmts.size(); i++) { BLangStatement stmt = stmts.get(i); chunkStmts.add(stmt); varDefIndex++; if ((stmt.getKind() == NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) && (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) { if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.addAll(chunkStmts); chunkStmts.clear(); } else if ((stmt.getKind() == 
NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) && Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags, Flags.LISTENER) ) { break; } } newFuncBody.stmts.addAll(chunkStmts); for (int i = varDefIndex; i < stmts.size(); i++) { if (i > 0 && i % methodSize == 0) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } generatedFunctions.add(newFunc); for (int j = 0; j < generatedFunctions.size() - 1; j++) { BLangFunction thisFunction = generatedFunctions.get(j); BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(initFunction.pos, createInvocationNode(generatedFunctions.get(j + 1).name.value, new ArrayList<>(), symTable.errorOrNilType), symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt expressionStmt = ASTBuilderUtil .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body); expressionStmt.expr = checkedExpr; expressionStmt.expr.pos = initFunction.pos; if (j > 0) { thisFunction = rewrite(thisFunction, env); packageNode.functions.add(thisFunction); packageNode.topLevelNodes.add(thisFunction); } } if (generatedFunctions.size() > 1) { BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1); lastFunc = rewrite(lastFunc, env); packageNode.functions.add(lastFunc); packageNode.topLevelNodes.add(lastFunc); } return generatedFunctions.get(0); } private boolean isAssignmentWithInitOrRecordLiteralExpr(BLangStatement statement) { if (statement.getKind() == NodeKind.ASSIGNMENT) { return isMappingOrObjectConstructorOrObjInit(((BLangAssignment) statement).getExpression()); } return false; } protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) { switch 
(expression.getKind()) { case TYPE_INIT_EXPR: case RECORD_LITERAL_EXPR: case OBJECT_CTOR_EXPRESSION: return true; case CHECK_EXPR: return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr); case TYPE_CONVERSION_EXPR: return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr); default: return false; } } /** * Create an intermediate package init function. * * @param pkgNode package node * @param env symbol environment of package */ private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) { String alias = pkgNode.symbol.pkgID.toString(); BLangFunction initFunction = ASTBuilderUtil .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++), symTable); createInvokableSymbol(initFunction, env); return initFunction; } private BType getRestType(BInvokableSymbol invokableSymbol) { if (invokableSymbol != null && invokableSymbol.restParam != null) { return invokableSymbol.restParam.type; } return null; } private BType getRestType(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.getBType(); } return null; } private BVarSymbol getRestSymbol(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.symbol; } return null; } private boolean isComputedKey(RecordLiteralNode.RecordField field) { if (!field.isKeyValueField()) { return false; } return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey; } private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) { List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields; BType type = mappingConstructorExpr.getBType(); Location pos = mappingConstructorExpr.pos; List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size()); for (RecordLiteralNode.RecordField field : fields) { if 
(field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValueField = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangRecordLiteral.BLangRecordKey key = keyValueField.key; BLangExpression origKey = key.expr; BLangExpression keyExpr; if (key.computedKey) { keyExpr = origKey; } else { keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos, StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) : ((BLangLiteral) origKey); } BLangRecordLiteral.BLangRecordKeyValueField rewrittenField = ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr), rewriteExpr(keyValueField.valueExpr)); rewrittenField.pos = keyValueField.pos; rewrittenField.key.pos = key.pos; rewrittenFields.add(rewrittenField); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field; rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue( rewriteExpr(createStringLiteral(pos, StringEscapeUtils.unescapeJava(varRefField.variableName.value))), rewriteExpr(varRefField))); } else { BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField = (BLangRecordLiteral.BLangRecordSpreadOperatorField) field; spreadOpField.expr = rewriteExpr(spreadOpField.expr); rewrittenFields.add(spreadOpField); } } fields.clear(); return type.tag == TypeTags.RECORD ? 
new BLangStructLiteral(pos, type, rewrittenFields) : new BLangMapLiteral(pos, type, rewrittenFields); } protected void addTransactionInternalModuleImport() { if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) { BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode(); List<BLangIdentifier> pkgNameComps = new ArrayList<>(); pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value)); importDcl.pkgNameComps = pkgNameComps; importDcl.pos = env.enclPkg.symbol.pos; importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value); importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx"); importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, ""); importDcl.symbol = symTable.internalTransactionModuleSymbol; env.enclPkg.imports.add(importDcl); env.enclPkg.symbol.imports.add(importDcl.symbol); } } }
result = addConversionExprIfRequired(literal, varRefExpr.getBType());
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) { BVarSymbol iteratorSymbol = varDef.var.symbol; BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID, foreach.nillableResultType, this.env.scope.owner, foreach.pos, VIRTUAL); BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos, foreach.nillableResultType, iteratorSymbol, resultSymbol); BLangType userDefineType = getUserDefineTypeNode(foreach.resultType); BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol); BLangTypeTestExpr typeTestExpr = ASTBuilderUtil .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType); BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode(); whileNode.pos = foreach.pos; whileNode.expr = typeTestExpr; whileNode.body = foreach.body; BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol); VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode; BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol); BLangExpression expr = valueAccessExpr.expr; valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType); variableDefinitionNode.getVariable() .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType)); whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode); whileNode.body.stmts.add(1, resultAssignment); BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos); blockNode.addStatement(varDef); blockNode.addStatement(resultVariableDefinition); blockNode.addStatement(whileNode); return blockNode; } private BLangType getUserDefineTypeNode(BType type) { BLangUserDefinedType recordType = new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""), ASTBuilderUtil.createIdentifier(null, "")); 
recordType.setBType(type); return recordType; } @Override public void visit(BLangWhile whileNode) { if (whileNode.onFailClause != null) { BLangOnFailClause onFailClause = whileNode.onFailClause; whileNode.onFailClause = null; whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE; BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause); result = rewrite(doStmt, env); } else { whileNode.expr = rewriteExpr(whileNode.expr); whileNode.body = rewrite(whileNode.body, env); result = whileNode; } } private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement, BLangOnFailClause onFailClause) { BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode(); BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location); doBlock.scope = new Scope(env.scope.owner); bLDo.body = doBlock; bLDo.pos = location; bLDo.onFailClause = onFailClause; bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK; doBlock.stmts.add(statement); return bLDo; } @Override public void visit(BLangLock lockNode) { BLangOnFailClause currentOnFailClause = this.onFailClause; BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos); if (lockNode.onFailClause != null) { blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK; rewrite(lockNode.onFailClause, env); } BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos); blockStmt.addStatement(lockStmt); enclLocks.push(lockStmt); BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE); BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType); BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral); statementExpression.setBType(symTable.nilType); BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode(); trapExpr.setBType(nillableError); trapExpr.expr = statementExpression; BVarSymbol 
nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"), this.env.scope.owner.pkgID, nillableError, this.env.scope.owner, lockNode.pos, VIRTUAL); BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult", nillableError, trapExpr, nillableErrorVarSymbol); BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable); blockStmt.addStatement(simpleVariableDef); BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos); unLockStmt.relatedLock = lockStmt; blockStmt.addStatement(unLockStmt); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol); BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos); BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = lockNode.pos; panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType); ifBody.addStatement(panicNode); BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode()); isErrorTest.setBType(symTable.booleanType); BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null); blockStmt.addStatement(ifelse); result = rewrite(blockStmt, env); enclLocks.pop(); this.onFailClause = currentOnFailClause; } @Override public void visit(BLangLockStmt lockStmt) { result = lockStmt; } @Override public void visit(BLangUnLockStmt unLockStmt) { result = unLockStmt; } private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef, BLangSimpleVarRef shouldRetryRef) { BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode(); trxOnFailClause.pos = pos; trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos); trxOnFailClause.body.scope = new Scope(env.scope.owner); trxOnFailClause.isInternal = true; BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"), 
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL); BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos, "$trxError$", symTable.errorType, null, trxOnFailErrorSym); trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, trxOnFailError); trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym); transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym, trxBlockId, shouldRetryRef); BLangGroupExpr shouldNotPanic = new BLangGroupExpr(); shouldNotPanic.setBType(symTable.booleanType); shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef); BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym); BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos); BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = pos; panicNode.expr = caughtError; BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode); trxOnFailClause.body.stmts.add(exitIf); BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode(); failStmt.pos = pos; failStmt.expr = caughtError; failBlock.stmts.add(failStmt); trxOnFailClause.bodyContainsFail = true; return trxOnFailClause; } @Override public void visit(BLangTransaction transactionNode) { if (transactionNode.onFailClause != null) { BLangOnFailClause onFailClause = transactionNode.onFailClause; transactionNode.onFailClause = null; transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE; BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause); result = rewrite(doStmt, env); } else { BLangLiteral currentTrxBlockId = this.trxBlockId; String uniqueId = String.valueOf(++transactionBlockCount); this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId); boolean currShouldReturnErrors = this.shouldReturnErrors; 
this.shouldReturnErrors = true; BLangOnFailClause currOnFailClause = this.onFailClause; BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false); BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"), env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL); shouldPanicVarSymbol.closure = true; BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos, "$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol); BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos, shouldPanicVariable); BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos, shouldPanicVarSymbol); BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef, this.shouldRetryRef); enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef); boolean userDefinedOnFailAvbl = this.onFailClause != null; analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody); BLangBlockStmt transactionStmtBlock = transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId); transactionStmtBlock.stmts.add(0, shouldPanicDef); transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol); transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ? 
BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE : BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK; result = rewrite(transactionStmtBlock, this.env); this.shouldReturnErrors = currShouldReturnErrors; this.trxBlockId = currentTrxBlockId; swapAndResetEnclosingOnFail(currOnFailClause); } } @Override public void visit(BLangRollback rollbackNode) { BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef); result = rewrite(rollbackStmtExpr, env); } private BLangOnFailClause createRetryInternalOnFail(Location pos, BLangSimpleVarRef retryResultRef, BLangSimpleVarRef retryManagerRef, BLangSimpleVarRef shouldRetryRef, BLangSimpleVarRef continueLoopRef, BLangSimpleVarRef returnResult) { BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode(); internalOnFail.pos = pos; internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos); internalOnFail.body.scope = new Scope(env.scope.owner); BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"), env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL); BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos, "$caughtError$", symTable.errorType, null, caughtErrorSym); internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, caughtError); env.scope.define(caughtErrorSym.name, caughtErrorSym); BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym); BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef); internalOnFail.body.stmts.add(errorAssignment); BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef, ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true)); internalOnFail.body.stmts.add(continueLoopTrue); BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos, retryManagerRef, caughtErrorRef); BLangAssignment 
shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef, shouldRetryInvocation); internalOnFail.body.stmts.add(shouldRetryAssignment); BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr(); shouldNotRetryCheck.setBType(symTable.booleanType); shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef); BLangGroupExpr exitCheck = new BLangGroupExpr(); exitCheck.setBType(symTable.booleanType); exitCheck.expression = shouldNotRetryCheck; BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos); BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null); if (this.onFailClause != null) { BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode(); failStmt.pos = pos; failStmt.expr = retryResultRef; exitLogicBlock.stmts.add(failStmt); internalOnFail.bodyContainsFail = true; internalOnFail.body.stmts.add(exitIf); BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode(); loopContinueStmt.pos = pos; internalOnFail.body.stmts.add(loopContinueStmt); } else { BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult, ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true)); exitLogicBlock.stmts.add(returnErrorTrue); internalOnFail.body.stmts.add(exitIf); } return internalOnFail; } BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) { List<BType> paramTypes = new ArrayList<>(); paramTypes.add(symTable.booleanType); BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType, null); BOperatorSymbol notOperatorSymbol = new BOperatorSymbol( names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol, symTable.builtinPos, VIRTUAL); return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType, OperatorKind.NOT, notOperatorSymbol); } BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix, 
List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, BLangFunctionBody lambdaBody) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++); lambdaFunction.function = func; func.requiredParams.addAll(lambdaFunctionVariable); func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); lambdaFunctionVariable = func.requiredParams; func.body = lambdaBody; func.desugared = false; lambdaFunction.pos = pos; List<BType> paramTypes = new ArrayList<>(); lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type)); lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(), null)); return lambdaFunction; } protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix, List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, List<BLangStatement> fnBodyStmts, SymbolEnv env, Scope bodyScope) { BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode(); body.scope = bodyScope; SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); this.forceCastReturnType = ((BLangType) returnType).getBType(); body.stmts = rewriteStmt(fnBodyStmts, bodyEnv); this.forceCastReturnType = null; return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body); } private BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix, TypeNode returnType) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++); lambdaFunction.function = func; func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); 
func.desugared = false; lambdaFunction.pos = pos; return lambdaFunction; } private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) { final BPackageSymbol packageSymbol = targetPkg.symbol; final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol); symbolEnter.defineNode(funcNode, packageEnv); packageEnv.enclPkg.functions.add(funcNode); packageEnv.enclPkg.topLevelNodes.add(funcNode); } @Override public void visit(BLangForkJoin forkJoin) { result = forkJoin; } @Override public void visit(BLangLiteral literalExpr) { if (literalExpr.getBType().tag == TypeTags.ARRAY && ((BArrayType) literalExpr.getBType()).eType.tag == TypeTags.BYTE) { result = rewriteBlobLiteral(literalExpr); return; } result = literalExpr; } private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) { String[] result = getBlobTextValue((String) literalExpr.value); byte[] values; if (BASE_64.equals(result[0])) { values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8)); } else { values = hexStringToByteArray(result[1]); } BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteralNode.setBType(literalExpr.getBType()); arrayLiteralNode.pos = literalExpr.pos; arrayLiteralNode.exprs = new ArrayList<>(); for (byte b : values) { arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b)); } return arrayLiteralNode; } private String[] getBlobTextValue(String blobLiteralNodeText) { String nodeText = blobLiteralNodeText.replace("\t", "").replace("\n", "").replace("\r", "") .replace(" ", ""); String[] result = new String[2]; result[0] = nodeText.substring(0, nodeText.indexOf('`')); result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`')); return result; } private static byte[] hexStringToByteArray(String str) { int len = str.length(); byte[] data = new byte[len / 2]; for (int i = 0; i < len; i += 2) { data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 
4) + Character.digit(str.charAt(i + 1), 16));
    }
    return data;
}

// Lowers a list constructor to the concrete literal node matching its contextual type:
// tuple, JSON array, typedesc, or plain array.
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BLangExpression expr;
    if (listConstructor.getBType().tag == TypeTags.TUPLE) {
        expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    } else if (listConstructor.getBType().tag == TypeTags.JSON) {
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
        result = rewriteExpr(expr);
    } else if (getElementType(listConstructor.getBType()).tag == TypeTags.JSON) {
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    } else if (listConstructor.getBType().tag == TypeTags.TYPEDESC) {
        // A list constructor in typedesc context becomes a typedesc expression.
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
    } else {
        expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    }
}

// Table constructors only need their member record literals rewritten.
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    rewriteExprs(tableConstructorExpr.recordLiteralList);
    result = tableConstructorExpr;
}

// Array literals in (or containing) JSON context are converted to JSON array literals.
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    if (arrayLiteral.getBType().tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
        return;
    } else if (getElementType(arrayLiteral.getBType()).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
        return;
    }
    result = arrayLiteral;
}

// Tuple literal: either becomes a typedesc expression, or has implicit casts inserted so each
// member matches the corresponding tuple member type (or the rest type past the declared members).
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
        return;
    }
    List<BLangExpression> exprs = tupleLiteral.exprs;
    BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
    List<BType> tupleMemberTypes = tupleType.tupleTypes;
    int tupleMemberTypeSize = tupleMemberTypes.size();
    int tupleExprSize = exprs.size();
    for (int i = 0; i < tupleExprSize; i++) {
        BLangExpression expr = exprs.get(i);
        // Use the already-applied implicit conversion's type as the source type, if any.
        BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
        BType targetType = i < tupleMemberTypeSize ? tupleMemberTypes.get(i) : tupleType.restType;
        types.setImplicitCastExpr(expr, expType, targetType);
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}

// Group expressions are unwrapped; a typedesc-context group becomes a typedesc expression.
@Override
public void visit(BLangGroupExpr groupExpr) {
    if (groupExpr.isTypedescExpr) {
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = groupExpr.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
    } else {
        result = rewriteExpr(groupExpr.expression);
    }
}

// Record literal: computed-key fields are moved to the end (stable sort on isComputedKey),
// then the mapping constructor is rewritten.
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
    fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}

// Simple variable reference: classified by its symbol/owner into the concrete var-ref node kind
// (function ref, type load, local, field, package-level, XML qname, constant inlining, ...).
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // A reference whose package symbol is an XML namespace becomes an XML qualified name.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.setBType(symTable.stringType);
        result = qnameExpr;
        return;
    }
    // No symbol resolved (e.g. '_' or an erroneous ref) — pass through unchanged.
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Prefer the original symbol when this symbol is a clone created during earlier phases.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
            && varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE
            && !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE
            || (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        // Owned by a function or let expression — a local variable reference.
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE
            || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Simple-typed constants are inlined as literals instead of referenced.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
        // Inside a lock statement: record this global plus the globals it depends on,
        // so the enclosing lock acquires all of them.
        if (!enclLocks.isEmpty()) {
            BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
            BLangLockStmt lockStmt = enclLocks.peek();
            lockStmt.addLockVariable(symbol);
            lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
        }
    }
    genVarRefExpr.setBType(varRefExpr.getBType());
    genVarRefExpr.pos = varRefExpr.pos;
    // LValues (and '_') keep the symbol's own type; no conversion is inserted.
    if ((varRefExpr.isLValue) || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        genVarRefExpr.setBType(varRefExpr.symbol.type);
        result = genVarRefExpr;
        return;
    }
    genVarRefExpr.isLValue = varRefExpr.isLValue;
    // RValue: read with the symbol's type, then convert to the expected (target) type if needed.
    BType targetType = genVarRefExpr.getBType();
    genVarRefExpr.setBType(genVarRefExpr.symbol.type);
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}

// NS-prefixed field access uses the same lowering as ordinary field access.
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}

/**
 * Lowers a field-based access (a.b) into the concrete access node for the receiver's type:
 * struct/record field access, JSON access, map access, XML access, or a safe-navigation rewrite.
 */
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    int varRefTypeTag = varRefType.tag;
    if (varRefTypeTag == TypeTags.OBJECT
            || (varRefTypeTag == TypeTags.UNION
            && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        // Accessing an attached method as a value becomes a closure over the receiver.
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;
            // Field stores inside an object's (generated) initializer count as store-on-creation.
            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT &&
                    env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) varRefType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
                // Inside the object's init (user-written or generated) the field is being
                // stored as part of object creation.
                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol)
                        || (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD
            || (varRefTypeTag == TypeTags.UNION
            && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        // Record: either a reference to an attached function, or a plain field access.
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr,
                    (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(varRefType)) {
        // Lax typed receiver (json-like): field access may fail at runtime.
        if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
            if (varRefType.tag == TypeTags.MAP
                    && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
                // map<xml> access gets an explicit error-producing rewrite.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            // XML receivers: field access maps to a langlib invocation (attribute/name access).
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}

@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    rewriteFieldBasedAccess(fieldAccessExpr);
}

/**
 * Rewrites access to an attached object method as a value (obj.method without call) into an
 * anonymous delegate lambda that captures the receiver as a closure variable and forwards all
 * parameters (including a rest parameter) to the original method.
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$annon$method$delegate$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
            names.fromString(funcName), env.enclPkg.packageID, originalMemberFuncSymbol.type,
            env.scope.owner, pos, VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();
    BLangExpression receiver = fieldAccessExpr.expr;
    // If the receiver is not a simple var ref, it is evaluated once into a temp variable
    // so the closure captures that temp instead of re-evaluating the expression.
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol receiverSymbol =
                ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        // Evaluate the receiver expression once into a temp and capture the temp.
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }
    // Mirror each required parameter of the original method on the delegate and forward it.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);
        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }
    // Mirror the rest parameter (if any) and forward it as a rest-args expression.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol,
                pos, VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);
        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }
    // Delegate body: return receiver.method(args...).
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(receiver, field, fieldAccessExpr.symbol, requiredArgs,
            restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());
    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Temp receiver: evaluate the temp var def, then yield the lambda.
        BLangStatementExpression expr =
                createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}

/**
 * Builds an invocation node receiver.field(requiredArgs..., restArgs...) bound to the given
 * invokable symbol, typed with the symbol's return type.
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.name = field;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = receiver;
    invocationNode.symbol = invocableSymbol;
    invocationNode.setBType(((BInvokableType) invocableSymbol.type).retType);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = restArgs;
    return invocationNode;
}

/**
 * Rewrites a lax map field access (map[key]-style) into a statement expression that yields either
 * the value or an InvalidKey error when the key is absent. The generated block declares a result
 * variable typed T|error, does the map access as T|nil, and branches on nil.
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    BUnionType fieldAccessType =
            BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    // Result holder typed T|error; assigned in both branches below.
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);
    // Raw map access typed T|nil: nil means the key was absent.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);
    // if (mapResult is ()) { result = error(...); } else { result = mapResult; }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;
    // Build error("{ballerina/lang.map}InvalidKey", key = <field name>).
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;
    BLangSimpleVariableDef errorDef = createVarDef("$_invalid_key_error", symTable.errorType,
            errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    // nil branch: result = error.
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;
    // value branch: result = map access result.
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;
    statementExpression.expr = resultRef;
    return statementExpression;
}

/**
 * Rewrites XML attribute / element-name field access into the matching langlib invocation.
 * The special field name "_" reads the element name; anything else reads an attribute, with the
 * name expanded to {uri}local form for ns-prefixed access.
 */
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr,
            args, new ArrayList<>());
}

// Wraps the access's optional-ness (a?.b vs a.b) as a rewritten boolean literal argument.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    return rewrite(
            createLiteral(fieldAccessExpr.pos,
                    symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env);
}

// Expands a namespace URI + local name into the {uri}local qualified-name form.
private String createExpandedQName(String nsURI, String localName) {
    return "{" + nsURI + "}" + localName;
}

// Lowers an index-based access (a[i]) into the concrete access node for the receiver's type:
// map, mapping, list, XML, string, or table access (with multi-key handling).
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        // Multi-key table index: collapse the key expressions into a single tuple literal.
        if (targetVarRef.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY) {
            BLangTupleLiteral listConstructorExpr = new BLangTupleLiteral();
            listConstructorExpr.exprs =
                    ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs;
            List<BType> memberTypes = new ArrayList<>();
            ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs.
                    forEach(expression -> memberTypes.add(expression.getBType()));
            listConstructorExpr.setBType(new BTupleType(memberTypes));
            indexAccessExpr.indexExpr = listConstructorExpr;
        }
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}

@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    rewriteExprs(tableMultiKeyExpr.multiKeyIndexExprs);
    result = tableMultiKeyExpr;
}

@Override
public void visit(BLangInvocation iExpr) {
    rewriteInvocation(iExpr, false);
}

// Error constructor: normalizes the cause argument (positional slot 1, defaulting to nil),
// then builds the detail record from named args and clones it read-only.
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) errorConstructorExpr.getBType()).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        // Each named arg becomes a key-value field of the detail record literal.
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member =
                    new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            if (recordLiteral.getBType().tag == TypeTags.RECORD) {
                member.valueExpr =
                        addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) errorConstructorExpr.getBType()).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}

// Action invocation: a non-async call inside a transaction first ensures the transaction
// coordinator is started, then uses the common invocation rewrite.
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }
    rewriteInvocation(actionInvocation, actionInvocation.async);
}

/**
 * Common lowering for invocations: records lock dependencies, reorders and rewrites arguments,
 * inserts type-param/stream casts, desugars statement annotations, resolves parameterized return
 * types, and converts object/record method calls into attached-function invocations.
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;
    // Inside a lock: the enclosing lock must also cover globals this callee depends on.
    if (!enclLocks.isEmpty()) {
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }
    reorderArguments(invocation);
    invocation.requiredArgs = rewriteExprs(invocation.requiredArgs);
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    invocation.restArgs = rewriteExprs(invocation.restArgs);
    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
            invocation.symbol.pkgID, invocation.symbol.owner, env);
    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    invocation.expr = rewriteExpr(invocation.expr);
    result = invRef;
    // Parameterized return types are resolved to a concrete type (wrapped in future for async).
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ?
                new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }
    if (invocation.expr == null) {
        // No receiver expression; attach one from exprSymbol if available, else done.
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }
    switch (invocation.expr.getBType().tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!invocation.langLibInvocation) {
                // Method call: pass the receiver as the first argument of an
                // attached-function invocation.
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                invocation.symbol, invocation.getBType(), invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}

// For langlib calls, casts each required argument to the declared parameter type so
// type-param-substituted values are converted explicitly.
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    for (int i = 0; i < requiredArgs.size(); i++) {
        requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type));
    }
}

/* This function is a workaround and needs improvement.
 * Notes for improvement :
 * 1. Both arguments are same.
 * 2. Due to current type param logic we put type param flag on the original type.
 * 3. Error type having Cloneable type with type param flag, change expression type by this code.
 * 4.
using error type is a problem as Cloneable type is an typeparm eg: ExprBodiedFunctionTest
 * added never to CloneableType type param
 * @typeParam type
 * CloneableType Cloneable|never;
 * */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
    if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
        return;
    }
    // Retype the generated invocation to the invokable's (type-param) return type and cast the
    // whole expression back to the originally inferred type. Async calls keep their future type.
    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        genIExpr.setBType(returnTypeOfInvokable);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}

/**
 * Casts arguments passed for stream-typed parameters to the declared stream type.
 */
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (!params.isEmpty()) {
        for (int i = 0; i < requiredArgs.size(); i++) {
            BVarSymbol param = params.get(i);
            if (param.type.tag == TypeTags.STREAM) {
                requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), param.type));
            }
        }
    }
}

/** Creates a fresh `()` (nil) literal node. */
private BLangLiteral createNilLiteral() {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = null;
    literal.setBType(symTable.nilType);
    return literal;
}

/**
 * Desugars a `new` expression: stream constructors go through the internal stream-construct
 * function; everything else is treated as an object initialization.
 */
public void visit(BLangTypeInit typeInitExpr) {
    if (typeInitExpr.getBType().tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}

/**
 * Desugars `new T(...)` into a statement expression:
 * allocate the object into `$obj$`, invoke the generated initializer, and — when the
 * initializer can return an error — select between the error and the object via an if/else
 * into `$result$`.
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BType objType = getObjectType(typeInitExpr.getBType());
    // $obj$ holds the freshly allocated (not yet initialized) object.
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    // Redirect the init call at the generated initializer, with $obj$ as the receiver.
    typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitExpr.initInvocation.symbol =
            ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    // Initializer that cannot fail (returns nil): just run it and yield $obj$.
    if (typeInitExpr.initInvocation.getBType().tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitExpr.initInvocation;
        typeInitExpr.initInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }
    // Initializer may return an error: capture its return value in $temp$ ...
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$",
            typeInitExpr.initInvocation.getBType(), typeInitExpr.initInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    // ... and select the overall result into $result$.
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos,
            initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    // then: init failed, propagate the error value.
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    // else: init succeeded, yield the object.
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}

/**
 * Desugars `new stream<C, E>(...)` into a call to the internal `construct_stream` function,
 * passing typedescs for the constraint and completion types plus the optional iterator arg.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
    BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
    constraintTdExpr.resolvedType = constraintType;
    constraintTdExpr.setBType(constraintTdType);
    BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
    BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
    completionTdExpr.resolvedType = completionType;
    completionTdExpr.setBType(completionTdType);
    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));
    // Only the first constructor argument (the iterator object) is forwarded, if present.
    if (!typeInitExpr.argsExpr.isEmpty()) {
        args.add(typeInitExpr.argsExpr.get(0));
    }
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, args, symResolver);
    streamConstructInvocation.setBType(
            new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return streamConstructInvocation;
}

/**
 * Creates a variable definition `T name = expr;`, reusing an existing symbol with the same
 * name from the current scope when one exists (otherwise defining a fresh virtual symbol).
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, Location
location) {
    BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    if (objSym == null || objSym == symTable.notFoundSymbol) {
        objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) objSym);
    BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(location);
    objVarDef.var = objVar;
    objVarDef.setBType(objVar.getBType());
    return objVarDef;
}

/**
 * Extracts the object type from a `new` expression's type: either the type itself or the
 * (single expected) object member of a union.
 *
 * @throws IllegalStateException if no object type is present
 */
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    } else if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().stream()
                .filter(t -> t.tag == TypeTags.OBJECT)
                .findFirst()
                .orElse(symTable.noType);
    }
    throw new IllegalStateException("None object type '" + type.toString() +
            "' found in object init context");
}

/** Builds an `error` type node (used e.g. in `is error` type tests). */
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorType);
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}

/** Builds an `error?` type node. Note: no position is set here, unlike getErrorTypeNode(). */
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    return errorTypeNode;
}

@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if () {
     *    $result$ = thenExpr;
     * } else {
     *    $result$ = elseExpr;
     * }
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    // then: $result$ = thenExpr
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);
    // else: $result$ = elseExpr
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}

/**
 * Desugars a wait expression. An alternative wait (`wait f1 | f2`) is parsed as a binary
 * expression tree; it is flattened into a list of the individual future expressions.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
    }
    result = waitExpr;
}

/** Recursively flattens a binary wait expression tree into {@code exprs} (left-to-right). */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}

/** Recurses into nested binary nodes; rewrites and collects leaf expressions. */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() == NodeKind.BINARY_EXPR) {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
    } else {
        expr = rewriteExpr(expr);
        exprs.add(expr);
    }
}

/**
 * Desugars a multiple-wait expression (`wait {a: f1, b: f2}`) into a wait literal.
 * For `{a, b}` shorthand entries only the key expression exists and is rewritten instead.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr != null) {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        } else {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        }
    });
    BLangExpression expr = new
BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    expr.pos = waitExpr.pos;
    result = rewriteExpr(expr);
}

/**
 * Desugars a trap expression; the trapped expression's value is cast to the trap's
 * (error-including) type unless it is nil.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    if (trapExpr.expr.getBType().tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}

/**
 * Desugars binary expressions. Dispatches special forms first (nil-lifting operands, int
 * ranges, short-circuiting logical operators), then rewrites both operands and inserts the
 * numeric/string/xml casts the runtime operator implementations expect.
 */
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Nullable operands: rewrite into an if/else that lifts nil (see
    // createStmtExprForNullableBinaryExpr).
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    // `a ... b` / `a ..< b` become an int-range object; the half-open end is adjusted first.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
        return;
    }
    // && and || need short-circuit desugaring.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    // byte/int mixes under arithmetic and bitwise ops may need widening to int.
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = binaryExpr.rhsExpr.getBType().tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.getBType().tag;
    // (Non-)equality between byte and int: widen the byte side so comparison is int-vs-int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    // Matching operand types need no casts, except shifts/arithmetic on non-value types.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // string + xml concatenation: wrap the string side in an xml text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    // Mixed numeric operands: promote toward decimal, then float.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}

/**
 * Desugars a binary expression with nullable operands into nil-lifting form.
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     * if (x is () or y is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = x + y;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    // The non-nil member of the `T?` union is the type the actual operation runs at.
    BType nonNilType = exprBType.getMemberTypes().iterator().next();
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
    // Bitwise ops keep each operand's own (nil-stripped) type; others use the result type.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    BLangSimpleVariableDef tempVarDef = createVarDef("result", binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // condition: lhs is () || rhs is ()
    BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, binaryExpr.lhsExpr, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo =
createTypeCheckExpr(binaryExpr.pos, binaryExpr.rhsExpr, getNillTypeNode());
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    // then: $result$ = ()
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else: $result$ = <lhsType>lhs op <rhsType>rhs
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, binaryExpr.lhsExpr,
            binaryExpr.rhsExpr, nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(newBinaryExpr.lhsExpr, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(newBinaryExpr.rhsExpr, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}

/**
 * True when this binary expression has at least one nullable operand and its operator is one
 * of the nil-lifting arithmetic/bitwise/shift operators.
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    if ((binaryExpr.lhsExpr.getBType() != null && binaryExpr.rhsExpr.getBType() != null) &&
            (binaryExpr.rhsExpr.getBType().isNullable() ||
                    binaryExpr.lhsExpr.getBType().isNullable())) {
        switch (binaryExpr.getOperatorKind()) {
            case ADD:
            case SUB:
            case MUL:
            case DIV:
            case MOD:
            case BITWISE_LEFT_SHIFT:
            case BITWISE_RIGHT_SHIFT:
            case BITWISE_UNSIGNED_RIGHT_SHIFT:
            case BITWISE_AND:
            case BITWISE_OR:
            case BITWISE_XOR:
                return true;
        }
    }
    return false;
}

/**
 * Inserts the operand casts needed for arithmetic on mixed operand types: string operands of
 * an xml concatenation become xml text literals, other mixed operands are cast to the
 * expression's result type. Same-kind int/string/xml pairs need no cast.
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // NOTE(review): this literal is built for the LHS but uses binaryExpr.rhsExpr.pos —
            // looks like a copy-paste of the mirrored branch above (expected lhsExpr.pos);
            // affects only diagnostics positions. Confirm before changing.
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}

/**
 * Inserts casts to `int` for shift operands that are neither int-like nor byte; byte/int
 * operand combinations are left as-is.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    if (isLhsIntegerType || lhsExprTypeTag == TypeTags.BYTE) {
        if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    if (isRhsIntegerType || rhsExprTypeTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
}

/**
 * Inserts operand casts for relational (comparison) operators so both sides are compared at
 * a common type: int-like/byte mixes widen to int, string mixes cast to string.
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    if ((isLhsIntegerType && isRhsIntegerType) ||
            (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.BYTE)) {
        return;
    }
    if (isLhsIntegerType && !isRhsIntegerType) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    if (!isLhsIntegerType && isRhsIntegerType) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);
    if (isLhsStringType && isRhsStringType) {
        return;
    }
    if (isLhsStringType && !isRhsStringType) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
        return;
    }
    if (!isLhsStringType && isRhsStringType) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
    }
}

/**
 * Replaces a range expression with an invocation of the internal int-range constructor.
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location, symbol,
            new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
    createIntRangeInvocation.setBType(symTable.intRangeType);
    return createIntRangeInvocation;
}

private void
checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    // Only relevant when the expression has an expected type and a byte operand.
    if (binaryExpr.expectedType == null) {
        return;
    }
    int rhsExprTypeTag = binaryExpr.rhsExpr.getBType().tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.getBType().tag;
    if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) {
        return;
    }
    // Result expected as int: widen byte operand(s) to int.
    int resultTypeTag = binaryExpr.expectedType.tag;
    if (resultTypeTag == TypeTags.INT) {
        if (rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
        }
        if (lhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
        }
    }
}

/**
 * This method checks whether given binary expression is related to shift operation.
 * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type.
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;
}

/**
 * Desugars `lhs ?: rhs` into a match expression with a nil pattern yielding the rhs.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
            rewriteExpr(elvisExpr.rhsExpr)));
    matchExpr.setBType(elvisExpr.getBType());
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}

/**
 * Desugars unary expressions: nil-lifting for nullable operands, `~` into an XOR binary
 * expression, everything else rewrites the operand in place.
 */
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (isNullableUnaryExpr(unaryExpr)) {
        BLangStatementExpression statementExpression = createStmtExprForNilableUnaryExpr(unaryExpr);
        result = rewrite(statementExpression, env);
        return;
    }
    if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}

/**
 * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary
 * expression as below.
 * Example : ~a -> a ^ -1;
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.opKind = OperatorKind.BITWISE_XOR;
    binaryExpr.lhsExpr = unaryExpr.expr;
    if (TypeTags.BYTE == unaryExpr.getBType().tag) {
        // byte complement: XOR with 0xff keeps the result within byte range.
        binaryExpr.setBType(symTable.byteType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        // int complement: XOR with -1 (all bits set).
        binaryExpr.setBType(symTable.intType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(binaryExpr);
}

/**
 * Desugars a unary expression with a nullable operand into nil-lifting form.
 */
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    // The non-nil member of the `T?` union: the type the unary operation actually runs at.
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // condition: operand is ()
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    // then: $result = ()
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else: $result = op <nilLiftType>operand
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos,
            expr, nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}

/**
 * True when the unary expression's type is nullable and its operator (+, -, ~) lifts nil.
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    if (unaryExpr.getBType() != null && unaryExpr.getBType().isNullable()) {
        switch (unaryExpr.operator) {
            case ADD:
            case SUB:
            case BITWISE_COMPLEMENT:
                return true;
        }
    }
    return false;
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion with no type node but with annotation attachments only
    // carries annotations; it reduces to its inner expression.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    BType targetType = conversionExpr.targetType; // NOTE(review): local appears unused in this method — confirm
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}

@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda with the enclosing package exactly once.
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}

@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Desugar an arrow function expression into an equivalent lambda function.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);

    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;

    // The return type comes from the arrow expression body's type.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));

    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);

    // Create an invokable symbol for the generated function and define it.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value), new Name(funcNode.name.originalValue), env.enclPkg.symbol.pkgID,
            bLangArrowFunction.funcType, env.enclEnv.enclVarSym, true, bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);

    // Define each parameter symbol in the function's scope while collecting them.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());

    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();

    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));

    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Keep the defining env so captured closure variables can be resolved later.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}

// Attaches the given symbol to the invokable node and opens a fresh scope for it.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}

@Override
public void visit(BLangXMLQName xmlQName) {
    result = xmlQName;
}

@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}

@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);

    // Collect namespace-declaration attributes as inline namespaces of the element.
    Iterator<BLangXMLAttribute>
attributesItr = xmlElementLiteral.attributes.iterator();
while (attributesItr.hasNext()) {
    BLangXMLAttribute attribute = attributesItr.next();
    if (!attribute.isNamespaceDeclr) {
        continue;
    }
    // xmlns declarations become package-level or local namespace nodes
    // depending on the owning scope.
    BLangXMLNS xmlns;
    if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
        xmlns = new BLangPackageXMLNS();
    } else {
        xmlns = new BLangLocalXMLNS();
    }
    xmlns.namespaceURI = attribute.value.concatExpr;
    xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
    xmlns.symbol = attribute.symbol;
    xmlElementLiteral.inlineNamespaces.add(xmlns);
}
result = xmlElementLiteral;
}

@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}

@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Text fragments collapse into a single string-concat expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}

@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}

@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}

@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}

@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}

/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 * string name = "Pubudu";
 * 'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 * type RawTemplate$Impl$0 object {
 *     public string[] strings = ["Hello ", "!"];
 *     public (any|error)[] insertions;
 *
 *     function init((any|error)[] insertions) {
 *         self.insertions = insertions;
 *     }
 * };
 *
 * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) rawTemplateLiteral.getBType();
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();

    // Build the insertions list passed to the generated class's init().
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;

    // Instantiate the generated class with the insertions.
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);

    result = rewriteExpr(typeNewExpr);
}

/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 * type $anonType$0 object {
 *     public string[] strings = ["Hello ", "!"];
 *     public (any|error)[] insertions;
 *
 *     function init((any|error)[] insertions) {
 *         self.insertions = insertions;
 *     }
 * };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                Location pos) {
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));

    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
            env.enclPkg.packageID, null, env.enclPkg.symbol, pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;

    // Create a concrete class type mirroring the abstract object type's fields.
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);

    // Create the class definition node for the generated type.
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);

    // Initialize the strings field with the literal's static string parts.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;

    // Create the user-defined init() method and register it at package level.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
// Create the compiler-generated init function, analyze it in the current env,
// and attach it to the class.
BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
        tempGeneratedInitFunction.symbol.scope, env);
this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
classDef.generatedInitFunction = tempGeneratedInitFunction;
env.enclPkg.functions.add(classDef.generatedInitFunction);
env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);

return rewrite(classDef, env);
}

/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env, names,
                    Names.USER_DEFINED_INIT_SUFFIX, symTable, classDefn.getBType());
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
            (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);

    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    for (BLangSimpleVariable field : classDefn.fields) {
        // Fields with explicit default values don't need an init() parameter.
        if (field.expr != null) {
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);

        // self.<field> = <param>;
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }

    return initFunction;
}

@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Clone the payload so the receiving worker gets an isolated value.
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
    result = workerSendNode;
}

@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
    result = syncSendExpr;
}

@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    result = workerReceiveNode;
}

@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Flush targets are the distinct workers previously sent to.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}

@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // `transactional` desugars to a call to the internal isTransactional() helper.
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
    result = ASTBuilderUtil.createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol,
            Collections.emptyList(), Collections.emptyList(), symResolver);
}

@Override
public void visit(BLangCommitExpr commitExpr) {
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result =
rewriteExpr(stmtExpr);
}

@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);

    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }

    xmlAttributeAccessExpr.desugared = true;

    // An LValue or indexed access is kept as-is; a plain attribute access goes
    // through another rewriting round.
    if (xmlAttributeAccessExpr.isLValue || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}

@Override
public void visit(BLangFail failNode) {
    // Inside an on-fail scope, `fail` transfers control to the on-fail body;
    // otherwise it desugars to returning the error value.
    if (this.onFailClause != null) {
        if (this.onFailClause.bodyContainsFail) {
            result = rewriteNestedOnFail(this.onFailClause, failNode);
        } else {
            result = createOnFailInvocation(onFailClause, failNode);
        }
    } else {
        BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        stmt.desugared = true;
        result = stmt;
    }
}

// The following reference/access nodes are already in their final desugared
// form and pass through unchanged.

@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}

@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}

@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}

@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}

@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}

@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
    result = tableKeyAccessExpr;
}

@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}

@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}

@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}

@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // x.<elem> desugars to an internal xml langlib getElements() call.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}

// Expands element filters into fully-qualified name string literals, resolving
// each filter's namespace prefix against the current environment.
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;

    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            // Unknown prefix: fall back to the default namespace, except for `*`.
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}

// Builds an invocation of an internal xml langlib function on the given
// expression; `invokeOnExpr` is also passed as the first required argument.
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName, BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);

    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

    invocationNode.expr = invokeOnExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));

    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
    invocationNode.langLibInvocation = true;
    return invocationNode;
}

@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);

// Each navigation form maps to a dedicated internal xml langlib function.
if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
            XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
} else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
            xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
    result = rewriteExpr(invocationNode);
} else {
    // Filtered-children access; -1 means "no index given".
    BLangExpression childIndexExpr;
    if (xmlNavigation.childIndex == null) {
        childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
    } else {
        childIndexExpr = xmlNavigation.childIndex;
    }
    ArrayList<BLangExpression> args = new ArrayList<>();
    args.add(rewriteExpr(childIndexExpr));
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
            XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
    result = rewriteExpr(invocationNode);
}
}

@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}

@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    result = fpInvocation;
}

@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}

@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Normalize exclusive bounds before rewriting the bound expressions.
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}

@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    result = rewriteExpr(bLangVarArgsExpression.expr);
}

@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}

@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Desugar the match expression into a block holding a temp result var and a
    // match statement that assigns it, wrapped as a statement expression.
    addMatchExprDefaultCase(bLangMatchExpression);

    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName,
                    bLangMatchExpression.getBType(), null,
                    new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID,
                            bLangMatchExpression.getBType(), this.env.scope.owner, bLangMatchExpression.pos,
                            VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;

    BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();

    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);

        // Each pattern body assigns the (possibly converted) pattern expression
        // to the temp result variable.
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.getBType());
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
        patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }

    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
            patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
    statementExpr.setBType(bLangMatchExpression.getBType());
    result = rewriteExpr(statementExpr);
}

@Override
public void visit(BLangCheckedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}

@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}

// Shared desugaring for `check` and `checkpanic`: the checked expression is
// matched against a success pattern and an error pattern.
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.getBType(), null,
                    new BVarSymbol(0, names.fromString(checkedExprVarName), this.env.scope.owner.pkgID,
                            checkedExpr.getBType(), this.env.scope.owner, checkedExpr.pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;

    BLangMatchTypedBindingPatternClause patternSuccessCase = getSafeAssignSuccessPattern(checkedExprVar.pos,
            checkedExprVar.symbol.type, true, checkedExprVar.symbol, null);
    BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
            this.env.enclInvokable.symbol, checkedExpr.equivalentErrorTypeList, isCheckPanic);

    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
            new ArrayList<BLangMatchTypedBindingPatternClause>() {{
                add(patternSuccessCase);
                add(patternErrorCase);
            }});

    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos, new
ArrayList<BLangStatement>() {{
    add(checkedExprVarDef);
    add(matchStmt);
}});

BLangSimpleVarRef tempCheckedExprVarRef =
        ASTBuilderUtil.createVariableRef(checkedExpr.pos, checkedExprVar.symbol);
BLangStatementExpression statementExpr =
        createStatementExpression(generatedStmtBlock, tempCheckedExprVarRef);
statementExpr.setBType(checkedExpr.getBType());
result = rewriteExpr(statementExpr);
}

@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor desugars to instantiating its generated service class.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}

@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    // Value-typed operands are boxed before the test.
    if (types.isValueType(expr.getBType())) {
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    if (typeTestExpr.isNegation) {
        // `x !is T` rewrites to `!(x is T)`.
        BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                typeTestExpr.expr, typeTestExpr.typeNode);
        BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        bLangGroupExpr.expression = bLangTypeTestExpr;
        bLangGroupExpr.setBType(typeTestExpr.getBType());
        BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                typeTestExpr.getBType(), OperatorKind.NOT, null);
        result = rewriteExpr(unaryExpr);
        return;
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}

@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Annotation access desugars to an ANNOT_ACCESS binary expression whose rhs
    // is the annotation's alias string.
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(), binaryExpr.rhsExpr.getBType()),
                    annotAccessExpr.getBType(), null), null, symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}

@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}

@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}

@Override
public void visit(BLangQueryExpr queryExpr) {
    // Query expressions are handled entirely by the query desugar.
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangQueryAction queryAction) {
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}

@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    // Simple-typed (and nil) constants are replaced by a literal carrying the
    // resolved constant value; others keep their original expression.
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        if (constSymbol.literalType.tag != TypeTags.NIL
                && (constSymbol.value == null || constSymbol.value.value == null)) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}

@Override
public
void visit(BLangIgnoreExpr ignoreExpr) {
    // Ignore expressions need no desugaring.
    result = ignoreExpr;
}

@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
    result = dynamicParamExpr;
}

@Override
public void visit(BLangConstRef constantRef) {
    // A constant reference is replaced by a literal carrying the resolved value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}

// Builds `$iterator$ = <collection>.<iteratorFunc>()` as a variable definition.
// The collection itself is passed as the sole required argument of the invocation.
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}

// Builds `$result$ = <iterator>.next()` as a variable definition.
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}

// Builds `$result$ = <iterator>.next()` as an assignment statement.
BLangAssignment getIteratorNextAssignment(Location pos, BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Strip nil from the receiver's type before invoking `next` on it.
    nextInvocation.expr.setBType(types.getSafeType(nextInvocation.expr.getBType(), true, false));
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}

// Builds the `<iterator>.next()` invocation node, resolving the attached `next`
// method on the iterator object type.
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}

// Looks up the attached function named "next" on an iterator object type;
// returns null when no such function exists.
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
        if (bAttachedFunction.funcName.value.equals("next")) {
            return bAttachedFunction;
        }
    }
    return null;
}

// Convenience wrapper: field access for the conventional `value` field.
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType, BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}

// Builds `<resultSymbol>.<fieldName>` typed as varType.
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.setBType(varType);
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
    return fieldBasedAccessExpression;
}

// Converts an arrow function's expression body into a block body containing a
// single `return <expr>;` statement.
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}

// Builds an invocation of a function resolved from the root scope by name.
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.setBType(retType);
    invocationNode.requiredArgs = args;
    return invocationNode;
}

// Builds a lang-lib method invocation `<onExpr>.<functionName>(args...)`; the
// receiver is prepended as the first required argument. When retType is null the
// resolved symbol's return type is used.
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName));
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}

// Builds an invocation of a function from the lang.__internal module (no receiver).
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}

// Creates an empty array literal typed `any[]`.
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.setBType(new BArrayType(symTable.anyType));
    return expr;
}

// Desugars an invocation through a function pointer: the pointer itself is a
// simple var ref (or a field access when invoked off an expression), which is
// rewritten and wrapped in a BFunctionPointerInvocation.
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangValueExpression expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.setBType(iExpr.symbol.type);
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}

// Wraps expr in a `clone()` lang-lib call; value-typed and error-typed
// expressions are returned unchanged.
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.getBType())) {
        return expr;
    }
    if (expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null,
            expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}

// Wraps expr in a `cloneReadOnly()` lang-lib call; value-typed and error-typed
// expressions are returned unchanged.
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.getBType())) {
        return expr;
    }
    if (expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            expr.getBType(), expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}

// Core rewrite driver: visits the node under the given env, marks the produced
// node as desugared, and restores the previous env. Already-desugared nodes are
// returned as-is.
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
// Expression rewrite driver: if an implicit conversion is attached, the
// conversion expression is visited instead (and detached from the node).
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}

// Statement rewrite driver: maintains the statement-link chain so rewritten
// statements stay connected to their parents.
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}

// Rewrites each statement of the list in place.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// Rewrites each node of the list in place.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// Rewrites each expression of the list in place.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewriteExpr(nodeList.get(i)));
    }
    return nodeList;
}

private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}

private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.setBType(symTable.intType);
    return literal;
}

private BLangLiteral createByteLiteral(Location pos, Byte value) {
    // Bytes are stored as their unsigned int value.
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}

// Wraps expr in an internal type-conversion node unless it already has the
// target type.
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.setBType(targetType);
    conversionExpr.targetType = targetType;
    conversionExpr.internal = true;
    return conversionExpr;
}

// Recursively unwraps array types to their innermost element type.
private BType getElementType(BType type) {
    if (type.tag != TypeTags.ARRAY) {
        return type;
    }
    return getElementType(((BArrayType) type).getElementType());
}

// Appends an implicit `return ();` to a block-bodied, non-native function whose
// return type is nillable and whose last statement is not already a return.
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol)
            || (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.workers.size() == 0
            && invokableNode.symbol.type.getReturnType().isNullable()
            && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        BLangReturn returnStmt;
        if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            // Generated init functions get no source position for the synthetic return.
            returnStmt = ASTBuilderUtil.createNilReturnStmt(null, symTable.nilType);
        } else {
            Location invPos = invokableNode.pos;
            Location returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().startLine().offset(),
                    invPos.lineRange().startLine().offset(), 0, 0);
            returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        }
        funcBody.addStatement(returnStmt);
    }
}

/**
 * Reorder the invocation arguments to match the original function signature.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Used when a vararg (`...x`) may be spread over required params: the vararg
    // expression is evaluated once into a temp var referenced by varargRef.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0
            && restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR
            && originalRequiredArgCount < invokableSymbol.params.size()) {
        // Create a temp var def for the vararg so that its members can be read
        // for the unsupplied required/defaultable params.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName),
                this.env.scope.owner.pkgID, varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr,
                varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Re-order named args and fill in values for defaultable/included-record params.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no vararg expression — collect the individual rest args into an
    // array literal for the rest param (if there is one).
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: the only rest arg is a vararg expression (`f(...x)` possibly after
    // named-arg filling) — slice off the portion consumed by required params and
    // push the remainder into a fresh array for the rest param.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // No member of the vararg was used for a required param.
            return;
        }
        // Prepend the temp-var block to the first arg so the vararg expression is
        // evaluated exactly once, before any member reads.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        if (restArgsExpression.getBType().tag == TypeTags.RECORD) {
            // A record-typed vararg contributes nothing to the rest param.
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        // Iterate the vararg from the first index not consumed by params and push
        // each member into the new rest array.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.getBType(), this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        if (varargVarType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) varargVarType;
            if (arrayType.state == BArrayState.CLOSED
                    && arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                // A closed array fully consumed by params: members read here get the
                // rest param's element type.
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                List.of(pushExpr), restParamType, pos);
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression =
                createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case 3: individual rest args followed by a vararg — seed an array literal
    // with the individual args and push the vararg's members onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
            restParamType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{
        add(stmtExpression);
    }};
}

// Rebuilds the required-arg list in declared parameter order, sourcing values
// from positional args, named args, included-record params, or the vararg.
private void reorderNamedArgs(BLangInvocation iExpr,
                              BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    // Named args present in the call, keyed by parameter name (insertion order kept).
    Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    // Record literals generated for included-record params; leftover named args
    // are distributed into these afterwards.
    List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
    BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = varargRef.getBType();
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional arg supplied for this param.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            // Named arg supplied for this param.
            args.add(namedArgs.remove(param.name.value));
        } else if (param.getFlags().contains(Flag.INCLUDED)) {
            // Included-record param: synthesize a record literal to be filled later.
            BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            BType paramType = param.type;
            recordLiteral.setBType(paramType);
            args.add(recordLiteral);
            incRecordLiterals.add(recordLiteral);
            if (((BRecordType) paramType).restFieldType != symTable.noType) {
                incRecordParamAllowAdditionalFields = recordLiteral;
            }
        } else if (varargRef == null) {
            // No value supplied and no vararg to draw from: placeholder for default.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.setBType(param.type);
            args.add(expr);
        } else {
            // Draw the value from the vararg temp var.
            if (varargRef.getBType().tag == TypeTags.RECORD) {
                if (param.isDefaultable) {
                    // `hasKey(name) ? vararg[name] : <ignore>` as a dynamic argument.
                    BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
                            List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
                    BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
                    BLangIndexBasedAccess memberAccessExpr =
                            ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
                    BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
                    BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
                            hasKeyInvocation, memberAccessExpr, ignoreExpr);
                    args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
                } else {
                    BLangFieldBasedAccess fieldBasedAccessExpression =
                            ASTBuilderUtil.createFieldAccessExpr(varargRef,
                                    ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
                    fieldBasedAccessExpression.setBType(param.type);
                    args.add(fieldBasedAccessExpression);
                }
            } else {
                // List-typed vararg: index into it, tracking the member type for tuples.
                BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
                BType memberAccessExprType = tupleTypedVararg
                        ? ((BTupleType) varargType).tupleTypes.get(varargIndex)
                        : ((BArrayType) varargType).eType;
                args.add(addConversionExprIfRequired(
                        ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType, varargRef, indexExpr),
                        param.type));
                varargIndex++;
            }
        }
    }
    if (namedArgs.size() > 0) {
        // Remaining named args belong to included-record params.
        setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
    }
    iExpr.requiredArgs = args;
}

// Distributes leftover named args into the included-record literals: a named arg
// matching a declared (non-never) field goes to that record; otherwise it goes to
// the record that permits additional (rest) fields.
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    for (String name : namedArgs.keySet()) {
        boolean isAdditionalField = true;
        BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);
        for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
            LinkedHashMap<String, BField> fields = ((BRecordType) recordLiteral.getBType()).fields;
            if (fields.containsKey(name) && fields.get(name).type.tag != TypeTags.NEVER) {
                isAdditionalField = false;
                createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);
                break;
            }
        }
        if (isAdditionalField) {
            createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);
        }
    }
}

// Appends a key-value field built from the named arg to the record literal.
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    BLangSimpleVarRef varRef = new BLangSimpleVarRef();
    varRef.variableName = expr.name;
    BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.
            createBLangRecordKeyValue(varRef, expr.expr);
    recordLiteral.fields.add(recordKeyValueField);
}

// Builds the error-branch match pattern used when desugaring checked expressions:
// the error is either failed/returned (when the enclosing function can return it,
// or an on-fail clause is active) or panicked.
private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(Location location,
                                                                      BSymbol invokableSymbol,
                                                                      List<BType> equivalentErrorTypes,
                                                                      boolean isCheckPanicExpr) {
    BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION
            ? ((BUnionType) enclosingFuncReturnType).getMemberTypes()
            : new LinkedHashSet<BType>() {{
                add(enclosingFuncReturnType);
            }};
    // True when every possible error is assignable to some member of the
    // enclosing function's return type.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(location,
            patternFailureCaseVarName, symTable.errorType, null,
            new BVarSymbol(0, names.fromString(patternFailureCaseVarName), this.env.scope.owner.pkgID,
                    symTable.errorType, this.env.scope.owner, location, VIRTUAL));
    BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(location,
            patternFailureCaseVar.symbol);
    BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
    patternBlockFailureCase.pos = location;
    if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
        // Fail (and possibly return) the error instead of panicking.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = location;
        failStmt.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(failStmt);
        if (returnOnError && this.shouldReturnErrors) {
            BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location,
                    rewrite(patternFailureCaseVarRef, env));
            errorReturn.desugared = true;
            failStmt.exprStmt = errorReturn;
        }
    } else {
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = location;
        panicNode.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(panicNode);
    }
    return ASTBuilderUtil.createMatchStatementPattern(location, patternFailureCaseVar,
            patternBlockFailureCase);
}

// Builds the success-branch match pattern: the matched value is assigned to the
// target (a fresh var when isVarDef, else the given LHS expression).
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(Location location, BType lhsType,
                                                                        boolean isVarDef, BVarSymbol varSymbol,
                                                                        BLangExpression lhsExpr) {
    String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
    BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(location,
            patternSuccessCaseVarName, lhsType, null,
            new BVarSymbol(0, names.fromString(patternSuccessCaseVarName), this.env.scope.owner.pkgID,
                    lhsType, this.env.scope.owner, location, VIRTUAL));
    BLangExpression varRefExpr;
    if (isVarDef) {
        varRefExpr = ASTBuilderUtil.createVariableRef(location, varSymbol);
    } else {
        varRefExpr = lhsExpr;
    }
    BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(location,
            patternSuccessCaseVar.symbol);
    BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(location,
            varRefExpr, patternSuccessCaseVarRef, false);
    BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(location,
            new ArrayList<BLangStatement>() {{
                add(assignmentStmtSuccessCase);
            }});
    return ASTBuilderUtil.createMatchStatementPattern(location, patternSuccessCaseVar,
            patternBlockSuccessCase);
}

// Lowers a whole match statement into a chain of if/else-if statements, one per
// pattern clause; a trailing "last pattern" becomes the final else body.
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
    List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;
    BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
    BLangIf currentIfNode = parentIfNode;
    for (int i = 1; i < patterns.size(); i++) {
        BLangMatchBindingPatternClause patternClause = patterns.get(i);
        if (i == patterns.size() - 1 && patternClause.isLastPattern) {
            currentIfNode.elseStmt =
                    getMatchPatternElseBody(patternClause, matchExprVar);
        } else {
            currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
            currentIfNode = (BLangIf) currentIfNode.elseStmt;
        }
    }
    return parentIfNode;
}

/**
 * Generate an if-else statement from the given match statement.
 *
 * @param pattern      match pattern statement node
 * @param matchExprVar variable node of the match expression
 * @return if else statement node
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
    BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
    if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
    }
    // For structured patterns, the matched value may need to be force-cast to the
    // type implied by the binding pattern before destructuring.
    BType expectedType = matchExprVar.getBType();
    if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
        BLangMatchStructuredBindingPatternClause matchPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
    }
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
        // Pick the destructuring-definition statement matching the pattern kind.
        BLangStatement varDefStmt;
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos,
                            (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }
        if (structuredPattern.typeGuardExpr != null) {
            // AND the type-guard (evaluated after destructuring, via a
            // statement-expression) into the if condition.
            BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
            blockStmt.addStatement(varDef);
            blockStmt.addStatement(varDefStmt);
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                    structuredPattern.typeGuardExpr);
            stmtExpr.setBType(symTable.booleanType);
            ifCondition = ASTBuilderUtil
                    .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType,
                            OperatorKind.AND, (BOperatorSymbol) symResolver
                                    .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                            symTable.booleanType));
        } else {
            structuredPattern.body.stmts.add(0, varDef);
            structuredPattern.body.stmts.add(1, varDefStmt);
        }
    }
    return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}

// Returns the body of a typed match pattern, prefixing a definition of the
// pattern variable (converted from the match expression) unless it is `_`.
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                           BLangSimpleVariable matchExprVar) {
    BLangBlockStmt body;
    BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern;
    if (patternClause.variable.name.value.equals(Names.IGNORE.value)) {
        return patternClause.body;
    }
    BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos,
            matchExprVar.symbol);
    BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef,
            patternClause.variable.getBType());
    BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "",
            patternClause.variable.getBType(), patternVarExpr, patternClause.variable.symbol);
BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar); patternClause.body.stmts.add(0, patternVarDef); body = patternClause.body; return body; } private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangBlockStmt body = pattern.body; if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) { BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol); BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; structuredPattern.bindingPatternVariable.expr = matchExprVarRef; BLangStatement varDefStmt; if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos, (BLangTupleVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos, (BLangRecordVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos, (BLangErrorVariable) structuredPattern.bindingPatternVariable); } else { varDefStmt = ASTBuilderUtil .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable); } structuredPattern.body.stmts.add(0, varDefStmt); body = structuredPattern.body; } return body; } BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) { if (lhsType.tag == TypeTags.NONE) { return expr; } BType rhsType = expr.getBType(); if (types.isSameType(rhsType, lhsType)) { return expr; } types.setImplicitCastExpr(expr, rhsType, lhsType); if (expr.impConversionExpr != null) { BLangExpression impConversionExpr = expr.impConversionExpr; 
expr.impConversionExpr = null; return impConversionExpr; } if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) { return expr; } if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) { return expr; } if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) { return expr; } BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = expr; conversionExpr.targetType = lhsType; conversionExpr.setBType(lhsType); conversionExpr.pos = expr.pos; conversionExpr.checkTypes = false; conversionExpr.internal = true; return conversionExpr; } private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause, BVarSymbol varSymbol) { BType patternType; switch (patternClause.getKind()) { case MATCH_STATIC_PATTERN_CLAUSE: BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) patternClause; patternType = staticPattern.literal.getBType(); break; case MATCH_STRUCTURED_PATTERN_CLAUSE: BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) patternClause; patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable); break; default: BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause; patternType = simplePattern.variable.getBType(); break; } BLangExpression binaryExpr; BType[] memberTypes; if (patternType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) patternType; memberTypes = unionType.getMemberTypes().toArray(new BType[0]); } else { memberTypes = new BType[1]; memberTypes[0] = patternType; } if (memberTypes.length == 1) { binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); } else { BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, 
memberTypes[1]); binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR, lhsExpr.getBType(), rhsExpr.getBType())); for (int i = 2; i < memberTypes.length; i++) { lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]); rhsExpr = binaryExpr; binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR, lhsExpr.getBType(), rhsExpr.getBType())); } } return binaryExpr; } private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) { if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) { BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable; List<BType> memberTypes = new ArrayList<>(); for (int i = 0; i < tupleVariable.memberVariables.size(); i++) { memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i))); } BTupleType tupleType = new BTupleType(memberTypes); if (tupleVariable.restVariable != null) { BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable); tupleType.restType = restArrayType.eType; } return tupleType; } if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) { BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable; BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++), env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos, VIRTUAL); recordSymbol.initializerFunc = createRecordInitFunc(); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "." 
+ recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); List<BLangSimpleVariable> typeDefFields = new ArrayList<>(); for (int i = 0; i < recordVariable.variableList.size(); i++) { String fieldNameStr = recordVariable.variableList.get(i).key.value; Name fieldName = names.fromString(fieldNameStr); BType fieldType = getStructuredBindingPatternType( recordVariable.variableList.get(i).valueBindingPattern); BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType, recordSymbol, bindingPatternVariable.pos, VIRTUAL); fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol)); typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordVarType = new BRecordType(recordSymbol); recordVarType.fields = fields; recordVarType.restFieldType = recordVariable.restParam != null ? 
((BRecordType) ((BLangSimpleVariable) recordVariable.restParam).getBType()).restFieldType : symTable.anydataType; recordSymbol.type = recordVarType; recordVarType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields, recordVarType, bindingPatternVariable.pos); recordTypeNode.initFunction = rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable), env); TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env); return recordVarType; } if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) { BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable; BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol( SymTag.ERROR, Flags.PUBLIC, names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++), env.enclPkg.symbol.pkgID, null, null, errorVariable.pos, VIRTUAL); BType detailType; if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) { detailType = symTable.detailType; } else { detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++, errorVariable.pos); BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType); recordTypeNode.initFunction = TypeDefBuilderHelper .createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env); } BErrorType errorType = new BErrorType(errorTypeSymbol, detailType); errorTypeSymbol.type = errorType; TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env); return errorType; } return bindingPatternVariable.getBType(); } private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) { List<BLangSimpleVariable> fieldList = new ArrayList<>(); for 
(BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) { BVarSymbol symbol = field.valueBindingPattern.symbol; if (symbol == null) { symbol = new BVarSymbol(Flags.PUBLIC, names.fromString(field.key.value + "$"), this.env.enclPkg.packageID, symTable.pureType, null, field.valueBindingPattern.pos, VIRTUAL); } BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable( field.valueBindingPattern.pos, symbol.name.value, field.valueBindingPattern.getBType(), field.valueBindingPattern.expr, symbol); fieldList.add(fieldVar); } return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos); } private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail, BLangSimpleVariable restDetail, int errorNo, Location pos) { BRecordType detailRecordType = createAnonRecordType(pos); if (restDetail == null) { detailRecordType.sealed = true; } for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) { Name fieldName = names.fromIdNode(detailEntry.key); BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern); BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType, detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL); detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym)); detailRecordType.tsymbol.scope.define(fieldName, fieldSym); } return detailRecordType; } private BRecordType createAnonRecordType(Location pos) { BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol( SymTag.RECORD, Flags.PUBLIC, names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)), env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL); detailRecordTypeSymbol.initializerFunc = createRecordInitFunc(); detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol); detailRecordTypeSymbol.scope.define( names.fromString(detailRecordTypeSymbol.name.value + "." 
+ detailRecordTypeSymbol.initializerFunc.funcName.value), detailRecordTypeSymbol.initializerFunc.symbol); BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol); detailRecordType.restFieldType = symTable.anydataType; return detailRecordType; } private BAttachedFunction createRecordInitFunc() { BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false, symTable.builtinPos, VIRTUAL); initFuncSymbol.retType = symTable.nilType; return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, symTable.builtinPos); } BLangErrorType createErrorTypeNode(BErrorType errorType) { BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode(); errorTypeNode.setBType(errorType); return errorTypeNode; } private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause, BVarSymbol varSymbol, BType patternType) { Location pos = patternClause.pos; BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) { BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause; return createBinaryExpression(pos, varRef, pattern.literal); } if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) { return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType); } if (patternType == symTable.nilType) { BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null); return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType, OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL, symTable.anyType, symTable.nilType)); } else { return 
createIsAssignableExpression(pos, varSymbol, patternType); } } private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef, BLangExpression expression) { BLangBinaryExpr binaryExpr; if (NodeKind.GROUP_EXPR == expression.getKind()) { return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression); } if (NodeKind.BINARY_EXPR == expression.getKind()) { binaryExpr = (BLangBinaryExpr) expression; BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr); BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr); binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType)); } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) { BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode(); anyType.setBType(symTable.anyType); anyType.typeKind = TypeKind.ANY; return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType); } else { binaryExpr = ASTBuilderUtil .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null); BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(), expression.getBType()); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(), binaryExpr, env); } binaryExpr.opSymbol = (BOperatorSymbol) opSymbol; } return binaryExpr; } private BLangIsAssignableExpr createIsAssignableExpression(Location pos, BVarSymbol varSymbol, BType patternType) { BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names, symTable.builtinPos); } 
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) { return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType); } private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode(); varRef.pos = variable.pos; varRef.variableName = variable.name; varRef.symbol = variable.symbol; varRef.setBType(variable.getBType()); BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode(); assignmentStmt.expr = variable.expr; assignmentStmt.pos = variable.pos; assignmentStmt.setVariable(varRef); return assignmentStmt; } private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable, BVarSymbol selfSymbol) { return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol, variable.name); } private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr, BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol, BLangIdentifier fieldName) { BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol); BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName); fieldAccess.symbol = fieldSymbol; fieldAccess.setBType(fieldType); fieldAccess.isStoreOnCreation = true; BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode(); assignmentStmt.expr = expr; assignmentStmt.pos = function.pos; assignmentStmt.setVariable(fieldAccess); SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env); return rewrite(assignmentStmt, initFuncEnv); } private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) { List<BType> exprTypes; List<BType> unmatchedTypes = new ArrayList<>(); if (bLangMatchExpression.expr.getBType().tag == 
TypeTags.UNION) { BUnionType unionType = (BUnionType) bLangMatchExpression.expr.getBType(); exprTypes = new ArrayList<>(unionType.getMemberTypes()); } else { exprTypes = Lists.of(bLangMatchExpression.getBType()); } for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { if (this.types.isAssignable(type, pattern.variable.getBType())) { assignable = true; break; } } if (!assignable) { unmatchedTypes.add(type); } } if (unmatchedTypes.isEmpty()) { return; } BType defaultPatternType; if (unmatchedTypes.size() == 1) { defaultPatternType = unmatchedTypes.get(0); } else { defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes)); } String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default"; BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos, patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName), this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL)); BLangMatchExprPatternClause defaultPattern = (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern(); defaultPattern.variable = patternMatchCaseVar; defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol); defaultPattern.pos = bLangMatchExpression.pos; bLangMatchExpression.patternClauses.add(defaultPattern); } private boolean safeNavigate(BLangAccessExpression accessExpr) { if (accessExpr.isLValue || accessExpr.expr == null) { return false; } if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) { return true; } NodeKind kind = accessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { return safeNavigate((BLangAccessExpression) accessExpr.expr); } return false; } private BLangExpression 
rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) { BType originalExprType = accessExpr.getBType(); String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result"; BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null, new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID, accessExpr.getBType(), this.env.scope.owner, accessExpr.pos, VIRTUAL)); BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol); handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar); BLangMatch matcEXpr = this.matchStmtStack.firstElement(); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr)); BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef); stmtExpression.setBType(originalExprType); this.matchStmtStack = new Stack<>(); this.accessExprStack = new Stack<>(); this.successPattern = null; this.safeNavigationAssignment = null; return stmtExpression; } private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) { if (accessExpr.expr == null) { return; } NodeKind kind = accessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar); } if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) { BType originalType = accessExpr.originalType; if (TypeTags.isXMLTypeTag(originalType.tag)) { accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType)); } else { accessExpr.setBType(originalType); } if (this.safeNavigationAssignment != null) { this.safeNavigationAssignment.expr = 
addConversionExprIfRequired(accessExpr, tempResultVar.getBType()); } return; } /* * If the field access is a safe navigation, create a match expression. * Then chain the current expression as the success-pattern of the parent * match expr, if available. * eg: * x but { <--- parent match expr * error e => e, * T t => t.y but { <--- current expr * error e => e, * R r => r.z * } * } */ BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>()); boolean isAllTypesRecords = false; LinkedHashSet<BType> memTypes = new LinkedHashSet<>(); if (accessExpr.expr.getBType().tag == TypeTags.UNION) { memTypes = new LinkedHashSet<>(((BUnionType) accessExpr.expr.getBType()).getMemberTypes()); isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes); } if (accessExpr.nilSafeNavigation) { matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar)); matchStmt.setBType(type); memTypes.remove(symTable.nilType); } if (accessExpr.errorSafeNavigation) { matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar)); matchStmt.setBType(type); matchStmt.pos = accessExpr.pos; memTypes.remove(symTable.errorType); } BLangMatchTypedBindingPatternClause successPattern = null; Name field = getFieldName(accessExpr); if (field == Names.EMPTY) { successPattern = getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar, accessExpr.errorSafeNavigation); matchStmt.patternClauses.add(successPattern); pushToMatchStatementStack(matchStmt, accessExpr, successPattern); return; } if (isAllTypesRecords) { for (BType memberType : memTypes) { BRecordType recordType = (BRecordType) memberType; if (recordType.fields.containsKey(field.value) || !recordType.sealed) { successPattern = getSuccessPattern(memberType, accessExpr, tempResultVar, accessExpr.errorSafeNavigation); matchStmt.patternClauses.add(successPattern); } } matchStmt.patternClauses.add(getMatchAllAndNilReturnPattern(accessExpr, tempResultVar)); 
pushToMatchStatementStack(matchStmt, accessExpr, successPattern); return; } successPattern = getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar, accessExpr.errorSafeNavigation); matchStmt.patternClauses.add(successPattern); pushToMatchStatementStack(matchStmt, accessExpr, successPattern); } private void pushToMatchStatementStack(BLangMatch matchStmt, BLangAccessExpression accessExpr, BLangMatchTypedBindingPatternClause successPattern) { this.matchStmtStack.push(matchStmt); if (this.successPattern != null) { this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt)); } this.successPattern = successPattern; } private Name getFieldName(BLangAccessExpression accessExpr) { Name field = Names.EMPTY; if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { field = new Name(((BLangFieldBasedAccess) accessExpr).field.value); } else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr; if (indexBasedExpression.getKind() == NodeKind.LITERAL) { field = new Name(((BLangLiteral) indexBasedExpression).value.toString()); } } return field; } private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) { for (BType memType : memTypes) { int typeTag = memType.tag; if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) { return false; } } return true; } private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error"; BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(errorPatternVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner, expr.pos, VIRTUAL)); BLangSimpleVarRef assignmentRhsExpr = 
ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause errorPattern = ASTBuilderUtil .createMatchStatementPattern(expr.pos, errorPatternVar, patternBody); return errorPattern; } private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(Location pos, BLangExpression expr) { String nullPatternVarName = IGNORE.toString(); BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(pos, nullPatternVarName, symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName), this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner, pos, VIRTUAL)); BLangMatchExprPatternClause nullPattern = (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern(); nullPattern.variable = errorPatternVar; nullPattern.expr = expr; nullPattern.pos = pos; return nullPattern; } private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null"; BLangSimpleVariable nullPatternVar = ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName, symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName), this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner, expr.pos, VIRTUAL)); BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false); 
BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause nullPattern = ASTBuilderUtil .createMatchStatementPattern(expr.pos, nullPatternVar, patternBody); return nullPattern; } private BLangMatchStaticBindingPatternClause getMatchAllAndNilReturnPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, createLiteral(expr.pos, symTable.nilType, Names.NIL_VALUE), false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchStaticBindingPatternClause matchAllPattern = (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern(); String matchAllVarName = "_"; matchAllPattern.literal = ASTBuilderUtil.createVariableRef(expr.pos, new BVarSymbol(0, names.fromString(matchAllVarName), this.env.scope.owner.pkgID, symTable.anyType, this.env.scope.owner, expr.pos, VIRTUAL)); matchAllPattern.body = patternBody; return matchAllPattern; } private BLangMatchTypedBindingPatternClause getSuccessPattern(BType type, BLangAccessExpression accessExpr, BLangSimpleVariable tempResultVar, boolean liftError) { type = types.getSafeType(type, true, liftError); String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success"; BVarSymbol successPatternSymbol; if (type.tag == TypeTags.INVOKABLE) { successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName), this.env.scope.owner.pkgID, type, this.env.scope.owner, accessExpr.pos, VIRTUAL); } else { successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName), this.env.scope.owner.pkgID, type, this.env.scope.owner, accessExpr.pos, VIRTUAL); } BLangSimpleVariable successPatternVar = 
ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName, type, null, successPatternSymbol); BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr); if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr; } if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) { ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol = ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol; } tempAccessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol); tempAccessExpr.errorSafeNavigation = false; tempAccessExpr.nilSafeNavigation = false; accessExpr.cloneRef = null; if (TypeTags.isXMLTypeTag(tempAccessExpr.expr.getBType().tag)) { tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType, symTable.nilType)); } else { tempAccessExpr.setBType(accessExpr.originalType); } tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess; BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol); BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType()); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause successPattern = ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody); this.safeNavigationAssignment = assignmentStmt; return successPattern; } private boolean safeNavigateLHS(BLangExpression expr) { if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR && expr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) { return false; } BLangExpression varRef = 
((BLangAccessExpression) expr).expr; if (varRef.getBType().isNullable()) { return true; } return safeNavigateLHS(varRef); } private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr, boolean safeAssignment) { this.accessExprStack = new Stack<>(); List<BLangStatement> stmts = new ArrayList<>(); createLHSSafeNavigation(stmts, accessExpr.expr); BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, cloneExpression(accessExpr), rhsExpr); stmts.add(assignment); return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts); } private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) { NodeKind kind = expr.getKind(); boolean root = false; if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { BLangAccessExpression accessExpr = (BLangAccessExpression) expr; createLHSSafeNavigation(stmts, accessExpr.expr); accessExpr.expr = accessExprStack.pop(); } else { root = true; } if (expr.getKind() == NodeKind.INVOCATION) { BLangInvocation invocation = (BLangInvocation) expr; BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value + "i_intermediate"), this.env.scope.owner.pkgID, invocation.getBType(), this.env.scope.owner, expr.pos, VIRTUAL); BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos, interMediateSymbol.name.value, invocation.getBType(), invocation, interMediateSymbol); BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos, intermediateVariable); stmts.add(intermediateVariableDefinition); expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol); } if (expr.getBType().isNullable()) { BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode()); isNillTest.setBType(symTable.booleanType); BLangBlockStmt thenStmt = 
ASTBuilderUtil.createBlockStmt(expr.pos); expr = cloneExpression(expr); expr.setBType(types.getSafeType(expr.getBType(), true, false)); if (isDefaultableMappingType(expr.getBType()) && !root) { BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); jsonLiteral.setBType(expr.getBType()); jsonLiteral.pos = expr.pos; BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos, expr, jsonLiteral); thenStmt.addStatement(assignment); } else { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = ERROR_REASON_NULL_REFERENCE_ERROR; literal.setBType(symTable.stringType); BLangErrorConstructorExpr errorConstructorExpr = (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode(); BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env, names.fromString(""), names.fromString("error")); errorConstructorExpr.setBType(symbol.type); errorConstructorExpr.pos = expr.pos; List<BLangExpression> positionalArgs = new ArrayList<>(); positionalArgs.add(literal); errorConstructorExpr.positionalArgs = positionalArgs; BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.expr = errorConstructorExpr; panicNode.pos = expr.pos; thenStmt.addStatement(panicNode); } BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null); stmts.add(ifelse); } accessExprStack.push(expr); } BLangValueType getNillTypeNode() { BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); nillTypeNode.typeKind = TypeKind.NIL; nillTypeNode.setBType(symTable.nilType); return nillTypeNode; } private BLangValueExpression cloneExpression(BLangExpression expr) { switch (expr.getKind()) { case SIMPLE_VARIABLE_REF: return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol); case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: return cloneAccessExpr((BLangAccessExpression) expr); 
default: throw new IllegalStateException(); } } private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) { if (originalAccessExpr.expr == null) { return originalAccessExpr; } BLangExpression varRef; NodeKind kind = originalAccessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr); } else { varRef = cloneExpression(originalAccessExpr.expr); } varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false)); BLangAccessExpression accessExpr; switch (originalAccessExpr.getKind()) { case FIELD_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef, ((BLangFieldBasedAccess) originalAccessExpr).field); break; case INDEX_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef, ((BLangIndexBasedAccess) originalAccessExpr).indexExpr); break; default: throw new IllegalStateException(); } accessExpr.originalType = originalAccessExpr.originalType; accessExpr.pos = originalAccessExpr.pos; accessExpr.isLValue = originalAccessExpr.isLValue; accessExpr.symbol = originalAccessExpr.symbol; accessExpr.errorSafeNavigation = false; accessExpr.nilSafeNavigation = false; accessExpr.setBType(originalAccessExpr.originalType); return accessExpr; } private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.intType, symTable.intType)); } private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, 
symTable.intType, OperatorKind.SUB, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB, symTable.intType, symTable.intType)); } private BLangLiteral getBooleanLiteral(boolean value) { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = value; literal.setBType(symTable.booleanType); literal.pos = symTable.builtinPos; return literal; } private boolean isDefaultableMappingType(BType type) { switch (types.getSafeType(type, true, false).tag) { case TypeTags.JSON: case TypeTags.MAP: case TypeTags.RECORD: return true; default: return false; } } private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) { BType returnType = symTable.nilType; BLangFunction userDefinedInitMethod = classDefinition.initFunction; if (userDefinedInitMethod != null) { returnType = userDefinedInitMethod.getBType().getReturnType(); } BLangFunction initFunction = TypeDefBuilderHelper.createInitFunctionForStructureType(classDefinition.pos, classDefinition.symbol, env, names, GENERATED_INIT_SUFFIX, classDefinition.getBType(), returnType); BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol); typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol, (BInvokableType) initFunction.getBType(), classDefinition.pos); classDefinition.generatedInitFunction = initFunction; initFunction.returnTypeNode.setBType(returnType); return rewrite(initFunction, env); } private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) { /* * Desugar (lhsExpr && rhsExpr) to following if-else: * * logical AND: * ------------- * T $result$; * if (lhsExpr) { * $result$ = rhsExpr; * } else { * $result$ = false; * } * * logical OR: * ------------- * T $result$; * if (lhsExpr) { * $result$ = true; * } else { * $result$ = rhsExpr; * } * */ BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null, 
symTable.builtinPos); BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol); BLangExpression thenResult; if (binaryExpr.opKind == OperatorKind.AND) { thenResult = binaryExpr.rhsExpr; } else { thenResult = getBooleanLiteral(true); } BLangAssignment thenAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult); thenBody.addStatement(thenAssignment); BLangExpression elseResult; BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol); if (binaryExpr.opKind == OperatorKind.AND) { elseResult = getBooleanLiteral(false); } else { elseResult = binaryExpr.rhsExpr; } BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult); elseBody.addStatement(elseAssignment); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse)); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.setBType(binaryExpr.getBType()); result = rewriteExpr(stmtExpr); } /** * Split packahe init function into several smaller functions. 
* * @param packageNode package node * @param env symbol environment * @return initial init function but trimmed in size */ private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) { int methodSize = INIT_METHOD_SPLIT_SIZE; BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body; if (!isJvmTarget) { return packageNode.initFunction; } BLangFunction initFunction = packageNode.initFunction; List<BLangFunction> generatedFunctions = new ArrayList<>(); List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts); funcBody.stmts.clear(); BLangFunction newFunc = initFunction; BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body; int varDefIndex = 0; for (int i = 0; i < stmts.size(); i++) { BLangStatement statement = stmts.get(i); if (statement.getKind() == NodeKind.VARIABLE_DEF) { break; } varDefIndex++; if (i > 0 && (i % methodSize == 0 || isAssignmentWithInitOrRecordLiteralExpr(statement))) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } List<BLangStatement> chunkStmts = new ArrayList<>(); for (int i = varDefIndex; i < stmts.size(); i++) { BLangStatement stmt = stmts.get(i); chunkStmts.add(stmt); varDefIndex++; if ((stmt.getKind() == NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) && (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) { if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.addAll(chunkStmts); chunkStmts.clear(); } else if ((stmt.getKind() == 
NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) && Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags, Flags.LISTENER) ) { break; } } newFuncBody.stmts.addAll(chunkStmts); for (int i = varDefIndex; i < stmts.size(); i++) { if (i > 0 && i % methodSize == 0) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } generatedFunctions.add(newFunc); for (int j = 0; j < generatedFunctions.size() - 1; j++) { BLangFunction thisFunction = generatedFunctions.get(j); BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(initFunction.pos, createInvocationNode(generatedFunctions.get(j + 1).name.value, new ArrayList<>(), symTable.errorOrNilType), symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt expressionStmt = ASTBuilderUtil .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body); expressionStmt.expr = checkedExpr; expressionStmt.expr.pos = initFunction.pos; if (j > 0) { thisFunction = rewrite(thisFunction, env); packageNode.functions.add(thisFunction); packageNode.topLevelNodes.add(thisFunction); } } if (generatedFunctions.size() > 1) { BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1); lastFunc = rewrite(lastFunc, env); packageNode.functions.add(lastFunc); packageNode.topLevelNodes.add(lastFunc); } return generatedFunctions.get(0); } private boolean isAssignmentWithInitOrRecordLiteralExpr(BLangStatement statement) { if (statement.getKind() == NodeKind.ASSIGNMENT) { return isMappingOrObjectConstructorOrObjInit(((BLangAssignment) statement).getExpression()); } return false; } protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) { switch 
(expression.getKind()) { case TYPE_INIT_EXPR: case RECORD_LITERAL_EXPR: case OBJECT_CTOR_EXPRESSION: return true; case CHECK_EXPR: return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr); case TYPE_CONVERSION_EXPR: return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr); default: return false; } } /** * Create an intermediate package init function. * * @param pkgNode package node * @param env symbol environment of package */ private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) { String alias = pkgNode.symbol.pkgID.toString(); BLangFunction initFunction = ASTBuilderUtil .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++), symTable); createInvokableSymbol(initFunction, env); return initFunction; } private BType getRestType(BInvokableSymbol invokableSymbol) { if (invokableSymbol != null && invokableSymbol.restParam != null) { return invokableSymbol.restParam.type; } return null; } private BType getRestType(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.getBType(); } return null; } private BVarSymbol getRestSymbol(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.symbol; } return null; } private boolean isComputedKey(RecordLiteralNode.RecordField field) { if (!field.isKeyValueField()) { return false; } return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey; } private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) { List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields; BType type = mappingConstructorExpr.getBType(); Location pos = mappingConstructorExpr.pos; List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size()); for (RecordLiteralNode.RecordField field : fields) { if 
(field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValueField = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangRecordLiteral.BLangRecordKey key = keyValueField.key; BLangExpression origKey = key.expr; BLangExpression keyExpr; if (key.computedKey) { keyExpr = origKey; } else { keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos, StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) : ((BLangLiteral) origKey); } BLangRecordLiteral.BLangRecordKeyValueField rewrittenField = ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr), rewriteExpr(keyValueField.valueExpr)); rewrittenField.pos = keyValueField.pos; rewrittenField.key.pos = key.pos; rewrittenFields.add(rewrittenField); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field; rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue( rewriteExpr(createStringLiteral(pos, StringEscapeUtils.unescapeJava(varRefField.variableName.value))), rewriteExpr(varRefField))); } else { BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField = (BLangRecordLiteral.BLangRecordSpreadOperatorField) field; spreadOpField.expr = rewriteExpr(spreadOpField.expr); rewrittenFields.add(spreadOpField); } } fields.clear(); return type.tag == TypeTags.RECORD ? 
new BLangStructLiteral(pos, type, rewrittenFields) : new BLangMapLiteral(pos, type, rewrittenFields); } protected void addTransactionInternalModuleImport() { if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) { BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode(); List<BLangIdentifier> pkgNameComps = new ArrayList<>(); pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value)); importDcl.pkgNameComps = pkgNameComps; importDcl.pos = env.enclPkg.symbol.pos; importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value); importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx"); importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, ""); importDcl.symbol = symTable.internalTransactionModuleSymbol; env.enclPkg.imports.add(importDcl); env.enclPkg.symbol.imports.add(importDcl.symbol); } } }
class definition node for which the initializer is created * @param env The env for the type node * @return The generated initializer method */ private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) { BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env); if (classDefinition.initFunction == null) { return generatedInitFunc; } return wireUpGeneratedInitFunction(generatedInitFunc, (BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction); }
class definition node for which the initializer is created * @param env The env for the type node * @return The generated initializer method */ private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) { BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env); if (classDefinition.initFunction == null) { return generatedInitFunc; } return wireUpGeneratedInitFunction(generatedInitFunc, (BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction); }
This can not be `checkState(writer == null)` because single dispatcher will handle 5 `CheckpointStartRequests` from 5 subtasks (assuming 5 subtasks are configured to share the same file?). If so, maybe add a comment explaining this?
private void dispatchInternal(ChannelStateWriteRequest request) throws Exception { if (request instanceof SubtaskRegisterRequest) { SubtaskRegisterRequest req = (SubtaskRegisterRequest) request; SubtaskID subtaskID = SubtaskID.of(req.getJobID(), req.getJobVertexID(), req.getSubtaskIndex()); subtasks.add(subtaskID); return; } else if (request instanceof SubtaskReleaseRequest) { SubtaskReleaseRequest req = (SubtaskReleaseRequest) request; SubtaskID subtaskID = SubtaskID.of(req.getJobID(), req.getJobVertexID(), req.getSubtaskIndex()); subtasks.remove(subtaskID); if (writer == null) { return; } writer.releaseSubtask(subtaskID); return; } if (isAbortedCheckpoint(request.getCheckpointId())) { if (request.getCheckpointId() != maxAbortedCheckpointId) { request.cancel(new CheckpointException(CHECKPOINT_DECLINED_SUBSUMED)); return; } SubtaskID requestSubtask = SubtaskID.of( request.getJobID(), request.getJobVertexID(), request.getSubtaskIndex()); if (requestSubtask.equals(abortedSubtaskID)) { request.cancel(abortedCause); } else { request.cancel( new CheckpointException( CHANNEL_STATE_SHARED_STREAM_EXCEPTION, abortedCause)); } return; } if (request instanceof CheckpointStartRequest) { checkState( request.getCheckpointId() >= ongoingCheckpointId, String.format( "Checkpoint must be incremented, ongoingCheckpointId is %s, but the request is %s.", ongoingCheckpointId, request)); if (request.getCheckpointId() > ongoingCheckpointId) { failAndClearWriter(new CheckpointException(CHECKPOINT_DECLINED_SUBSUMED)); } CheckpointStartRequest req = (CheckpointStartRequest) request; if (writer == null) { this.writer = buildWriter(req); this.ongoingCheckpointId = request.getCheckpointId(); } writer.registerSubtaskResult( SubtaskID.of(req.getJobID(), req.getJobVertexID(), req.getSubtaskIndex()), req.getTargetResult()); } else if (request instanceof CheckpointInProgressRequest) { CheckpointInProgressRequest req = (CheckpointInProgressRequest) request; checkArgument( ongoingCheckpointId == 
req.getCheckpointId() && writer != null, "writer not found while processing request: " + req); req.execute(writer); } else if (request instanceof CheckpointAbortRequest) { CheckpointAbortRequest req = (CheckpointAbortRequest) request; if (request.getCheckpointId() > maxAbortedCheckpointId) { this.maxAbortedCheckpointId = req.getCheckpointId(); this.abortedCause = req.getThrowable(); this.abortedSubtaskID = SubtaskID.of(req.getJobID(), req.getJobVertexID(), req.getSubtaskIndex()); } if (req.getCheckpointId() == ongoingCheckpointId) { failAndClearWriter( req.getJobID(), req.getJobVertexID(), req.getSubtaskIndex(), req.getThrowable()); } else if (request.getCheckpointId() > ongoingCheckpointId) { failAndClearWriter(new CheckpointException(CHECKPOINT_DECLINED_SUBSUMED)); } } else { throw new IllegalArgumentException("unknown request type: " + request); } }
if (writer == null) {
private void dispatchInternal(ChannelStateWriteRequest request) throws Exception { if (request instanceof SubtaskRegisterRequest) { SubtaskRegisterRequest req = (SubtaskRegisterRequest) request; SubtaskID subtaskID = SubtaskID.of(req.getJobVertexID(), req.getSubtaskIndex()); registeredSubtasks.add(subtaskID); return; } else if (request instanceof SubtaskReleaseRequest) { SubtaskReleaseRequest req = (SubtaskReleaseRequest) request; SubtaskID subtaskID = SubtaskID.of(req.getJobVertexID(), req.getSubtaskIndex()); registeredSubtasks.remove(subtaskID); if (writer == null) { return; } writer.releaseSubtask(subtaskID); return; } if (isAbortedCheckpoint(request.getCheckpointId())) { handleAbortedRequest(request); } else if (request instanceof CheckpointStartRequest) { handleCheckpointStartRequest(request); } else if (request instanceof CheckpointInProgressRequest) { handleCheckpointInProgressRequest((CheckpointInProgressRequest) request); } else if (request instanceof CheckpointAbortRequest) { handleCheckpointAbortRequest(request); } else { throw new IllegalArgumentException("unknown request type: " + request); } }
class ChannelStateWriteRequestDispatcherImpl implements ChannelStateWriteRequestDispatcher { private static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriteRequestDispatcherImpl.class); private final CheckpointStorage checkpointStorage; private final JobID jobID; private final ChannelStateSerializer serializer; private final Set<SubtaskID> subtasks; private CheckpointStorageWorkerView streamFactoryResolver; /** * It is the checkpointId corresponding to writer. And It should be always update with {@link * */ private long ongoingCheckpointId; /** * The checkpoint that checkpointId is less than or equal to maxAbortedCheckpointId should be * aborted. */ private long maxAbortedCheckpointId; /** The aborted subtask of the maxAbortedCheckpointId. */ private SubtaskID abortedSubtaskID; /** The aborted cause of the maxAbortedCheckpointId. */ private Throwable abortedCause; /** * The channelState writer of ongoing checkpointId, it can be null when the writer is finished. */ private ChannelStateCheckpointWriter writer; ChannelStateWriteRequestDispatcherImpl( CheckpointStorage checkpointStorage, JobID jobID, ChannelStateSerializer serializer) { this.checkpointStorage = checkNotNull(checkpointStorage); this.jobID = jobID; this.serializer = checkNotNull(serializer); this.subtasks = new HashSet<>(); this.ongoingCheckpointId = -1; this.maxAbortedCheckpointId = -1; } @Override public void dispatch(ChannelStateWriteRequest request) throws Exception { LOG.trace("process {}", request); try { dispatchInternal(request); } catch (Exception e) { try { request.cancel(e); } catch (Exception ex) { e.addSuppressed(ex); } throw e; } } private boolean isAbortedCheckpoint(long checkpointId) { return checkpointId < ongoingCheckpointId || checkpointId <= maxAbortedCheckpointId; } private void failAndClearWriter(Throwable e) { if (writer == null) { return; } writer.fail(e); writer = null; } private void failAndClearWriter( JobID jobID, JobVertexID jobVertexID, int subtaskIndex, 
Throwable throwable) { if (writer == null) { return; } writer.fail(jobID, jobVertexID, subtaskIndex, throwable); writer = null; } private ChannelStateCheckpointWriter buildWriter(CheckpointStartRequest request) throws Exception { return new ChannelStateCheckpointWriter( subtasks, request.getCheckpointId(), getStreamFactoryResolver() .resolveCheckpointStorageLocation( request.getCheckpointId(), request.getLocationReference()), serializer, () -> { checkState( request.getCheckpointId() == ongoingCheckpointId, "The ongoingCheckpointId[%s] was changed when clear writer of checkpoint[%s], it might be a bug.", ongoingCheckpointId, request.getCheckpointId()); this.writer = null; }); } @Override public void fail(Throwable cause) { if (writer == null) { return; } try { writer.fail(cause); } catch (Exception ex) { LOG.warn("unable to fail write channel state writer", cause); } writer = null; } CheckpointStorageWorkerView getStreamFactoryResolver() throws IOException { if (streamFactoryResolver == null) { streamFactoryResolver = checkpointStorage.createCheckpointStorage(jobID); } return streamFactoryResolver; } }
class ChannelStateWriteRequestDispatcherImpl implements ChannelStateWriteRequestDispatcher { private static final Logger LOG = LoggerFactory.getLogger(ChannelStateWriteRequestDispatcherImpl.class); private final CheckpointStorage checkpointStorage; private final JobID jobID; private final ChannelStateSerializer serializer; private final Set<SubtaskID> registeredSubtasks; private CheckpointStorageWorkerView streamFactoryResolver; /** * It is the checkpointId corresponding to writer. And It should be always update with {@link * */ private long ongoingCheckpointId; /** * The checkpoint that checkpointId is less than or equal to maxAbortedCheckpointId should be * aborted. */ private long maxAbortedCheckpointId; /** The aborted subtask of the maxAbortedCheckpointId. */ private SubtaskID abortedSubtaskID; /** The aborted cause of the maxAbortedCheckpointId. */ private Throwable abortedCause; /** * The channelState writer of ongoing checkpointId, it can be null when the writer is finished. */ private ChannelStateCheckpointWriter writer; ChannelStateWriteRequestDispatcherImpl( CheckpointStorage checkpointStorage, JobID jobID, ChannelStateSerializer serializer) { this.checkpointStorage = checkNotNull(checkpointStorage); this.jobID = jobID; this.serializer = checkNotNull(serializer); this.registeredSubtasks = new HashSet<>(); this.ongoingCheckpointId = -1; this.maxAbortedCheckpointId = -1; } @Override public void dispatch(ChannelStateWriteRequest request) throws Exception { LOG.trace("process {}", request); try { dispatchInternal(request); } catch (Exception e) { try { request.cancel(e); } catch (Exception ex) { e.addSuppressed(ex); } throw e; } } private void handleAbortedRequest(ChannelStateWriteRequest request) throws Exception { if (request.getCheckpointId() != maxAbortedCheckpointId) { request.cancel(new CheckpointException(CHECKPOINT_DECLINED_SUBSUMED)); return; } SubtaskID requestSubtask = SubtaskID.of(request.getJobVertexID(), request.getSubtaskIndex()); if 
(requestSubtask.equals(abortedSubtaskID)) { request.cancel(abortedCause); } else { request.cancel( new CheckpointException(CHANNEL_STATE_SHARED_STREAM_EXCEPTION, abortedCause)); } } private void handleCheckpointStartRequest(ChannelStateWriteRequest request) throws Exception { checkState( request.getCheckpointId() >= ongoingCheckpointId, String.format( "Checkpoint must be incremented, ongoingCheckpointId is %s, but the request is %s.", ongoingCheckpointId, request)); if (request.getCheckpointId() > ongoingCheckpointId) { failAndClearWriter(new CheckpointException(CHECKPOINT_DECLINED_SUBSUMED)); } CheckpointStartRequest req = (CheckpointStartRequest) request; if (writer == null) { this.writer = buildWriter(req); this.ongoingCheckpointId = request.getCheckpointId(); } writer.registerSubtaskResult( SubtaskID.of(req.getJobVertexID(), req.getSubtaskIndex()), req.getTargetResult()); } private void handleCheckpointInProgressRequest(CheckpointInProgressRequest req) throws Exception { checkArgument( ongoingCheckpointId == req.getCheckpointId() && writer != null, "writer not found while processing request: " + req); req.execute(writer); } private void handleCheckpointAbortRequest(ChannelStateWriteRequest request) { CheckpointAbortRequest req = (CheckpointAbortRequest) request; if (request.getCheckpointId() > maxAbortedCheckpointId) { this.maxAbortedCheckpointId = req.getCheckpointId(); this.abortedCause = req.getThrowable(); this.abortedSubtaskID = SubtaskID.of(req.getJobVertexID(), req.getSubtaskIndex()); } if (req.getCheckpointId() == ongoingCheckpointId) { failAndClearWriter(req.getJobVertexID(), req.getSubtaskIndex(), req.getThrowable()); } else if (request.getCheckpointId() > ongoingCheckpointId) { failAndClearWriter(new CheckpointException(CHECKPOINT_DECLINED_SUBSUMED)); } } private boolean isAbortedCheckpoint(long checkpointId) { return checkpointId < ongoingCheckpointId || checkpointId <= maxAbortedCheckpointId; } private void failAndClearWriter(Throwable e) { if 
(writer == null) { return; } writer.fail(e); writer = null; } private void failAndClearWriter( JobVertexID jobVertexID, int subtaskIndex, Throwable throwable) { if (writer == null) { return; } writer.fail(jobVertexID, subtaskIndex, throwable); writer = null; } private ChannelStateCheckpointWriter buildWriter(CheckpointStartRequest request) throws Exception { return new ChannelStateCheckpointWriter( registeredSubtasks, request.getCheckpointId(), getStreamFactoryResolver() .resolveCheckpointStorageLocation( request.getCheckpointId(), request.getLocationReference()), serializer, () -> { checkState( request.getCheckpointId() == ongoingCheckpointId, "The ongoingCheckpointId[%s] was changed when clear writer of checkpoint[%s], it might be a bug.", ongoingCheckpointId, request.getCheckpointId()); this.writer = null; }); } @Override public void fail(Throwable cause) { if (writer == null) { return; } try { writer.fail(cause); } catch (Exception ex) { LOG.warn("unable to fail write channel state writer", cause); } writer = null; } CheckpointStorageWorkerView getStreamFactoryResolver() throws IOException { if (streamFactoryResolver == null) { streamFactoryResolver = checkpointStorage.createCheckpointStorage(jobID); } return streamFactoryResolver; } }
Can you modify the kernel logic instead of sql parse logic?
private BinaryOperationExpression createPatternMatchingOperationSegment(final AExprContext ctx) { String operator = getOriginalText(ctx.patternMatchingOperator()).toUpperCase(); ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ListExpression right = new ListExpression(ctx.aExpr(1).start.getStartIndex(), ctx.aExpr().get(ctx.aExpr().size() - 1).stop.getStopIndex()); for (int i = 1; i < ctx.aExpr().size(); i++) { right.getItems().add((ExpressionSegment) visit(ctx.aExpr().get(i))); } String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); }
String operator = getOriginalText(ctx.patternMatchingOperator()).toUpperCase();
private BinaryOperationExpression createPatternMatchingOperationSegment(final AExprContext ctx) { String operator = getOriginalText(ctx.patternMatchingOperator()).toUpperCase(); ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ListExpression right = new ListExpression(ctx.aExpr(1).start.getStartIndex(), ctx.aExpr().get(ctx.aExpr().size() - 1).stop.getStopIndex()); for (int i = 1; i < ctx.aExpr().size(); i++) { right.getItems().add((ExpressionSegment) visit(ctx.aExpr().get(i))); } String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); }
class PostgreSQLStatementSQLVisitor extends PostgreSQLStatementParserBaseVisitor<ASTNode> { private final Collection<ParameterMarkerSegment> parameterMarkerSegments = new LinkedList<>(); public PostgreSQLStatementSQLVisitor(final Properties props) { } @Override public final ASTNode visitParameterMarker(final ParameterMarkerContext ctx) { if (null == ctx.DOLLAR_()) { return new ParameterMarkerValue(parameterMarkerSegments.size(), ParameterMarkerType.QUESTION); } return new ParameterMarkerValue(new NumberLiteralValue(ctx.NUMBER_().getText()).getValue().intValue() - 1, ParameterMarkerType.DOLLAR); } @Override public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) { return new NumberLiteralValue(ctx.NUMBER_().getText()); } @Override public final ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWordContext unreservedWord = ctx.unreservedWord(); return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText()); } @Override public final ASTNode visitUnreservedWord(final UnreservedWordContext ctx) { return new IdentifierValue(ctx.getText()); } @Override public final ASTNode visitSchemaName(final SchemaNameContext ctx) { return visit(ctx.identifier()); } @Override public final ASTNode visitTableName(final TableNameContext ctx) { SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(), ctx.name().getStop().getStopIndex(), (IdentifierValue) visit(ctx.name()))); OwnerContext owner = ctx.owner(); if (null != owner) { result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier()))); } return result; } @Override public final ASTNode visitColumnName(final ColumnNameContext ctx) { ColumnSegment result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.name())); OwnerContext owner = ctx.owner(); if (null != owner) { 
result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier()))); } return result; } @Override public final ASTNode visitIndexName(final IndexNameContext ctx) { IndexNameSegment indexName = new IndexNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier())); return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName); } @Override public final ASTNode visitConstraintName(final ConstraintNameContext ctx) { return new ConstraintSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier())); } @Override public final ASTNode visitTableNames(final TableNamesContext ctx) { CollectionValue<SimpleTableSegment> result = new CollectionValue<>(); for (TableNameContext each : ctx.tableName()) { result.getValue().add((SimpleTableSegment) visit(each)); } return result; } @Override public final ASTNode visitColumnNames(final ColumnNamesContext ctx) { CollectionValue<ColumnSegment> result = new CollectionValue<>(); for (ColumnNameContext each : ctx.columnName()) { result.getValue().add((ColumnSegment) visit(each)); } return result; } @Override public ASTNode visitAExpr(final AExprContext ctx) { if (null != ctx.cExpr()) { return visit(ctx.cExpr()); } if (null != ctx.TYPE_CAST_()) { return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText()); } if (null != ctx.BETWEEN()) { return createBetweenSegment(ctx); } if (null != ctx.IN()) { return createInSegment(ctx); } if (null != ctx.patternMatchingOperator()) { return createPatternMatchingOperationSegment(ctx); } Optional<String> binaryOperator = findBinaryOperator(ctx); if (binaryOperator.isPresent()) { return createBinaryOperationSegment(ctx, binaryOperator.get()); } super.visitAExpr(ctx); String text = 
ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text); } private Optional<String> findBinaryOperator(final AExprContext ctx) { if (null != ctx.IS()) { return Optional.of(ctx.IS().getText()); } if (null != ctx.ISNULL()) { return Optional.of("IS"); } if (1 == ctx.aExpr().size()) { return Optional.empty(); } if (null != ctx.comparisonOperator()) { return Optional.of(ctx.comparisonOperator().getText()); } if (null != ctx.andOperator()) { return Optional.of(ctx.andOperator().getText()); } if (null != ctx.orOperator()) { return Optional.of(ctx.orOperator().getText()); } if (null != ctx.PLUS_()) { return Optional.of(ctx.PLUS_().getText()); } if (null != ctx.MINUS_()) { return Optional.of(ctx.MINUS_().getText()); } if (null != ctx.ASTERISK_()) { return Optional.of(ctx.ASTERISK_().getText()); } if (null != ctx.SLASH_()) { return Optional.of(ctx.SLASH_().getText()); } return Optional.empty(); } private BinaryOperationExpression createBinaryOperationSegment(final AExprContext ctx, final String operator) { if ("IS".equalsIgnoreCase(operator)) { ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); String rightText; ExpressionSegment right; if (null != ctx.IS()) { rightText = ctx.start.getInputStream().getText(new Interval(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex())).trim(); right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText); } else { rightText = ctx.start.getInputStream().getText(new Interval(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex())).trim(); right = new LiteralExpressionSegment(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex(), rightText); } return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, "IS", 
ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()))); } ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ExpressionSegment right = (ExpressionSegment) visit(ctx.aExpr(1)); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @Override public ASTNode visitCExpr(final CExprContext ctx) { if (null != ctx.columnref()) { return visit(ctx.columnref()); } if (null != ctx.parameterMarker()) { ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker()); ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), parameterMarker.getValue(), parameterMarker.getType()); parameterMarkerSegments.add(result); return result; } if (null != ctx.aexprConst()) { return visit(ctx.aexprConst()); } if (null != ctx.aExpr()) { return visit(ctx.aExpr()); } if (null != ctx.funcExpr()) { return visit(ctx.funcExpr()); } if (null != ctx.selectWithParens()) { return createSubqueryExpressionSegment(ctx); } if (null != ctx.caseExpr()) { return visit(ctx.caseExpr()); } super.visitCExpr(ctx); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), text); } private ExpressionSegment createSubqueryExpressionSegment(final CExprContext ctx) { SubquerySegment subquerySegment = new SubquerySegment(ctx.selectWithParens().getStart().getStartIndex(), ctx.selectWithParens().getStop().getStopIndex(), (PostgreSQLSelectStatement) visit(ctx.selectWithParens())); return null == ctx.EXISTS() ? 
new SubqueryExpressionSegment(subquerySegment) : new ExistsSubqueryExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment); } @Override public ASTNode visitCaseExpr(final CaseExprContext ctx) { Collection<ExpressionSegment> whenExprs = new LinkedList<>(); Collection<ExpressionSegment> thenExprs = new LinkedList<>(); for (WhenClauseContext each : ctx.whenClauseList().whenClause()) { whenExprs.add((ExpressionSegment) visit(each.aExpr(0))); thenExprs.add((ExpressionSegment) visit(each.aExpr(1))); } ExpressionSegment caseExpr = null == ctx.caseArg() ? null : (ExpressionSegment) visit(ctx.caseArg().aExpr()); ExpressionSegment elseExpr = null == ctx.caseDefault() ? null : (ExpressionSegment) visit(ctx.caseDefault().aExpr()); return new CaseWhenExpression(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), caseExpr, whenExprs, thenExprs, elseExpr); } @Override public ASTNode visitFuncExpr(final FuncExprContext ctx) { if (null != ctx.functionExprCommonSubexpr()) { return visit(ctx.functionExprCommonSubexpr()); } Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class)); String aggregationType = ctx.funcApplication().funcName().getText(); if (AggregationType.isAggregationType(aggregationType)) { return createAggregationSegment(ctx.funcApplication(), aggregationType, expressionSegments); } FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.funcApplication().funcName().getText(), getOriginalText(ctx)); result.getParameters().addAll(expressionSegments); return result; } @Override public ASTNode visitFunctionExprCommonSubexpr(final FunctionExprCommonSubexprContext ctx) { if (null != ctx.CAST()) { return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText()); } FunctionSegment result = new 
FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getChild(0).getText(), getOriginalText(ctx)); Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class)); result.getParameters().addAll(expressionSegments); return result; } private <T extends ParseTree> Collection<T> getTargetRuleContextFromParseTree(final ParseTree parseTree, final Class<? extends T> clazz) { Collection<T> result = new LinkedList<>(); for (int index = 0; index < parseTree.getChildCount(); index++) { ParseTree child = parseTree.getChild(index); if (clazz.isInstance(child)) { result.add(clazz.cast(child)); } else { result.addAll(getTargetRuleContextFromParseTree(child, clazz)); } } return result; } private Collection<ExpressionSegment> getExpressionSegments(final Collection<AExprContext> aExprContexts) { Collection<ExpressionSegment> result = new LinkedList<>(); for (AExprContext each : aExprContexts) { result.add((ExpressionSegment) visit(each)); } return result; } @Override public ASTNode visitAexprConst(final AexprConstContext ctx) { LiteralValue<?> value; if (null != ctx.numberConst()) { value = new NumberLiteralValue(ctx.numberConst().getText()); } else if (null != ctx.STRING_()) { value = new StringLiteralValue(ctx.STRING_().getText()); } else if (null != ctx.FALSE()) { value = new BooleanLiteralValue(ctx.FALSE().getText()); } else if (null != ctx.TRUE()) { value = new BooleanLiteralValue(ctx.TRUE().getText()); } else if (null != ctx.NULL()) { value = new NullLiteralValue(ctx.getText()); } else { value = new OtherLiteralValue(ctx.getText()); } if (null != ctx.constTypeName() || null != ctx.funcName() && null == ctx.LP_()) { LiteralExpressionSegment expression = new LiteralExpressionSegment(ctx.STRING_().getSymbol().getStartIndex(), ctx.STRING_().getSymbol().getStopIndex(), value.getValue().toString()); String dataType = null != ctx.constTypeName() ? 
ctx.constTypeName().getText() : ctx.funcName().getText(); return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), expression, dataType); } return SQLUtils.createLiteralExpression(value, ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } @Override public ASTNode visitColumnref(final ColumnrefContext ctx) { if (null != ctx.indirection()) { AttrNameContext attrName = ctx.indirection().indirectionEl().attrName(); ColumnSegment result = new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(attrName.getText())); OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); result.setOwner(owner); return result; } return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); } private InExpression createInSegment(final AExprContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ExpressionSegment right = createInExpressionSegment(ctx.inExpr()); boolean not = null != ctx.NOT(); return new InExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, not); } @SuppressWarnings("unchecked") private ExpressionSegment createInExpressionSegment(final InExprContext ctx) { if (null != ctx.selectWithParens()) { PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens()); SubquerySegment subquerySegment = new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), select); return new SubqueryExpressionSegment(subquerySegment); } ListExpression result = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex()); result.getItems().addAll(((CollectionValue<ExpressionSegment>) visit(ctx.exprList())).getValue()); return result; } @SuppressWarnings("unchecked") @Override public ASTNode visitExprList(final 
ExprListContext ctx) { CollectionValue<ExpressionSegment> result = new CollectionValue<>(); if (null != ctx.exprList()) { result.combine((CollectionValue<ExpressionSegment>) visitExprList(ctx.exprList())); } result.getValue().add((ExpressionSegment) visit(ctx.aExpr())); return result; } private BetweenExpression createBetweenSegment(final AExprContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0)); ExpressionSegment between = (ExpressionSegment) visit(ctx.bExpr()); ExpressionSegment and = (ExpressionSegment) visit(ctx.aExpr(1)); boolean not = null != ctx.NOT(); return new BetweenExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, between, and, not); } @Override public ASTNode visitBExpr(final BExprContext ctx) { if (null != ctx.cExpr()) { return visit(ctx.cExpr()); } if (null != ctx.TYPE_CAST_()) { return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.bExpr(0)), ctx.typeName().getText()); } if (null != ctx.qualOp()) { ExpressionSegment left = (ExpressionSegment) visit(ctx.bExpr(0)); ExpressionSegment right = (ExpressionSegment) visit(ctx.bExpr(1)); String operator = ctx.qualOp().getText(); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } for (BExprContext each : ctx.bExpr()) { visit(each); } return new LiteralExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); } private ProjectionSegment createAggregationSegment(final FuncApplicationContext ctx, final String aggregationType, final Collection<ExpressionSegment> expressionSegments) { AggregationType type = AggregationType.valueOf(aggregationType.toUpperCase()); String innerExpression = ctx.start.getInputStream().getText(new Interval(ctx.LP_().getSymbol().getStartIndex(), 
ctx.stop.getStopIndex())); if (null == ctx.DISTINCT()) { AggregationProjectionSegment result = new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression); result.getParameters().addAll(expressionSegments); return result; } AggregationDistinctProjectionSegment result = new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression, getDistinctExpression(ctx)); result.getParameters().addAll(expressionSegments); return result; } private String getDistinctExpression(final FuncApplicationContext ctx) { StringBuilder result = new StringBuilder(); result.append(ctx.funcArgList().getText()); if (null != ctx.sortClause()) { result.append(ctx.sortClause().getText()); } return result.toString(); } @Override public final ASTNode visitDataTypeName(final DataTypeNameContext ctx) { IdentifierContext identifierContext = ctx.identifier(); if (null != identifierContext) { return new KeywordValue(identifierContext.getText()); } Collection<String> dataTypeNames = new LinkedList<>(); for (int i = 0; i < ctx.getChildCount(); i++) { dataTypeNames.add(ctx.getChild(i).getText()); } return new KeywordValue(String.join(" ", dataTypeNames)); } @Override public final ASTNode visitSortClause(final SortClauseContext ctx) { Collection<OrderByItemSegment> items = new LinkedList<>(); for (SortbyContext each : ctx.sortbyList().sortby()) { items.add((OrderByItemSegment) visit(each)); } return new OrderBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), items); } @Override public final ASTNode visitSortby(final SortbyContext ctx) { OrderDirection orderDirection = null != ctx.ascDesc() ? 
generateOrderDirection(ctx.ascDesc()) : OrderDirection.ASC; NullsOrderType nullsOrderType = generateNullsOrderType(ctx.nullsOrder()); ASTNode expr = visit(ctx.aExpr()); if (expr instanceof ColumnSegment) { ColumnSegment column = (ColumnSegment) expr; return new ColumnOrderByItemSegment(column, orderDirection, nullsOrderType); } if (expr instanceof LiteralExpressionSegment) { LiteralExpressionSegment index = (LiteralExpressionSegment) expr; return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(), Integer.parseInt(index.getLiterals().toString()), orderDirection, nullsOrderType); } if (expr instanceof ExpressionSegment) { return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()), orderDirection, nullsOrderType, (ExpressionSegment) expr); } return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()), orderDirection, nullsOrderType); } private NullsOrderType generateNullsOrderType(final NullsOrderContext ctx) { if (null == ctx) { return null; } return null == ctx.FIRST() ? NullsOrderType.LAST : NullsOrderType.FIRST; } private OrderDirection generateOrderDirection(final AscDescContext ctx) { return null == ctx.DESC() ? 
OrderDirection.ASC : OrderDirection.DESC; } @Override public final ASTNode visitDataType(final DataTypeContext ctx) { DataTypeSegment result = new DataTypeSegment(); result.setDataTypeName(((KeywordValue) visit(ctx.dataTypeName())).getValue()); result.setStartIndex(ctx.start.getStartIndex()); result.setStopIndex(ctx.stop.getStopIndex()); if (null != ctx.dataTypeLength()) { DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.dataTypeLength()); result.setDataLength(dataTypeLengthSegment); } return result; } @Override public final ASTNode visitDataTypeLength(final DataTypeLengthContext ctx) { DataTypeLengthSegment result = new DataTypeLengthSegment(); result.setStartIndex(ctx.start.getStartIndex()); result.setStopIndex(ctx.stop.getStartIndex()); List<TerminalNode> numbers = ctx.NUMBER_(); if (1 == numbers.size()) { result.setPrecision(Integer.parseInt(numbers.get(0).getText())); } if (2 == numbers.size()) { result.setPrecision(Integer.parseInt(numbers.get(0).getText())); result.setScale(Integer.parseInt(numbers.get(1).getText())); } return result; } @Override public ASTNode visitInsert(final InsertContext ctx) { PostgreSQLInsertStatement result = (PostgreSQLInsertStatement) visit(ctx.insertRest()); result.setTable((SimpleTableSegment) visit(ctx.insertTarget())); if (null != ctx.optOnConflict()) { result.setOnDuplicateKeyColumnsSegment((OnDuplicateKeyColumnsSegment) visit(ctx.optOnConflict())); } if (null != ctx.returningClause()) { result.setReturningSegment((ReturningSegment) visit(ctx.returningClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitOptOnConflict(final OptOnConflictContext ctx) { SetClauseListContext setClauseListContext = ctx.setClauseList(); Collection<AssignmentSegment> assignments = ((SetAssignmentSegment) visit(setClauseListContext)).getAssignments(); return new OnDuplicateKeyColumnsSegment(ctx.getStart().getStartIndex(), 
ctx.getStop().getStopIndex(), assignments); } @Override public ASTNode visitInsertTarget(final InsertTargetContext ctx) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.qualifiedName()); if (null != ctx.AS()) { ColIdContext colId = ctx.colId(); result.setAlias(new AliasSegment(colId.start.getStartIndex(), colId.stop.getStopIndex(), new IdentifierValue(colId.getText()))); } return result; } @Override public ASTNode visitQualifiedNameList(final QualifiedNameListContext ctx) { CollectionValue<SimpleTableSegment> result = new CollectionValue<>(); if (null != ctx.qualifiedName()) { result.getValue().add((SimpleTableSegment) visit(ctx.qualifiedName())); } if (null != ctx.qualifiedNameList()) { result.combine((CollectionValue) visit(ctx.qualifiedNameList())); } return result; } @Override public ASTNode visitQualifiedName(final QualifiedNameContext ctx) { if (null != ctx.indirection()) { AttrNameContext attrName = ctx.indirection().indirectionEl().attrName(); TableNameSegment tableName = new TableNameSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText())); OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); SimpleTableSegment result = new SimpleTableSegment(tableName); if (null != ctx.indirection().indirection()) { OwnerSegment tableOwner = createTableOwner(ctx.indirection().indirection()); tableOwner.setOwner(owner); result.setOwner(tableOwner); } else { result.setOwner(owner); } return result; } return new SimpleTableSegment(new TableNameSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); } @Override public ASTNode visitInsertRest(final InsertRestContext ctx) { PostgreSQLInsertStatement result = new PostgreSQLInsertStatement(); if (null == ctx.insertColumnList()) { result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex() 
- 1, ctx.start.getStartIndex() - 1, Collections.emptyList())); } else { InsertColumnListContext insertColumns = ctx.insertColumnList(); CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(insertColumns); InsertColumnsSegment insertColumnsSegment = new InsertColumnsSegment(insertColumns.start.getStartIndex() - 1, insertColumns.stop.getStopIndex() + 1, columns.getValue()); result.setInsertColumns(insertColumnsSegment); } ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); if (null == valuesClause) { PostgreSQLSelectStatement selectStatement = (PostgreSQLSelectStatement) visit(ctx.select()); result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); } else { result.getValues().addAll(createInsertValuesSegments(valuesClause)); } return result; } @Override public ASTNode visitInsertColumnList(final InsertColumnListContext ctx) { CollectionValue<ColumnSegment> result = new CollectionValue<>(); if (null != ctx.insertColumnList()) { result.getValue().addAll(((CollectionValue<ColumnSegment>) visit(ctx.insertColumnList())).getValue()); } result.getValue().add((ColumnSegment) visit(ctx.insertColumnItem())); return result; } @Override public ASTNode visitInsertColumnItem(final InsertColumnItemContext ctx) { if (null == ctx.optIndirection().indirectionEl()) { return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); } ColumnSegment result = new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.optIndirection().stop.getStopIndex(), new IdentifierValue(ctx.optIndirection().indirectionEl().attrName().getText())); result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } private Collection<InsertValuesSegment> 
createInsertValuesSegments(final ValuesClauseContext ctx) { Collection<InsertValuesSegment> result = new LinkedList<>(); if (null != ctx.valuesClause()) { Collection<InsertValuesSegment> expressions = createInsertValuesSegments(ctx.valuesClause()); result.addAll(expressions); } Collection<ExpressionSegment> expressions = createInsertValuesSegments(ctx.exprList()); InsertValuesSegment insertValuesSegment = new InsertValuesSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), (List<ExpressionSegment>) expressions); result.add(insertValuesSegment); return result; } private Collection<ExpressionSegment> createInsertValuesSegments(final ExprListContext ctx) { Collection<ExpressionSegment> result = new LinkedList<>(); if (null != ctx.exprList()) { Collection<ExpressionSegment> tmpResult = createInsertValuesSegments(ctx.exprList()); result.addAll(tmpResult); } ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); result.add(expr); return result; } private Collection<AssignmentSegment> generateAssignmentSegments(final SetClauseListContext ctx) { Collection<AssignmentSegment> result = new LinkedList<>(); if (null != ctx.setClauseList()) { Collection<AssignmentSegment> tmpResult = generateAssignmentSegments(ctx.setClauseList()); result.addAll(tmpResult); } AssignmentSegment assignmentSegment = (AssignmentSegment) visit(ctx.setClause()); result.add(assignmentSegment); return result; } @Override public ASTNode visitSetClause(final SetClauseContext ctx) { ColumnSegment columnSegment = (ColumnSegment) visit(ctx.setTarget()); List<ColumnSegment> columnSegments = new LinkedList<>(); columnSegments.add(columnSegment); ExpressionSegment expressionSegment = (ExpressionSegment) visit(ctx.aExpr()); return new ColumnAssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), columnSegments, expressionSegment); } @Override public ASTNode visitSetTarget(final SetTargetContext ctx) { IdentifierValue identifierValue = new 
IdentifierValue(ctx.colId().getText()); return new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), identifierValue); } @Override public ASTNode visitRelationExprOptAlias(final RelationExprOptAliasContext ctx) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName()); if (null != ctx.colId()) { result.setAlias(new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); } return result; } @Override public ASTNode visitUpdate(final UpdateContext ctx) { PostgreSQLUpdateStatement result = new PostgreSQLUpdateStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); result.setSetAssignment((SetAssignmentSegment) visit(ctx.setClauseList())); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitSetClauseList(final SetClauseListContext ctx) { Collection<AssignmentSegment> assignments = generateAssignmentSegments(ctx); return new SetAssignmentSegment(ctx.start.getStartIndex() - 4, ctx.stop.getStopIndex(), assignments); } @Override public ASTNode visitDelete(final DeleteContext ctx) { PostgreSQLDeleteStatement result = new PostgreSQLDeleteStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitWhereOrCurrentClause(final WhereOrCurrentClauseContext ctx) { return visit(ctx.whereClause()); } @Override public ASTNode visitSelect(final SelectContext ctx) { PostgreSQLSelectStatement result = 
(PostgreSQLSelectStatement) visit(ctx.selectNoParens());
        // Attach all parameter markers ($1 / ?) collected while visiting the select body.
        result.getParameterMarkerSegments().addAll(getParameterMarkerSegments());
        return result;
    }
    
    // Builds a SELECT statement without surrounding parentheses: core select clause
    // plus optional ORDER BY, LIMIT/OFFSET and FOR UPDATE/SHARE locking clauses.
    @Override
    public ASTNode visitSelectNoParens(final SelectNoParensContext ctx) {
        PostgreSQLSelectStatement result = (PostgreSQLSelectStatement) visit(ctx.selectClauseN());
        if (null != ctx.sortClause()) {
            OrderBySegment orderBySegment = (OrderBySegment) visit(ctx.sortClause());
            result.setOrderBy(orderBySegment);
        }
        if (null != ctx.selectLimit()) {
            LimitSegment limitSegment = (LimitSegment) visit(ctx.selectLimit());
            result.setLimit(limitSegment);
        }
        if (null != ctx.forLockingClause()) {
            LockSegment lockSegment = (LockSegment) visit(ctx.forLockingClause());
            result.setLock(lockSegment);
        }
        return result;
    }
    
    // FOR UPDATE / FOR SHARE clause: only the source span is recorded.
    @Override
    public ASTNode visitForLockingClause(final ForLockingClauseContext ctx) {
        return new LockSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
    }
    
    // Unwraps arbitrarily nested parentheses around a SELECT.
    @Override
    public ASTNode visitSelectWithParens(final SelectWithParensContext ctx) {
        if (null != ctx.selectWithParens()) {
            return visit(ctx.selectWithParens());
        }
        return visit(ctx.selectNoParens());
    }
    
    // Core select clause: either a simple SELECT, a set operation (UNION/INTERSECT/EXCEPT)
    // combining two select clauses, or a parenthesized SELECT.
    @Override
    public ASTNode visitSelectClauseN(final SelectClauseNContext ctx) {
        if (null != ctx.simpleSelect()) {
            return visit(ctx.simpleSelect());
        }
        if (null != ctx.selectClauseN() && !ctx.selectClauseN().isEmpty()) {
            PostgreSQLSelectStatement result = new PostgreSQLSelectStatement();
            PostgreSQLSelectStatement left = (PostgreSQLSelectStatement) visit(ctx.selectClauseN(0));
            // The combined statement reuses the left operand's projections and FROM clause.
            result.setProjections(left.getProjections());
            result.setFrom(left.getFrom());
            // ctx.getChild(1) is the set-operation keyword token (UNION/INTERSECT/EXCEPT).
            CombineSegment combineSegment = new CombineSegment(((TerminalNode) ctx.getChild(1)).getSymbol().getStartIndex(),
                    ctx.getStop().getStopIndex(), left, getCombineType(ctx), (PostgreSQLSelectStatement) visit(ctx.selectClauseN(1)));
            result.setCombine(combineSegment);
            return result;
        }
        return visit(ctx.selectWithParens());
    }
    
    // Maps UNION/INTERSECT/EXCEPT [ALL|DISTINCT] to the CombineType enum.
    // Absent ALL/DISTINCT defaults to the DISTINCT variant, matching PostgreSQL semantics.
    private CombineType getCombineType(final SelectClauseNContext ctx) {
        boolean isDistinct = null == ctx.allOrDistinct() || null != ctx.allOrDistinct().DISTINCT();
        if (null != ctx.UNION()) {
            return isDistinct ? CombineType.UNION : CombineType.UNION_ALL;
        }
        if (null != ctx.INTERSECT()) {
            return isDistinct ? CombineType.INTERSECT : CombineType.INTERSECT_ALL;
        }
        return isDistinct ? CombineType.EXCEPT : CombineType.EXCEPT_ALL;
    }
    
    // A plain SELECT: projections, FROM, WHERE, GROUP BY, HAVING and WINDOW clauses.
    @Override
    public ASTNode visitSimpleSelect(final SimpleSelectContext ctx) {
        PostgreSQLSelectStatement result = new PostgreSQLSelectStatement();
        if (null == ctx.targetList()) {
            // No target list (e.g. "SELECT" inside certain constructs): placeholder span (-1, -1).
            result.setProjections(new ProjectionsSegment(-1, -1));
        } else {
            ProjectionsSegment projects = (ProjectionsSegment) visit(ctx.targetList());
            if (null != ctx.distinctClause()) {
                projects.setDistinctRow(true);
            }
            result.setProjections(projects);
        }
        if (null != ctx.fromClause()) {
            TableSegment tableSegment = (TableSegment) visit(ctx.fromClause());
            result.setFrom(tableSegment);
        }
        if (null != ctx.whereClause()) {
            result.setWhere((WhereSegment) visit(ctx.whereClause()));
        }
        if (null != ctx.groupClause()) {
            result.setGroupBy((GroupBySegment) visit(ctx.groupClause()));
        }
        if (null != ctx.havingClause()) {
            result.setHaving((HavingSegment) visit(ctx.havingClause()));
        }
        if (null != ctx.windowClause()) {
            result.setWindow((WindowSegment) visit(ctx.windowClause()));
        }
        return result;
    }
    
    @Override
    public ASTNode visitHavingClause(final HavingClauseContext ctx) {
        ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr());
        return new HavingSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr);
    }
    
    // WINDOW clause: only the source span is retained, the definitions are not parsed here.
    @Override
    public ASTNode visitWindowClause(final WindowClauseContext ctx) {
        return new WindowSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
    }
    
    @Override
    public ASTNode visitGroupClause(final GroupClauseContext ctx) {
        Collection<OrderByItemSegment> items = new LinkedList<>();
        for (GroupByItemContext each : ctx.groupByList().groupByItem()) {
            items.add((OrderByItemSegment) visit(each));
        }
        return new GroupBySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), items);
    }
    
    // A single GROUP BY item: column reference, ordinal (numeric literal) or arbitrary expression.
    @Override
    public ASTNode visitGroupByItem(final GroupByItemContext ctx) {
        if (null != ctx.aExpr()) {
            ASTNode astNode = visit(ctx.aExpr());
            if (astNode instanceof ColumnSegment) {
                return new ColumnOrderByItemSegment((ColumnSegment) astNode, OrderDirection.ASC, null);
            }
            if (astNode instanceof LiteralExpressionSegment) {
                // Numeric literal is treated as a 1-based projection index ("GROUP BY 2").
                LiteralExpressionSegment index = (LiteralExpressionSegment) astNode;
                return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(),
                        Integer.parseInt(index.getLiterals().toString()), OrderDirection.ASC, null);
            }
            return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null);
        }
        return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null);
    }
    
    // Target list is left-recursive in the grammar: earlier items come from the nested list.
    @Override
    public ASTNode visitTargetList(final TargetListContext ctx) {
        ProjectionsSegment result = new ProjectionsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
        if (null != ctx.targetList()) {
            ProjectionsSegment projections = (ProjectionsSegment) visit(ctx.targetList());
            result.getProjections().addAll(projections.getProjections());
        }
        ProjectionSegment projection = (ProjectionSegment) visit(ctx.targetEl());
        result.getProjections().add(projection);
        return result;
    }
    
    // One projection, optionally aliased ("expr AS alias" / "expr alias").
    @Override
    public ASTNode visitTargetEl(final TargetElContext ctx) {
        ProjectionSegment result = createProjectionSegment(ctx, ctx.aExpr());
        if (null != ctx.identifier()) {
            ((AliasAvailable) result).setAlias(new AliasSegment(ctx.identifier().start.getStartIndex(), ctx.identifier().stop.getStopIndex(),
                    new IdentifierValue(ctx.identifier().getText())));
        }
        return result;
    }
    
    // Dispatches a projection: "*", "owner.*", or an expression.
    private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr) {
        if (null != ctx.ASTERISK_()) {
            return new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
        }
        if (null != ctx.DOT_ASTERISK_()) {
            ShorthandProjectionSegment result = new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
            result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())));
            return result;
        }
        if (null != ctx.aExpr()) {
            ASTNode projection = visit(ctx.aExpr());
            return createProjectionSegment(ctx, expr, projection);
        }
        return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null);
    }
    
    // Wraps an already-visited expression node in the appropriate projection segment type.
    private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr, final ASTNode projection) {
        if (projection instanceof ColumnSegment) {
            return new ColumnProjectionSegment((ColumnSegment) projection);
        }
        if (projection instanceof AggregationProjectionSegment) {
            return (AggregationProjectionSegment) projection;
        }
        if (projection instanceof SubqueryExpressionSegment) {
            SubqueryExpressionSegment subqueryExpression = (SubqueryExpressionSegment) projection;
            // Recover the subquery's original text from the token stream for the projection label.
            String text = ctx.start.getInputStream().getText(new Interval(subqueryExpression.getStartIndex(), subqueryExpression.getStopIndex()));
            return new SubqueryProjectionSegment(subqueryExpression.getSubquery(), text);
        }
        if (projection instanceof ExistsSubqueryExpression) {
            ExistsSubqueryExpression existsSubqueryExpression = (ExistsSubqueryExpression) projection;
            String text = ctx.start.getInputStream().getText(new Interval(existsSubqueryExpression.getStartIndex(), existsSubqueryExpression.getStopIndex()));
            return new SubqueryProjectionSegment(existsSubqueryExpression.getSubquery(), text);
        }
        if (projection instanceof ExpressionSegment) {
            return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), (ExpressionSegment) projection);
        }
        return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null);
    }
    
    @Override
    public ASTNode visitFromClause(final FromClauseContext ctx) {
        return visit(ctx.fromList());
    }
    
    // Comma-separated FROM items are modeled as COMMA joins, left-associated.
    @Override
    public ASTNode visitFromList(final FromListContext ctx) {
        if (null != ctx.fromList()) {
            JoinTableSegment result = new JoinTableSegment();
            result.setStartIndex(ctx.start.getStartIndex());
            result.setStopIndex(ctx.stop.getStopIndex());
            result.setLeft((TableSegment) visit(ctx.fromList()));
            result.setRight((TableSegment) visit(ctx.tableReference()));
            result.setJoinType(JoinType.COMMA.name());
            return result;
        }
        return visit(ctx.tableReference());
    }
    
    // A single FROM item: plain table, subquery, or (possibly parenthesized) join.
    @Override
    public ASTNode visitTableReference(final TableReferenceContext ctx) {
        if (null != ctx.relationExpr()) {
            SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName());
            if (null != ctx.aliasClause()) {
                result.setAlias((AliasSegment) visit(ctx.aliasClause()));
            }
            return result;
        }
        if (null != ctx.selectWithParens()) {
            PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens());
            SubquerySegment subquery = new SubquerySegment(ctx.selectWithParens().start.getStartIndex(), ctx.selectWithParens().stop.getStopIndex(), select);
            AliasSegment alias = null != ctx.aliasClause() ? (AliasSegment) visit(ctx.aliasClause()) : null;
            SubqueryTableSegment result = new SubqueryTableSegment(subquery);
            result.setAlias(alias);
            return result;
        }
        if (null == ctx.tableReference()) {
            // Fallback for grammar alternatives not handled by this visitor.
            TableNameSegment tableName = new TableNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue("not support"));
            return new SimpleTableSegment(tableName);
        }
        JoinTableSegment result = new JoinTableSegment();
        result.setLeft((TableSegment) visit(ctx.tableReference()));
        int startIndex = null != ctx.LP_() ? ctx.LP_().getSymbol().getStartIndex() : ctx.tableReference().start.getStartIndex();
        int stopIndex = 0;
        AliasSegment alias = null;
        if (null == ctx.aliasClause()) {
            stopIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.tableReference().start.getStopIndex();
        } else {
            alias = (AliasSegment) visit(ctx.aliasClause());
            // NOTE(review): in this branch stopIndex stays 0 and startIndex is assigned a *stop*
            // index — looks inconsistent with the branch above; confirm against upstream intent.
            startIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.joinedTable().stop.getStopIndex();
        }
        result.setStartIndex(startIndex);
        result.setStopIndex(stopIndex);
        result = visitJoinedTable(ctx.joinedTable(), result);
        result.setAlias(alias);
        return result;
    }
    
    // Fills right side, join type, NATURAL flag and optional ON/USING qualifier.
    private JoinTableSegment visitJoinedTable(final JoinedTableContext ctx, final JoinTableSegment tableSegment) {
        TableSegment right = (TableSegment) visit(ctx.tableReference());
        tableSegment.setRight(right);
        tableSegment.setJoinType(getJoinType(ctx));
        tableSegment.setNatural(null != ctx.naturalJoinType());
        return null != ctx.joinQual() ? visitJoinQual(ctx.joinQual(), tableSegment) : tableSegment;
    }
    
    private String getJoinType(final JoinedTableContext ctx) {
        if (null != ctx.crossJoinType()) {
            return JoinType.CROSS.name();
        }
        if (null != ctx.innerJoinType()) {
            return JoinType.INNER.name();
        }
        if (null != ctx.outerJoinType()) {
            return getOutJoinType(ctx.outerJoinType());
        }
        if (null != ctx.naturalJoinType()) {
            return getNaturalJoinType(ctx.naturalJoinType());
        }
        return JoinType.COMMA.name();
    }
    
    // NATURAL [INNER|FULL|LEFT|RIGHT] JOIN; bare NATURAL JOIN defaults to INNER.
    private static String getNaturalJoinType(final NaturalJoinTypeContext ctx) {
        if (null != ctx.INNER()) {
            return JoinType.INNER.name();
        }
        if (null != ctx.FULL()) {
            return JoinType.FULL.name();
        }
        if (null != ctx.LEFT()) {
            return JoinType.LEFT.name();
        }
        if (null != ctx.RIGHT()) {
            return JoinType.RIGHT.name();
        }
        return JoinType.INNER.name();
    }
    
    private static String getOutJoinType(final OuterJoinTypeContext ctx) {
        if (null == ctx.FULL()) {
            return null != ctx.LEFT() ? JoinType.LEFT.name() : JoinType.RIGHT.name();
        }
        return JoinType.FULL.name();
    }
    
    // Applies either an ON condition or a USING column list to the join.
    private JoinTableSegment visitJoinQual(final JoinQualContext ctx, final JoinTableSegment joinTableSource) {
        if (null != ctx.aExpr()) {
            ExpressionSegment condition = (ExpressionSegment) visit(ctx.aExpr());
            joinTableSource.setCondition(condition);
        }
        if (null != ctx.USING()) {
            joinTableSource.setUsing(generateUsingColumn(ctx.nameList()));
        }
        return joinTableSource;
    }
    
    // Flattens the recursive nameList into columns; recursion first keeps source order.
    private List<ColumnSegment> generateUsingColumn(final NameListContext ctx) {
        List<ColumnSegment> result = new ArrayList<>();
        if (null != ctx.nameList()) {
            result.addAll(generateUsingColumn(ctx.nameList()));
        }
        if (null != ctx.name()) {
            result.add(new ColumnSegment(ctx.name().start.getStartIndex(), ctx.name().stop.getStopIndex(), new IdentifierValue(ctx.name().getText())));
        }
        return result;
    }
    
    // Alias may carry a column list: "t (a, b)" — the whole "t(a,b)" text becomes the alias value.
    @Override
    public ASTNode visitAliasClause(final AliasClauseContext ctx) {
        StringBuilder aliasName = new StringBuilder(ctx.colId().getText());
        if (null != ctx.nameList()) {
            aliasName.append(ctx.LP_().getText());
            aliasName.append(ctx.nameList().getText());
            aliasName.append(ctx.RP_().getText());
        }
        return new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(aliasName.toString()));
    }
    
    private OwnerSegment createTableOwner(final IndirectionContext ctx) {
        AttrNameContext attrName = ctx.indirectionEl().attrName();
        return new OwnerSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText()));
    }
    
    @Override
    public ASTNode visitWhereClause(final WhereClauseContext ctx) {
        ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr());
        return new WhereSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr);
    }
    
    @Override
    public ASTNode visitSelectLimit(final SelectLimitContext ctx) {
        if (null != ctx.limitClause() && null != ctx.offsetClause()) {
            return createLimitSegmentWhenLimitAndOffset(ctx);
        }
        return createLimitSegmentWhenRowCountOrOffsetAbsent(ctx);
    }
    
    // LIMIT value: "LIMIT ALL" yields null (no limit); otherwise parameter marker or number.
    @Override
    public ASTNode visitSelectLimitValue(final SelectLimitValueContext ctx) {
        if (null != ctx.ALL()) {
            return null;
        }
        ASTNode astNode = visit(ctx.aExpr());
        if (astNode instanceof ParameterMarkerExpressionSegment) {
            return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
                    ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex());
        }
        return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString()));
    }
    
    @Override
    public ASTNode visitSelectOffsetValue(final SelectOffsetValueContext ctx) {
        ASTNode astNode = visit(ctx.aExpr());
        if (astNode instanceof ParameterMarkerExpressionSegment) {
            return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
                    ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex());
        }
        return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString()));
    }
    
    // FETCH FIRST n value.
    // NOTE(review): the instanceof test is against ParameterMarkerLimitValueSegment but the cast
    // is to ParameterMarkerExpressionSegment — these differ from the sibling methods above;
    // if the branch ever matched, the cast would fail. Confirm against upstream.
    @Override
    public ASTNode visitSelectFetchFirstValue(final SelectFetchFirstValueContext ctx) {
        ASTNode astNode = visit(ctx.cExpr());
        if (null != astNode) {
            if (astNode instanceof ParameterMarkerLimitValueSegment) {
                return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
                        ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex());
            }
            return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                    Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString()));
        }
        return visit(ctx.NUMBER_());
    }
    
    // LIMIT and OFFSET may appear in either order; child 0 and child 1 are inspected
    // so whichever clause comes first is assigned correctly.
    private LimitSegment createLimitSegmentWhenLimitAndOffset(final SelectLimitContext ctx) {
        ParseTree astNode0 = ctx.getChild(0);
        LimitValueSegment rowCount = null;
        LimitValueSegment offset = null;
        if (astNode0 instanceof LimitClauseContext) {
            rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue());
        } else {
            offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue());
        }
        ParseTree astNode1 = ctx.getChild(1);
        if (astNode1 instanceof LimitClauseContext) {
            rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue());
        } else {
            offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue());
        }
        return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, rowCount);
    }
    
    // Exactly one of LIMIT/FETCH FIRST or OFFSET is present.
    private LimitSegment createLimitSegmentWhenRowCountOrOffsetAbsent(final SelectLimitContext ctx) {
        if (null != ctx.limitClause()) {
            if (null != ctx.limitClause().selectFetchFirstValue()) {
                LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectFetchFirstValue());
                return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit);
            }
            LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectLimitValue());
            return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit);
        }
        LimitValueSegment offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue());
        return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, null);
    }
    
    @Override
    public ASTNode visitExecuteStmt(final ExecuteStmtContext ctx) {
        return new PostgreSQLExecuteStatement();
    }
    
    /**
     * Get original text.
     *
     * @param ctx context
     * @return original text
     */
    protected String getOriginalText(final ParserRuleContext ctx) {
        return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
    }
    
    // Qualified name ("a.b.c"): attribute segments from the recursive attrs rule plus the leading colId.
    @Override
    @SuppressWarnings("unchecked")
    public ASTNode visitAnyName(final AnyNameContext ctx) {
        CollectionValue<NameSegment> result = new CollectionValue<>();
        if (null != ctx.attrs()) {
            result.combine((CollectionValue<NameSegment>) visit(ctx.attrs()));
        }
        result.getValue().add(new NameSegment(ctx.colId().getStart().getStartIndex(), ctx.colId().getStop().getStopIndex(), new IdentifierValue(ctx.colId().getText())));
        return result;
    }
    
    @Override
    @SuppressWarnings("unchecked")
    public ASTNode visitAttrs(final AttrsContext ctx) {
        CollectionValue<NameSegment> result = new CollectionValue<>();
        result.getValue().add(new NameSegment(ctx.attrName().getStart().getStartIndex(), ctx.attrName().getStop().getStopIndex(), new IdentifierValue(ctx.attrName().getText())));
        if (null != ctx.attrs()) {
            result.combine((CollectionValue<NameSegment>) visit(ctx.attrs()));
        }
        return result;
    }
    
    @Override
    public ASTNode visitName(final NameContext ctx) {
        return visit(ctx.identifier());
    }
    
    @Override
    public ASTNode visitSignedIconst(final SignedIconstContext ctx) {
        return new NumberLiteralValue(ctx.getText());
    }
}
// ANTLR visitor converting PostgreSQL parse trees into ShardingSphere SQL statement segments.
// NOTE(review): this definition continues beyond the visible chunk (visitInsertRest is cut off).
class PostgreSQLStatementSQLVisitor extends PostgreSQLStatementParserBaseVisitor<ASTNode> {
    
    // Parameter markers ($n / ?) collected in source order while visiting expressions.
    private final Collection<ParameterMarkerSegment> parameterMarkerSegments = new LinkedList<>();
    
    public PostgreSQLStatementSQLVisitor(final Properties props) {
    }
    
    // "?" markers are numbered by encounter order; "$n" markers carry an explicit 1-based index,
    // stored 0-based.
    @Override
    public final ASTNode visitParameterMarker(final ParameterMarkerContext ctx) {
        if (null == ctx.DOLLAR_()) {
            return new ParameterMarkerValue(parameterMarkerSegments.size(), ParameterMarkerType.QUESTION);
        }
        return new ParameterMarkerValue(new NumberLiteralValue(ctx.NUMBER_().getText()).getValue().intValue() - 1, ParameterMarkerType.DOLLAR);
    }
    
    @Override
    public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) {
        return new NumberLiteralValue(ctx.NUMBER_().getText());
    }
    
    @Override
    public final ASTNode visitIdentifier(final IdentifierContext ctx) {
        UnreservedWordContext unreservedWord = ctx.unreservedWord();
        return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText());
    }
    
    @Override
    public final ASTNode visitUnreservedWord(final UnreservedWordContext ctx) {
        return new IdentifierValue(ctx.getText());
    }
    
    @Override
    public final ASTNode visitSchemaName(final SchemaNameContext ctx) {
        return visit(ctx.identifier());
    }
    
    // Table name with optional owner qualifier ("schema.table").
    @Override
    public final ASTNode visitTableName(final TableNameContext ctx) {
        SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(), ctx.name().getStop().getStopIndex(),
                (IdentifierValue) visit(ctx.name())));
        OwnerContext owner = ctx.owner();
        if (null != owner) {
            result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
        }
        return result;
    }
    
    // Column name with optional owner qualifier ("table.column").
    @Override
    public final ASTNode visitColumnName(final ColumnNameContext ctx) {
        ColumnSegment result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.name()));
        OwnerContext owner = ctx.owner();
        if (null != owner) {
            result.setOwner(new OwnerSegment(owner.getStart().getStartIndex(), owner.getStop().getStopIndex(), (IdentifierValue) visit(owner.identifier())));
        }
        return result;
    }
    
    @Override
    public final ASTNode visitIndexName(final IndexNameContext ctx) {
        IndexNameSegment indexName = new IndexNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
        return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName);
    }
    
    @Override
    public final ASTNode visitConstraintName(final ConstraintNameContext ctx) {
        return new ConstraintSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
    }
    
    @Override
    public final ASTNode visitTableNames(final TableNamesContext ctx) {
        CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
        for (TableNameContext each : ctx.tableName()) {
            result.getValue().add((SimpleTableSegment) visit(each));
        }
        return result;
    }
    
    @Override
    public final ASTNode visitColumnNames(final ColumnNamesContext ctx) {
        CollectionValue<ColumnSegment> result = new CollectionValue<>();
        for (ColumnNameContext each : ctx.columnName()) {
            result.getValue().add((ColumnSegment) visit(each));
        }
        return result;
    }
    
    // Top-level expression dispatch; branch order follows the grammar alternatives.
    // Unmatched forms fall through to a CommonExpressionSegment carrying the raw text.
    @Override
    public ASTNode visitAExpr(final AExprContext ctx) {
        if (null != ctx.cExpr()) {
            return visit(ctx.cExpr());
        }
        if (null != ctx.TYPE_CAST_()) {
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText());
        }
        if (null != ctx.BETWEEN()) {
            return createBetweenSegment(ctx);
        }
        if (null != ctx.IN()) {
            return createInSegment(ctx);
        }
        if (null != ctx.patternMatchingOperator()) {
            return createPatternMatchingOperationSegment(ctx);
        }
        Optional<String> binaryOperator = findBinaryOperator(ctx);
        if (binaryOperator.isPresent()) {
            return createBinaryOperationSegment(ctx, binaryOperator.get());
        }
        // Visit children for their side effects (e.g. parameter marker collection) before falling back.
        super.visitAExpr(ctx);
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text);
    }
    
    // Finds the operator of a binary expression; ISNULL is normalized to "IS".
    private Optional<String> findBinaryOperator(final AExprContext ctx) {
        if (null != ctx.IS()) {
            return Optional.of(ctx.IS().getText());
        }
        if (null != ctx.ISNULL()) {
            return Optional.of("IS");
        }
        if (1 == ctx.aExpr().size()) {
            return Optional.empty();
        }
        if (null != ctx.comparisonOperator()) {
            return Optional.of(ctx.comparisonOperator().getText());
        }
        if (null != ctx.andOperator()) {
            return Optional.of(ctx.andOperator().getText());
        }
        if (null != ctx.orOperator()) {
            return Optional.of(ctx.orOperator().getText());
        }
        if (null != ctx.PLUS_()) {
            return Optional.of(ctx.PLUS_().getText());
        }
        if (null != ctx.MINUS_()) {
            return Optional.of(ctx.MINUS_().getText());
        }
        if (null != ctx.ASTERISK_()) {
            return Optional.of(ctx.ASTERISK_().getText());
        }
        if (null != ctx.SLASH_()) {
            return Optional.of(ctx.SLASH_().getText());
        }
        return Optional.empty();
    }
    
    // For IS/ISNULL the right side is the raw trailing text ("NULL", "NOT NULL", ...).
    // NOTE(review): the "+ 2" offsets skip the keyword separator and appear to assume a single
    // space after IS / within ISNULL — confirm against the lexer's token positions.
    private BinaryOperationExpression createBinaryOperationSegment(final AExprContext ctx, final String operator) {
        if ("IS".equalsIgnoreCase(operator)) {
            ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
            String rightText;
            ExpressionSegment right;
            if (null != ctx.IS()) {
                rightText = ctx.start.getInputStream().getText(new Interval(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex())).trim();
                right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText);
            } else {
                rightText = ctx.start.getInputStream().getText(new Interval(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex())).trim();
                right = new LiteralExpressionSegment(ctx.ISNULL().getSymbol().getStartIndex() + 2, ctx.stop.getStopIndex(), rightText);
            }
            return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, "IS",
                    ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())));
        }
        ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
        ExpressionSegment right = (ExpressionSegment) visit(ctx.aExpr(1));
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
    }
    
    // Primary expression dispatch: column refs, parameter markers (recorded as a side effect),
    // constants, parenthesized expressions, function calls, subqueries and CASE expressions.
    @Override
    public ASTNode visitCExpr(final CExprContext ctx) {
        if (null != ctx.columnref()) {
            return visit(ctx.columnref());
        }
        if (null != ctx.parameterMarker()) {
            ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker());
            ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                    parameterMarker.getValue(), parameterMarker.getType());
            parameterMarkerSegments.add(result);
            return result;
        }
        if (null != ctx.aexprConst()) {
            return visit(ctx.aexprConst());
        }
        if (null != ctx.aExpr()) {
            return visit(ctx.aExpr());
        }
        if (null != ctx.funcExpr()) {
            return visit(ctx.funcExpr());
        }
        if (null != ctx.selectWithParens()) {
            return createSubqueryExpressionSegment(ctx);
        }
        if (null != ctx.caseExpr()) {
            return visit(ctx.caseExpr());
        }
        super.visitCExpr(ctx);
        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
        return new CommonExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), text);
    }
    
    // "(SELECT ...)" vs "EXISTS (SELECT ...)".
    private ExpressionSegment createSubqueryExpressionSegment(final CExprContext ctx) {
        SubquerySegment subquerySegment = new SubquerySegment(ctx.selectWithParens().getStart().getStartIndex(), ctx.selectWithParens().getStop().getStopIndex(),
                (PostgreSQLSelectStatement) visit(ctx.selectWithParens()));
        return null == ctx.EXISTS() ? new SubqueryExpressionSegment(subquerySegment)
                : new ExistsSubqueryExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment);
    }
    
    // CASE [arg] WHEN ... THEN ... [ELSE ...] END; when/then lists are kept parallel.
    @Override
    public ASTNode visitCaseExpr(final CaseExprContext ctx) {
        Collection<ExpressionSegment> whenExprs = new LinkedList<>();
        Collection<ExpressionSegment> thenExprs = new LinkedList<>();
        for (WhenClauseContext each : ctx.whenClauseList().whenClause()) {
            whenExprs.add((ExpressionSegment) visit(each.aExpr(0)));
            thenExprs.add((ExpressionSegment) visit(each.aExpr(1)));
        }
        ExpressionSegment caseExpr = null == ctx.caseArg() ? null : (ExpressionSegment) visit(ctx.caseArg().aExpr());
        ExpressionSegment elseExpr = null == ctx.caseDefault() ? null : (ExpressionSegment) visit(ctx.caseDefault().aExpr());
        return new CaseWhenExpression(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), caseExpr, whenExprs, thenExprs, elseExpr);
    }
    
    // Function call: recognized aggregation names become aggregation segments,
    // everything else a generic FunctionSegment with its argument expressions.
    @Override
    public ASTNode visitFuncExpr(final FuncExprContext ctx) {
        if (null != ctx.functionExprCommonSubexpr()) {
            return visit(ctx.functionExprCommonSubexpr());
        }
        Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class));
        String aggregationType = ctx.funcApplication().funcName().getText();
        if (AggregationType.isAggregationType(aggregationType)) {
            return createAggregationSegment(ctx.funcApplication(), aggregationType, expressionSegments);
        }
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.funcApplication().funcName().getText(), getOriginalText(ctx));
        result.getParameters().addAll(expressionSegments);
        return result;
    }
    
    // Special-form functions (CAST, etc.); CAST becomes a TypeCastExpression.
    @Override
    public ASTNode visitFunctionExprCommonSubexpr(final FunctionExprCommonSubexprContext ctx) {
        if (null != ctx.CAST()) {
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.aExpr(0)), ctx.typeName().getText());
        }
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getChild(0).getText(), getOriginalText(ctx));
        Collection<ExpressionSegment> expressionSegments = getExpressionSegments(getTargetRuleContextFromParseTree(ctx, AExprContext.class));
        result.getParameters().addAll(expressionSegments);
        return result;
    }
    
    // Depth-first collection of all descendants of the given context type;
    // recursion stops below a match, so only outermost matches are returned.
    private <T extends ParseTree> Collection<T> getTargetRuleContextFromParseTree(final ParseTree parseTree, final Class<? extends T> clazz) {
        Collection<T> result = new LinkedList<>();
        for (int index = 0; index < parseTree.getChildCount(); index++) {
            ParseTree child = parseTree.getChild(index);
            if (clazz.isInstance(child)) {
                result.add(clazz.cast(child));
            } else {
                result.addAll(getTargetRuleContextFromParseTree(child, clazz));
            }
        }
        return result;
    }
    
    private Collection<ExpressionSegment> getExpressionSegments(final Collection<AExprContext> aExprContexts) {
        Collection<ExpressionSegment> result = new LinkedList<>();
        for (AExprContext each : aExprContexts) {
            result.add((ExpressionSegment) visit(each));
        }
        return result;
    }
    
    // Constant literal; a leading type name or function-style cast ("int4 '1'") becomes a TypeCastExpression.
    // NOTE(review): the type-cast branch dereferences ctx.STRING_() unconditionally — if a
    // non-string constant can reach it, this would NPE. Confirm against the grammar.
    @Override
    public ASTNode visitAexprConst(final AexprConstContext ctx) {
        LiteralValue<?> value;
        if (null != ctx.numberConst()) {
            value = new NumberLiteralValue(ctx.numberConst().getText());
        } else if (null != ctx.STRING_()) {
            value = new StringLiteralValue(ctx.STRING_().getText());
        } else if (null != ctx.FALSE()) {
            value = new BooleanLiteralValue(ctx.FALSE().getText());
        } else if (null != ctx.TRUE()) {
            value = new BooleanLiteralValue(ctx.TRUE().getText());
        } else if (null != ctx.NULL()) {
            value = new NullLiteralValue(ctx.getText());
        } else {
            value = new OtherLiteralValue(ctx.getText());
        }
        if (null != ctx.constTypeName() || null != ctx.funcName() && null == ctx.LP_()) {
            LiteralExpressionSegment expression = new LiteralExpressionSegment(ctx.STRING_().getSymbol().getStartIndex(), ctx.STRING_().getSymbol().getStopIndex(),
                    value.getValue().toString());
            String dataType = null != ctx.constTypeName() ? ctx.constTypeName().getText() : ctx.funcName().getText();
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), expression, dataType);
        }
        return SQLUtils.createLiteralExpression(value, ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText());
    }
    
    // Column reference, optionally owner-qualified via indirection ("t.col").
    @Override
    public ASTNode visitColumnref(final ColumnrefContext ctx) {
        if (null != ctx.indirection()) {
            AttrNameContext attrName = ctx.indirection().indirectionEl().attrName();
            ColumnSegment result = new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(attrName.getText()));
            OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()));
            result.setOwner(owner);
            return result;
        }
        return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()));
    }
    
    // "expr [NOT] IN (...)" — right side is a subquery or a value list.
    private InExpression createInSegment(final AExprContext ctx) {
        ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
        ExpressionSegment right = createInExpressionSegment(ctx.inExpr());
        boolean not = null != ctx.NOT();
        return new InExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, not);
    }
    
    @SuppressWarnings("unchecked")
    private ExpressionSegment createInExpressionSegment(final InExprContext ctx) {
        if (null != ctx.selectWithParens()) {
            PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens());
            SubquerySegment subquerySegment = new SubquerySegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), select);
            return new SubqueryExpressionSegment(subquerySegment);
        }
        ListExpression result = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex());
        result.getItems().addAll(((CollectionValue<ExpressionSegment>) visit(ctx.exprList())).getValue());
        return result;
    }
    
    // Left-recursive expression list; nested list is flattened first to keep source order.
    @SuppressWarnings("unchecked")
    @Override
    public ASTNode visitExprList(final ExprListContext ctx) {
        CollectionValue<ExpressionSegment> result = new CollectionValue<>();
        if (null != ctx.exprList()) {
            result.combine((CollectionValue<ExpressionSegment>) visitExprList(ctx.exprList()));
        }
        result.getValue().add((ExpressionSegment) visit(ctx.aExpr()));
        return result;
    }
    
    // "expr [NOT] BETWEEN bExpr AND aExpr".
    private BetweenExpression createBetweenSegment(final AExprContext ctx) {
        ExpressionSegment left = (ExpressionSegment) visit(ctx.aExpr(0));
        ExpressionSegment between = (ExpressionSegment) visit(ctx.bExpr());
        ExpressionSegment and = (ExpressionSegment) visit(ctx.aExpr(1));
        boolean not = null != ctx.NOT();
        return new BetweenExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, between, and, not);
    }
    
    // Restricted expression (as allowed in BETWEEN bounds); falls back to a literal of the raw text.
    @Override
    public ASTNode visitBExpr(final BExprContext ctx) {
        if (null != ctx.cExpr()) {
            return visit(ctx.cExpr());
        }
        if (null != ctx.TYPE_CAST_()) {
            return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), (ExpressionSegment) visit(ctx.bExpr(0)), ctx.typeName().getText());
        }
        if (null != ctx.qualOp()) {
            ExpressionSegment left = (ExpressionSegment) visit(ctx.bExpr(0));
            ExpressionSegment right = (ExpressionSegment) visit(ctx.bExpr(1));
            String operator = ctx.qualOp().getText();
            String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
            return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
        }
        // Visit children for side effects (e.g. parameter markers) before the raw-text fallback.
        for (BExprContext each : ctx.bExpr()) {
            visit(each);
        }
        return new LiteralExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText());
    }
    
    // Aggregation call; DISTINCT produces the distinct variant carrying the distinct expression text.
    private ProjectionSegment createAggregationSegment(final FuncApplicationContext ctx, final String aggregationType, final Collection<ExpressionSegment> expressionSegments) {
        AggregationType type = AggregationType.valueOf(aggregationType.toUpperCase());
        // Inner expression text spans from the opening parenthesis to the end of the call.
        String innerExpression = ctx.start.getInputStream().getText(new Interval(ctx.LP_().getSymbol().getStartIndex(), ctx.stop.getStopIndex()));
        if (null == ctx.DISTINCT()) {
            AggregationProjectionSegment result = new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression);
            result.getParameters().addAll(expressionSegments);
            return result;
        }
        AggregationDistinctProjectionSegment result = new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression,
                getDistinctExpression(ctx));
        result.getParameters().addAll(expressionSegments);
        return result;
    }
    
    private String getDistinctExpression(final FuncApplicationContext ctx) {
        StringBuilder result = new StringBuilder();
        result.append(ctx.funcArgList().getText());
        if (null != ctx.sortClause()) {
            result.append(ctx.sortClause().getText());
        }
        return result.toString();
    }
    
    // Multi-word type names ("double precision") are joined with single spaces.
    @Override
    public final ASTNode visitDataTypeName(final DataTypeNameContext ctx) {
        IdentifierContext identifierContext = ctx.identifier();
        if (null != identifierContext) {
            return new KeywordValue(identifierContext.getText());
        }
        Collection<String> dataTypeNames = new LinkedList<>();
        for (int i = 0; i < ctx.getChildCount(); i++) {
            dataTypeNames.add(ctx.getChild(i).getText());
        }
        return new KeywordValue(String.join(" ", dataTypeNames));
    }
    
    @Override
    public final ASTNode visitSortClause(final SortClauseContext ctx) {
        Collection<OrderByItemSegment> items = new LinkedList<>();
        for (SortbyContext each : ctx.sortbyList().sortby()) {
            items.add((OrderByItemSegment) visit(each));
        }
        return new OrderBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), items);
    }
    
    // ORDER BY item: column, ordinal (numeric literal), or general expression,
    // each with direction (default ASC) and optional NULLS FIRST/LAST.
    @Override
    public final ASTNode visitSortby(final SortbyContext ctx) {
        OrderDirection orderDirection = null != ctx.ascDesc() ? generateOrderDirection(ctx.ascDesc()) : OrderDirection.ASC;
        NullsOrderType nullsOrderType = generateNullsOrderType(ctx.nullsOrder());
        ASTNode expr = visit(ctx.aExpr());
        if (expr instanceof ColumnSegment) {
            ColumnSegment column = (ColumnSegment) expr;
            return new ColumnOrderByItemSegment(column, orderDirection, nullsOrderType);
        }
        if (expr instanceof LiteralExpressionSegment) {
            LiteralExpressionSegment index = (LiteralExpressionSegment) expr;
            return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(), Integer.parseInt(index.getLiterals().toString()), orderDirection, nullsOrderType);
        }
        if (expr instanceof ExpressionSegment) {
            return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()),
                    orderDirection, nullsOrderType, (ExpressionSegment) expr);
        }
        return new ExpressionOrderByItemSegment(ctx.aExpr().getStart().getStartIndex(), ctx.aExpr().getStop().getStopIndex(), getOriginalText(ctx.aExpr()),
                orderDirection, nullsOrderType);
    }
    
    // null means the clause was absent (no explicit NULLS ordering).
    private NullsOrderType generateNullsOrderType(final NullsOrderContext ctx) {
        if (null == ctx) {
            return null;
        }
        return null == ctx.FIRST() ? NullsOrderType.LAST : NullsOrderType.FIRST;
    }
    
    private OrderDirection generateOrderDirection(final AscDescContext ctx) {
        return null == ctx.DESC() ? OrderDirection.ASC : OrderDirection.DESC;
    }
    
    @Override
    public final ASTNode visitDataType(final DataTypeContext ctx) {
        DataTypeSegment result = new DataTypeSegment();
        result.setDataTypeName(((KeywordValue) visit(ctx.dataTypeName())).getValue());
        result.setStartIndex(ctx.start.getStartIndex());
        result.setStopIndex(ctx.stop.getStopIndex());
        if (null != ctx.dataTypeLength()) {
            DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.dataTypeLength());
            result.setDataLength(dataTypeLengthSegment);
        }
        return result;
    }
    
    // Precision/scale of "(p[, s])".
    // NOTE(review): the stop index is set from ctx.stop.getStartIndex() (start of the stop token),
    // unlike visitDataType above which uses getStopIndex() — confirm this asymmetry is intended.
    @Override
    public final ASTNode visitDataTypeLength(final DataTypeLengthContext ctx) {
        DataTypeLengthSegment result = new DataTypeLengthSegment();
        result.setStartIndex(ctx.start.getStartIndex());
        result.setStopIndex(ctx.stop.getStartIndex());
        List<TerminalNode> numbers = ctx.NUMBER_();
        if (1 == numbers.size()) {
            result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
        }
        if (2 == numbers.size()) {
            result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
            result.setScale(Integer.parseInt(numbers.get(1).getText()));
        }
        return result;
    }
    
    // INSERT statement: body from insertRest, then target table, ON CONFLICT and RETURNING.
    @Override
    public ASTNode visitInsert(final InsertContext ctx) {
        PostgreSQLInsertStatement result = (PostgreSQLInsertStatement) visit(ctx.insertRest());
        result.setTable((SimpleTableSegment) visit(ctx.insertTarget()));
        if (null != ctx.optOnConflict()) {
            result.setOnDuplicateKeyColumnsSegment((OnDuplicateKeyColumnsSegment) visit(ctx.optOnConflict()));
        }
        if (null != ctx.returningClause()) {
            result.setReturningSegment((ReturningSegment) visit(ctx.returningClause()));
        }
        result.getParameterMarkerSegments().addAll(getParameterMarkerSegments());
        return result;
    }
    
    // ON CONFLICT ... DO UPDATE SET assignments.
    @Override
    public ASTNode visitOptOnConflict(final OptOnConflictContext ctx) {
        SetClauseListContext setClauseListContext = ctx.setClauseList();
        Collection<AssignmentSegment> assignments = ((SetAssignmentSegment) visit(setClauseListContext)).getAssignments();
        return new OnDuplicateKeyColumnsSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), assignments);
    }
    
    @Override
    public ASTNode visitInsertTarget(final InsertTargetContext ctx) {
        SimpleTableSegment result = (SimpleTableSegment) visit(ctx.qualifiedName());
        if (null != ctx.AS()) {
            ColIdContext colId = ctx.colId();
            result.setAlias(new AliasSegment(colId.start.getStartIndex(), colId.stop.getStopIndex(), new IdentifierValue(colId.getText())));
        }
        return result;
    }
    
    @Override
    public ASTNode visitQualifiedNameList(final QualifiedNameListContext ctx) {
        CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
        if (null != ctx.qualifiedName()) {
            result.getValue().add((SimpleTableSegment) visit(ctx.qualifiedName()));
        }
        if (null != ctx.qualifiedNameList()) {
            result.combine((CollectionValue) visit(ctx.qualifiedNameList()));
        }
        return result;
    }
    
    // Qualified table name; one level of indirection is "owner.table", two levels nest the owners.
    @Override
    public ASTNode visitQualifiedName(final QualifiedNameContext ctx) {
        if (null != ctx.indirection()) {
            AttrNameContext attrName = ctx.indirection().indirectionEl().attrName();
            TableNameSegment tableName = new TableNameSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText()));
            OwnerSegment owner = new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()));
            SimpleTableSegment result = new SimpleTableSegment(tableName);
            if (null != ctx.indirection().indirection()) {
                OwnerSegment tableOwner = createTableOwner(ctx.indirection().indirection());
                tableOwner.setOwner(owner);
                result.setOwner(tableOwner);
            } else {
                result.setOwner(owner);
            }
            return result;
        }
        return new SimpleTableSegment(new TableNameSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())));
    }
    
    // INSERT body: columns, VALUES/SELECT source. (Definition continues past the visible chunk.)
    @Override
    public ASTNode visitInsertRest(final InsertRestContext ctx) {
        PostgreSQLInsertStatement result = new PostgreSQLInsertStatement();
        if (null == ctx.insertColumnList()) {
            result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex()
- 1, ctx.start.getStartIndex() - 1, Collections.emptyList())); } else { InsertColumnListContext insertColumns = ctx.insertColumnList(); CollectionValue<ColumnSegment> columns = (CollectionValue<ColumnSegment>) visit(insertColumns); InsertColumnsSegment insertColumnsSegment = new InsertColumnsSegment(insertColumns.start.getStartIndex() - 1, insertColumns.stop.getStopIndex() + 1, columns.getValue()); result.setInsertColumns(insertColumnsSegment); } ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); if (null == valuesClause) { PostgreSQLSelectStatement selectStatement = (PostgreSQLSelectStatement) visit(ctx.select()); result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); } else { result.getValues().addAll(createInsertValuesSegments(valuesClause)); } return result; } @Override public ASTNode visitInsertColumnList(final InsertColumnListContext ctx) { CollectionValue<ColumnSegment> result = new CollectionValue<>(); if (null != ctx.insertColumnList()) { result.getValue().addAll(((CollectionValue<ColumnSegment>) visit(ctx.insertColumnList())).getValue()); } result.getValue().add((ColumnSegment) visit(ctx.insertColumnItem())); return result; } @Override public ASTNode visitInsertColumnItem(final InsertColumnItemContext ctx) { if (null == ctx.optIndirection().indirectionEl()) { return new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText())); } ColumnSegment result = new ColumnSegment(ctx.colId().start.getStartIndex(), ctx.optIndirection().stop.getStopIndex(), new IdentifierValue(ctx.optIndirection().indirectionEl().attrName().getText())); result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } private Collection<InsertValuesSegment> 
createInsertValuesSegments(final ValuesClauseContext ctx) { Collection<InsertValuesSegment> result = new LinkedList<>(); if (null != ctx.valuesClause()) { Collection<InsertValuesSegment> expressions = createInsertValuesSegments(ctx.valuesClause()); result.addAll(expressions); } Collection<ExpressionSegment> expressions = createInsertValuesSegments(ctx.exprList()); InsertValuesSegment insertValuesSegment = new InsertValuesSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), (List<ExpressionSegment>) expressions); result.add(insertValuesSegment); return result; } private Collection<ExpressionSegment> createInsertValuesSegments(final ExprListContext ctx) { Collection<ExpressionSegment> result = new LinkedList<>(); if (null != ctx.exprList()) { Collection<ExpressionSegment> tmpResult = createInsertValuesSegments(ctx.exprList()); result.addAll(tmpResult); } ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); result.add(expr); return result; } private Collection<AssignmentSegment> generateAssignmentSegments(final SetClauseListContext ctx) { Collection<AssignmentSegment> result = new LinkedList<>(); if (null != ctx.setClauseList()) { Collection<AssignmentSegment> tmpResult = generateAssignmentSegments(ctx.setClauseList()); result.addAll(tmpResult); } AssignmentSegment assignmentSegment = (AssignmentSegment) visit(ctx.setClause()); result.add(assignmentSegment); return result; } @Override public ASTNode visitSetClause(final SetClauseContext ctx) { ColumnSegment columnSegment = (ColumnSegment) visit(ctx.setTarget()); List<ColumnSegment> columnSegments = new LinkedList<>(); columnSegments.add(columnSegment); ExpressionSegment expressionSegment = (ExpressionSegment) visit(ctx.aExpr()); return new ColumnAssignmentSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), columnSegments, expressionSegment); } @Override public ASTNode visitSetTarget(final SetTargetContext ctx) { IdentifierValue identifierValue = new 
IdentifierValue(ctx.colId().getText()); return new ColumnSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), identifierValue); } @Override public ASTNode visitRelationExprOptAlias(final RelationExprOptAliasContext ctx) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName()); if (null != ctx.colId()) { result.setAlias(new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); } return result; } @Override public ASTNode visitUpdate(final UpdateContext ctx) { PostgreSQLUpdateStatement result = new PostgreSQLUpdateStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); result.setSetAssignment((SetAssignmentSegment) visit(ctx.setClauseList())); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitSetClauseList(final SetClauseListContext ctx) { Collection<AssignmentSegment> assignments = generateAssignmentSegments(ctx); return new SetAssignmentSegment(ctx.start.getStartIndex() - 4, ctx.stop.getStopIndex(), assignments); } @Override public ASTNode visitDelete(final DeleteContext ctx) { PostgreSQLDeleteStatement result = new PostgreSQLDeleteStatement(); SimpleTableSegment tableSegment = (SimpleTableSegment) visit(ctx.relationExprOptAlias()); result.setTable(tableSegment); if (null != ctx.whereOrCurrentClause()) { result.setWhere((WhereSegment) visit(ctx.whereOrCurrentClause())); } result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitWhereOrCurrentClause(final WhereOrCurrentClauseContext ctx) { return visit(ctx.whereClause()); } @Override public ASTNode visitSelect(final SelectContext ctx) { PostgreSQLSelectStatement result = 
(PostgreSQLSelectStatement) visit(ctx.selectNoParens()); result.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return result; } @Override public ASTNode visitSelectNoParens(final SelectNoParensContext ctx) { PostgreSQLSelectStatement result = (PostgreSQLSelectStatement) visit(ctx.selectClauseN()); if (null != ctx.sortClause()) { OrderBySegment orderBySegment = (OrderBySegment) visit(ctx.sortClause()); result.setOrderBy(orderBySegment); } if (null != ctx.selectLimit()) { LimitSegment limitSegment = (LimitSegment) visit(ctx.selectLimit()); result.setLimit(limitSegment); } if (null != ctx.forLockingClause()) { LockSegment lockSegment = (LockSegment) visit(ctx.forLockingClause()); result.setLock(lockSegment); } return result; } @Override public ASTNode visitForLockingClause(final ForLockingClauseContext ctx) { return new LockSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); } @Override public ASTNode visitSelectWithParens(final SelectWithParensContext ctx) { if (null != ctx.selectWithParens()) { return visit(ctx.selectWithParens()); } return visit(ctx.selectNoParens()); } @Override public ASTNode visitSelectClauseN(final SelectClauseNContext ctx) { if (null != ctx.simpleSelect()) { return visit(ctx.simpleSelect()); } if (null != ctx.selectClauseN() && !ctx.selectClauseN().isEmpty()) { PostgreSQLSelectStatement result = new PostgreSQLSelectStatement(); PostgreSQLSelectStatement left = (PostgreSQLSelectStatement) visit(ctx.selectClauseN(0)); result.setProjections(left.getProjections()); result.setFrom(left.getFrom()); CombineSegment combineSegment = new CombineSegment(((TerminalNode) ctx.getChild(1)).getSymbol().getStartIndex(), ctx.getStop().getStopIndex(), left, getCombineType(ctx), (PostgreSQLSelectStatement) visit(ctx.selectClauseN(1))); result.setCombine(combineSegment); return result; } return visit(ctx.selectWithParens()); } private CombineType getCombineType(final SelectClauseNContext ctx) { boolean isDistinct = null == 
ctx.allOrDistinct() || null != ctx.allOrDistinct().DISTINCT(); if (null != ctx.UNION()) { return isDistinct ? CombineType.UNION : CombineType.UNION_ALL; } if (null != ctx.INTERSECT()) { return isDistinct ? CombineType.INTERSECT : CombineType.INTERSECT_ALL; } return isDistinct ? CombineType.EXCEPT : CombineType.EXCEPT_ALL; } @Override public ASTNode visitSimpleSelect(final SimpleSelectContext ctx) { PostgreSQLSelectStatement result = new PostgreSQLSelectStatement(); if (null == ctx.targetList()) { result.setProjections(new ProjectionsSegment(-1, -1)); } else { ProjectionsSegment projects = (ProjectionsSegment) visit(ctx.targetList()); if (null != ctx.distinctClause()) { projects.setDistinctRow(true); } result.setProjections(projects); } if (null != ctx.fromClause()) { TableSegment tableSegment = (TableSegment) visit(ctx.fromClause()); result.setFrom(tableSegment); } if (null != ctx.whereClause()) { result.setWhere((WhereSegment) visit(ctx.whereClause())); } if (null != ctx.groupClause()) { result.setGroupBy((GroupBySegment) visit(ctx.groupClause())); } if (null != ctx.havingClause()) { result.setHaving((HavingSegment) visit(ctx.havingClause())); } if (null != ctx.windowClause()) { result.setWindow((WindowSegment) visit(ctx.windowClause())); } return result; } @Override public ASTNode visitHavingClause(final HavingClauseContext ctx) { ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); return new HavingSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr); } @Override public ASTNode visitWindowClause(final WindowClauseContext ctx) { return new WindowSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex()); } @Override public ASTNode visitGroupClause(final GroupClauseContext ctx) { Collection<OrderByItemSegment> items = new LinkedList<>(); for (GroupByItemContext each : ctx.groupByList().groupByItem()) { items.add((OrderByItemSegment) visit(each)); } return new GroupBySegment(ctx.start.getStartIndex(), 
ctx.stop.getStopIndex(), items); } @Override public ASTNode visitGroupByItem(final GroupByItemContext ctx) { if (null != ctx.aExpr()) { ASTNode astNode = visit(ctx.aExpr()); if (astNode instanceof ColumnSegment) { return new ColumnOrderByItemSegment((ColumnSegment) astNode, OrderDirection.ASC, null); } if (astNode instanceof LiteralExpressionSegment) { LiteralExpressionSegment index = (LiteralExpressionSegment) astNode; return new IndexOrderByItemSegment(index.getStartIndex(), index.getStopIndex(), Integer.parseInt(index.getLiterals().toString()), OrderDirection.ASC, null); } return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null); } return new ExpressionOrderByItemSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), OrderDirection.ASC, null); } @Override public ASTNode visitTargetList(final TargetListContext ctx) { ProjectionsSegment result = new ProjectionsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); if (null != ctx.targetList()) { ProjectionsSegment projections = (ProjectionsSegment) visit(ctx.targetList()); result.getProjections().addAll(projections.getProjections()); } ProjectionSegment projection = (ProjectionSegment) visit(ctx.targetEl()); result.getProjections().add(projection); return result; } @Override public ASTNode visitTargetEl(final TargetElContext ctx) { ProjectionSegment result = createProjectionSegment(ctx, ctx.aExpr()); if (null != ctx.identifier()) { ((AliasAvailable) result).setAlias(new AliasSegment(ctx.identifier().start.getStartIndex(), ctx.identifier().stop.getStopIndex(), new IdentifierValue(ctx.identifier().getText()))); } return result; } private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr) { if (null != ctx.ASTERISK_()) { return new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); } if (null != ctx.DOT_ASTERISK_()) { 
ShorthandProjectionSegment result = new ShorthandProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); result.setOwner(new OwnerSegment(ctx.colId().start.getStartIndex(), ctx.colId().stop.getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } if (null != ctx.aExpr()) { ASTNode projection = visit(ctx.aExpr()); return createProjectionSegment(ctx, expr, projection); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null); } private ProjectionSegment createProjectionSegment(final TargetElContext ctx, final AExprContext expr, final ASTNode projection) { if (projection instanceof ColumnSegment) { return new ColumnProjectionSegment((ColumnSegment) projection); } if (projection instanceof AggregationProjectionSegment) { return (AggregationProjectionSegment) projection; } if (projection instanceof SubqueryExpressionSegment) { SubqueryExpressionSegment subqueryExpression = (SubqueryExpressionSegment) projection; String text = ctx.start.getInputStream().getText(new Interval(subqueryExpression.getStartIndex(), subqueryExpression.getStopIndex())); return new SubqueryProjectionSegment(subqueryExpression.getSubquery(), text); } if (projection instanceof ExistsSubqueryExpression) { ExistsSubqueryExpression existsSubqueryExpression = (ExistsSubqueryExpression) projection; String text = ctx.start.getInputStream().getText(new Interval(existsSubqueryExpression.getStartIndex(), existsSubqueryExpression.getStopIndex())); return new SubqueryProjectionSegment(existsSubqueryExpression.getSubquery(), text); } if (projection instanceof ExpressionSegment) { return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), (ExpressionSegment) projection); } return new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(expr), null); } @Override public ASTNode visitFromClause(final FromClauseContext 
ctx) { return visit(ctx.fromList()); } @Override public ASTNode visitFromList(final FromListContext ctx) { if (null != ctx.fromList()) { JoinTableSegment result = new JoinTableSegment(); result.setStartIndex(ctx.start.getStartIndex()); result.setStopIndex(ctx.stop.getStopIndex()); result.setLeft((TableSegment) visit(ctx.fromList())); result.setRight((TableSegment) visit(ctx.tableReference())); result.setJoinType(JoinType.COMMA.name()); return result; } return visit(ctx.tableReference()); } @Override public ASTNode visitTableReference(final TableReferenceContext ctx) { if (null != ctx.relationExpr()) { SimpleTableSegment result = (SimpleTableSegment) visit(ctx.relationExpr().qualifiedName()); if (null != ctx.aliasClause()) { result.setAlias((AliasSegment) visit(ctx.aliasClause())); } return result; } if (null != ctx.selectWithParens()) { PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens()); SubquerySegment subquery = new SubquerySegment(ctx.selectWithParens().start.getStartIndex(), ctx.selectWithParens().stop.getStopIndex(), select); AliasSegment alias = null != ctx.aliasClause() ? (AliasSegment) visit(ctx.aliasClause()) : null; SubqueryTableSegment result = new SubqueryTableSegment(subquery); result.setAlias(alias); return result; } if (null == ctx.tableReference()) { TableNameSegment tableName = new TableNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue("not support")); return new SimpleTableSegment(tableName); } JoinTableSegment result = new JoinTableSegment(); result.setLeft((TableSegment) visit(ctx.tableReference())); int startIndex = null != ctx.LP_() ? ctx.LP_().getSymbol().getStartIndex() : ctx.tableReference().start.getStartIndex(); int stopIndex = 0; AliasSegment alias = null; if (null == ctx.aliasClause()) { stopIndex = null != ctx.RP_() ? 
ctx.RP_().getSymbol().getStopIndex() : ctx.tableReference().start.getStopIndex(); } else { alias = (AliasSegment) visit(ctx.aliasClause()); startIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.joinedTable().stop.getStopIndex(); } result.setStartIndex(startIndex); result.setStopIndex(stopIndex); result = visitJoinedTable(ctx.joinedTable(), result); result.setAlias(alias); return result; } private JoinTableSegment visitJoinedTable(final JoinedTableContext ctx, final JoinTableSegment tableSegment) { TableSegment right = (TableSegment) visit(ctx.tableReference()); tableSegment.setRight(right); tableSegment.setJoinType(getJoinType(ctx)); tableSegment.setNatural(null != ctx.naturalJoinType()); return null != ctx.joinQual() ? visitJoinQual(ctx.joinQual(), tableSegment) : tableSegment; } private String getJoinType(final JoinedTableContext ctx) { if (null != ctx.crossJoinType()) { return JoinType.CROSS.name(); } if (null != ctx.innerJoinType()) { return JoinType.INNER.name(); } if (null != ctx.outerJoinType()) { return getOutJoinType(ctx.outerJoinType()); } if (null != ctx.naturalJoinType()) { return getNaturalJoinType(ctx.naturalJoinType()); } return JoinType.COMMA.name(); } private static String getNaturalJoinType(final NaturalJoinTypeContext ctx) { if (null != ctx.INNER()) { return JoinType.INNER.name(); } if (null != ctx.FULL()) { return JoinType.FULL.name(); } if (null != ctx.LEFT()) { return JoinType.LEFT.name(); } if (null != ctx.RIGHT()) { return JoinType.RIGHT.name(); } return JoinType.INNER.name(); } private static String getOutJoinType(final OuterJoinTypeContext ctx) { if (null == ctx.FULL()) { return null != ctx.LEFT() ? 
JoinType.LEFT.name() : JoinType.RIGHT.name(); } return JoinType.FULL.name(); } private JoinTableSegment visitJoinQual(final JoinQualContext ctx, final JoinTableSegment joinTableSource) { if (null != ctx.aExpr()) { ExpressionSegment condition = (ExpressionSegment) visit(ctx.aExpr()); joinTableSource.setCondition(condition); } if (null != ctx.USING()) { joinTableSource.setUsing(generateUsingColumn(ctx.nameList())); } return joinTableSource; } private List<ColumnSegment> generateUsingColumn(final NameListContext ctx) { List<ColumnSegment> result = new ArrayList<>(); if (null != ctx.nameList()) { result.addAll(generateUsingColumn(ctx.nameList())); } if (null != ctx.name()) { result.add(new ColumnSegment(ctx.name().start.getStartIndex(), ctx.name().stop.getStopIndex(), new IdentifierValue(ctx.name().getText()))); } return result; } @Override public ASTNode visitAliasClause(final AliasClauseContext ctx) { StringBuilder aliasName = new StringBuilder(ctx.colId().getText()); if (null != ctx.nameList()) { aliasName.append(ctx.LP_().getText()); aliasName.append(ctx.nameList().getText()); aliasName.append(ctx.RP_().getText()); } return new AliasSegment(ctx.colId().start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(aliasName.toString())); } private OwnerSegment createTableOwner(final IndirectionContext ctx) { AttrNameContext attrName = ctx.indirectionEl().attrName(); return new OwnerSegment(attrName.start.getStartIndex(), attrName.stop.getStopIndex(), new IdentifierValue(attrName.getText())); } @Override public ASTNode visitWhereClause(final WhereClauseContext ctx) { ExpressionSegment expr = (ExpressionSegment) visit(ctx.aExpr()); return new WhereSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr); } @Override public ASTNode visitSelectLimit(final SelectLimitContext ctx) { if (null != ctx.limitClause() && null != ctx.offsetClause()) { return createLimitSegmentWhenLimitAndOffset(ctx); } return 
createLimitSegmentWhenRowCountOrOffsetAbsent(ctx); } @Override public ASTNode visitSelectLimitValue(final SelectLimitValueContext ctx) { if (null != ctx.ALL()) { return null; } ASTNode astNode = visit(ctx.aExpr()); if (astNode instanceof ParameterMarkerExpressionSegment) { return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex()); } return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString())); } @Override public ASTNode visitSelectOffsetValue(final SelectOffsetValueContext ctx) { ASTNode astNode = visit(ctx.aExpr()); if (astNode instanceof ParameterMarkerExpressionSegment) { return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex()); } return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString())); } @Override public ASTNode visitSelectFetchFirstValue(final SelectFetchFirstValueContext ctx) { ASTNode astNode = visit(ctx.cExpr()); if (null != astNode) { if (astNode instanceof ParameterMarkerLimitValueSegment) { return new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((ParameterMarkerExpressionSegment) astNode).getParameterMarkerIndex()); } return new NumberLiteralLimitValueSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), Long.parseLong(((LiteralExpressionSegment) astNode).getLiterals().toString())); } return visit(ctx.NUMBER_()); } private LimitSegment createLimitSegmentWhenLimitAndOffset(final SelectLimitContext ctx) { ParseTree astNode0 = ctx.getChild(0); LimitValueSegment rowCount = null; LimitValueSegment offset = null; if (astNode0 instanceof 
LimitClauseContext) { rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue()); } else { offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue()); } ParseTree astNode1 = ctx.getChild(1); if (astNode1 instanceof LimitClauseContext) { rowCount = null == ctx.limitClause().selectLimitValue() ? null : (LimitValueSegment) visit(ctx.limitClause().selectLimitValue()); } else { offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue()); } return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, rowCount); } private LimitSegment createLimitSegmentWhenRowCountOrOffsetAbsent(final SelectLimitContext ctx) { if (null != ctx.limitClause()) { if (null != ctx.limitClause().selectFetchFirstValue()) { LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectFetchFirstValue()); return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit); } LimitValueSegment limit = (LimitValueSegment) visit(ctx.limitClause().selectLimitValue()); return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, limit); } LimitValueSegment offset = (LimitValueSegment) visit(ctx.offsetClause().selectOffsetValue()); return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, null); } @Override public ASTNode visitExecuteStmt(final ExecuteStmtContext ctx) { return new PostgreSQLExecuteStatement(); } /** * Get original text. 
* * @param ctx context * @return original text */ protected String getOriginalText(final ParserRuleContext ctx) { return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); } @Override @SuppressWarnings("unchecked") public ASTNode visitAnyName(final AnyNameContext ctx) { CollectionValue<NameSegment> result = new CollectionValue<>(); if (null != ctx.attrs()) { result.combine((CollectionValue<NameSegment>) visit(ctx.attrs())); } result.getValue().add(new NameSegment(ctx.colId().getStart().getStartIndex(), ctx.colId().getStop().getStopIndex(), new IdentifierValue(ctx.colId().getText()))); return result; } @Override @SuppressWarnings("unchecked") public ASTNode visitAttrs(final AttrsContext ctx) { CollectionValue<NameSegment> result = new CollectionValue<>(); result.getValue().add(new NameSegment(ctx.attrName().getStart().getStartIndex(), ctx.attrName().getStop().getStopIndex(), new IdentifierValue(ctx.attrName().getText()))); if (null != ctx.attrs()) { result.combine((CollectionValue<NameSegment>) visit(ctx.attrs())); } return result; } @Override public ASTNode visitName(final NameContext ctx) { return visit(ctx.identifier()); } @Override public ASTNode visitSignedIconst(final SignedIconstContext ctx) { return new NumberLiteralValue(ctx.getText()); } }
Shouldn't this be `max.nodes() / min.groups()` ?
public Capacity applyOn(Capacity capacity, ApplicationId application, boolean exclusive) { var min = applyOn(capacity.minResources(), capacity, application, exclusive); var max = applyOn(capacity.maxResources(), capacity, application, exclusive); var groupSize = capacity.groupSize().fromAtMost(max.nodes() / max.groups()) .toAtLeast(min.nodes() / min.groups()); return capacity.withLimits(min, max, groupSize); }
var groupSize = capacity.groupSize().fromAtMost(max.nodes() / max.groups())
public Capacity applyOn(Capacity capacity, ApplicationId application, boolean exclusive) { var min = applyOn(capacity.minResources(), capacity, application, exclusive); var max = applyOn(capacity.maxResources(), capacity, application, exclusive); var groupSize = capacity.groupSize().fromAtMost(max.nodes() / min.groups()) .toAtLeast(min.nodes() / max.groups()); return capacity.withLimits(min, max, groupSize); }
class CapacityPolicies { private final NodeRepository nodeRepository; private final Zone zone; private final StringFlag adminClusterNodeArchitecture; public CapacityPolicies(NodeRepository nodeRepository) { this.nodeRepository = nodeRepository; this.zone = nodeRepository.zone(); this.adminClusterNodeArchitecture = PermanentFlags.ADMIN_CLUSTER_NODE_ARCHITECTURE.bindTo(nodeRepository.flagSource()); } private ClusterResources applyOn(ClusterResources resources, Capacity capacity, ApplicationId application, boolean exclusive) { int nodes = decideSize(resources.nodes(), capacity.isRequired(), application.instance().isTester()); int groups = Math.min(resources.groups(), nodes); var nodeResources = decideNodeResources(resources.nodeResources(), capacity.isRequired(), exclusive); return new ClusterResources(nodes, groups, nodeResources); } private int decideSize(int requested, boolean required, boolean isTester) { if (isTester) return 1; if (required) return requested; return switch (zone.environment()) { case dev, test -> 1; case perf -> Math.min(requested, 3); case staging -> requested <= 1 ? 
requested : Math.max(2, requested / 10); case prod -> requested; }; } private NodeResources decideNodeResources(NodeResources target, boolean required, boolean exclusive) { if (required || exclusive) return target; if (target.isUnspecified()) return target; if (zone.environment() == Environment.dev && zone.cloud().allowHostSharing()) target = target.withVcpu(0.1).withBandwidthGbps(0.1); if (zone.system().isCd() || zone.environment() == Environment.dev || zone.environment() == Environment.test) target = target.with(NodeResources.DiskSpeed.any).with(NodeResources.StorageType.any).withBandwidthGbps(0.1); return target; } public NodeResources defaultNodeResources(ClusterSpec clusterSpec, ApplicationId applicationId) { if (clusterSpec.type() == ClusterSpec.Type.admin) { Architecture architecture = adminClusterArchitecture(applicationId); if (clusterSpec.id().value().equals("cluster-controllers")) { return clusterControllerResources(clusterSpec).with(architecture); } return (nodeRepository.exclusiveAllocation(clusterSpec) ? versioned(clusterSpec, Map.of(new Version(0), smallestExclusiveResources())) : versioned(clusterSpec, Map.of(new Version(0), smallestSharedResources()))) .with(architecture); } if (clusterSpec.type() == ClusterSpec.Type.content) { return zone.cloud().dynamicProvisioning() ? versioned(clusterSpec, Map.of(new Version(0), new NodeResources(2.0, 8, 50, 0.3), new Version(8, 75), new NodeResources(2, 16, 300, 0.3))) : versioned(clusterSpec, Map.of(new Version(0), new NodeResources(1.5, 8, 50, 0.3))); } else { return zone.cloud().dynamicProvisioning() ? 
versioned(clusterSpec, Map.of(new Version(0), new NodeResources(2.0, 8, 50, 0.3))) : versioned(clusterSpec, Map.of(new Version(0), new NodeResources(1.5, 8, 50, 0.3))); } } private NodeResources clusterControllerResources(ClusterSpec clusterSpec) { if (nodeRepository.exclusiveAllocation(clusterSpec)) { return versioned(clusterSpec, Map.of(new Version(0), smallestExclusiveResources())); } return versioned(clusterSpec, Map.of(new Version(0), new NodeResources(0.25, 1.14, 10, 0.3))); } private Architecture adminClusterArchitecture(ApplicationId instance) { return Architecture.valueOf(adminClusterNodeArchitecture.with(APPLICATION_ID, instance.serializedForm()).value()); } /** Returns the resources for the newest version not newer than that requested in the cluster spec. */ static NodeResources versioned(ClusterSpec spec, Map<Version, NodeResources> resources) { return requireNonNull(new TreeMap<>(resources).floorEntry(spec.vespaVersion()), "no default resources applicable for " + spec + " among: " + resources) .getValue(); } private NodeResources smallestExclusiveResources() { return (zone.cloud().name().equals(CloudName.GCP)) ? new NodeResources(1, 4, 50, 0.3) : new NodeResources(0.5, 4, 50, 0.3); } private NodeResources smallestSharedResources() { return (zone.cloud().name().equals(CloudName.GCP)) ? new NodeResources(1, 4, 50, 0.3) : new NodeResources(0.5, 2, 50, 0.3); } /** Returns whether the nodes requested can share physical host with other applications */ public ClusterSpec decideExclusivity(Capacity capacity, ClusterSpec requestedCluster) { if (capacity.cloudAccount().isPresent()) return requestedCluster.withExclusivity(true); boolean exclusive = requestedCluster.isExclusive() && (capacity.isRequired() || zone.environment() == Environment.prod); return requestedCluster.withExclusivity(exclusive); } }
class CapacityPolicies { private final NodeRepository nodeRepository; private final Zone zone; private final StringFlag adminClusterNodeArchitecture; public CapacityPolicies(NodeRepository nodeRepository) { this.nodeRepository = nodeRepository; this.zone = nodeRepository.zone(); this.adminClusterNodeArchitecture = PermanentFlags.ADMIN_CLUSTER_NODE_ARCHITECTURE.bindTo(nodeRepository.flagSource()); } private ClusterResources applyOn(ClusterResources resources, Capacity capacity, ApplicationId application, boolean exclusive) { int nodes = decideSize(resources.nodes(), capacity.isRequired(), application.instance().isTester()); int groups = Math.min(resources.groups(), nodes); var nodeResources = decideNodeResources(resources.nodeResources(), capacity.isRequired(), exclusive); return new ClusterResources(nodes, groups, nodeResources); } private int decideSize(int requested, boolean required, boolean isTester) { if (isTester) return 1; if (required) return requested; return switch (zone.environment()) { case dev, test -> 1; case perf -> Math.min(requested, 3); case staging -> requested <= 1 ? 
requested : Math.max(2, requested / 10); case prod -> requested; }; } private NodeResources decideNodeResources(NodeResources target, boolean required, boolean exclusive) { if (required || exclusive) return target; if (target.isUnspecified()) return target; if (zone.environment() == Environment.dev && zone.cloud().allowHostSharing()) target = target.withVcpu(0.1).withBandwidthGbps(0.1); if (zone.system().isCd() || zone.environment() == Environment.dev || zone.environment() == Environment.test) target = target.with(NodeResources.DiskSpeed.any).with(NodeResources.StorageType.any).withBandwidthGbps(0.1); return target; } public NodeResources defaultNodeResources(ClusterSpec clusterSpec, ApplicationId applicationId) { if (clusterSpec.type() == ClusterSpec.Type.admin) { Architecture architecture = adminClusterArchitecture(applicationId); if (clusterSpec.id().value().equals("cluster-controllers")) { return clusterControllerResources(clusterSpec).with(architecture); } return (nodeRepository.exclusiveAllocation(clusterSpec) ? versioned(clusterSpec, Map.of(new Version(0), smallestExclusiveResources())) : versioned(clusterSpec, Map.of(new Version(0), smallestSharedResources()))) .with(architecture); } if (clusterSpec.type() == ClusterSpec.Type.content) { return zone.cloud().dynamicProvisioning() ? versioned(clusterSpec, Map.of(new Version(0), new NodeResources(2.0, 8, 50, 0.3), new Version(8, 75), new NodeResources(2, 16, 300, 0.3))) : versioned(clusterSpec, Map.of(new Version(0), new NodeResources(1.5, 8, 50, 0.3))); } else { return zone.cloud().dynamicProvisioning() ? 
versioned(clusterSpec, Map.of(new Version(0), new NodeResources(2.0, 8, 50, 0.3))) : versioned(clusterSpec, Map.of(new Version(0), new NodeResources(1.5, 8, 50, 0.3))); } } private NodeResources clusterControllerResources(ClusterSpec clusterSpec) { if (nodeRepository.exclusiveAllocation(clusterSpec)) { return versioned(clusterSpec, Map.of(new Version(0), smallestExclusiveResources())); } return versioned(clusterSpec, Map.of(new Version(0), new NodeResources(0.25, 1.14, 10, 0.3))); } private Architecture adminClusterArchitecture(ApplicationId instance) { return Architecture.valueOf(adminClusterNodeArchitecture.with(APPLICATION_ID, instance.serializedForm()).value()); } /** Returns the resources for the newest version not newer than that requested in the cluster spec. */ static NodeResources versioned(ClusterSpec spec, Map<Version, NodeResources> resources) { return requireNonNull(new TreeMap<>(resources).floorEntry(spec.vespaVersion()), "no default resources applicable for " + spec + " among: " + resources) .getValue(); } private NodeResources smallestExclusiveResources() { return (zone.cloud().name().equals(CloudName.GCP)) ? new NodeResources(1, 4, 50, 0.3) : new NodeResources(0.5, 4, 50, 0.3); } private NodeResources smallestSharedResources() { return (zone.cloud().name().equals(CloudName.GCP)) ? new NodeResources(1, 4, 50, 0.3) : new NodeResources(0.5, 2, 50, 0.3); } /** Returns whether the nodes requested can share physical host with other applications */ public ClusterSpec decideExclusivity(Capacity capacity, ClusterSpec requestedCluster) { if (capacity.cloudAccount().isPresent()) return requestedCluster.withExclusivity(true); boolean exclusive = requestedCluster.isExclusive() && (capacity.isRequired() || zone.environment() == Environment.prod); return requestedCluster.withExclusivity(exclusive); } }
Just move the toByteArray call to the test and out of the implementation to prevent its use
public static byte[] getPosition(@Nullable ShufflePosition shufflePosition) { if (shufflePosition == null) { return null; } Preconditions.checkArgument(shufflePosition instanceof ByteArrayShufflePosition); ByteArrayShufflePosition adapter = (ByteArrayShufflePosition) shufflePosition; return adapter.getPosition().toByteArray(); }
return adapter.getPosition().toByteArray();
public static byte[] getPosition(@Nullable ShufflePosition shufflePosition) { if (shufflePosition == null) { return null; } Preconditions.checkArgument(shufflePosition instanceof ByteArrayShufflePosition); ByteArrayShufflePosition adapter = (ByteArrayShufflePosition) shufflePosition; return adapter.getPosition().toByteArray(); }
class ByteArrayShufflePosition implements Comparable<ShufflePosition>, ShufflePosition { private static final ByteString ZERO = ByteString.copyFrom(new byte[] {0}); private final ByteString position; public ByteArrayShufflePosition(ByteString position) { this.position = position; } public static ByteArrayShufflePosition fromBase64(String position) { return ByteArrayShufflePosition.of(decodeBase64(position)); } public static ByteArrayShufflePosition of(byte[] position) { if (position == null) { return null; } return new ByteArrayShufflePosition(UnsafeByteOperations.unsafeWrap(position)); } public static ByteArrayShufflePosition of(ByteString position) { if (position == null) { return null; } return new ByteArrayShufflePosition(position); } public ByteString getPosition() { return position; } public String encodeBase64() { return encodeBase64URLSafeString(position.toByteArray()); } /** * Returns the {@link ByteArrayShufflePosition} that immediately follows this one, i.e. there are * no possible {@link ByteArrayShufflePosition ByteArrayShufflePositions} between this and its * successor. */ public ByteArrayShufflePosition immediateSuccessor() { return new ByteArrayShufflePosition(position.concat(ZERO)); } @Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (o instanceof ByteArrayShufflePosition) { ByteArrayShufflePosition that = (ByteArrayShufflePosition) o; return this.position.equals(that.position); } return false; } @Override public int hashCode() { return position.hashCode(); } @Override public String toString() { return "ShufflePosition(base64:" + encodeBase64() + ")"; } /** May only compare homogenous ByteArrayShufflePosition types. */ @Override public int compareTo(ShufflePosition o) { if (this == o) { return 0; } ByteArrayShufflePosition other = (ByteArrayShufflePosition) o; return ByteString.unsignedLexicographicalComparator().compare(position, other.position); } }
class ByteArrayShufflePosition implements Comparable<ShufflePosition>, ShufflePosition { private static final ByteString ZERO = ByteString.copyFrom(new byte[] {0}); private final ByteString position; public ByteArrayShufflePosition(ByteString position) { this.position = position; } public static ByteArrayShufflePosition fromBase64(String position) { return ByteArrayShufflePosition.of(decodeBase64(position)); } public static ByteArrayShufflePosition of(byte[] position) { if (position == null) { return null; } return new ByteArrayShufflePosition(UnsafeByteOperations.unsafeWrap(position)); } public static ByteArrayShufflePosition of(ByteString position) { if (position == null) { return null; } return new ByteArrayShufflePosition(position); } public ByteString getPosition() { return position; } public String encodeBase64() { return encodeBase64URLSafeString(position.toByteArray()); } /** * Returns the {@link ByteArrayShufflePosition} that immediately follows this one, i.e. there are * no possible {@link ByteArrayShufflePosition ByteArrayShufflePositions} between this and its * successor. */ public ByteArrayShufflePosition immediateSuccessor() { return new ByteArrayShufflePosition(position.concat(ZERO)); } @Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (o instanceof ByteArrayShufflePosition) { ByteArrayShufflePosition that = (ByteArrayShufflePosition) o; return this.position.equals(that.position); } return false; } @Override public int hashCode() { return position.hashCode(); } @Override public String toString() { return "ShufflePosition(base64:" + encodeBase64() + ")"; } /** May only compare homogenous ByteArrayShufflePosition types. */ @Override public int compareTo(ShufflePosition o) { if (this == o) { return 0; } ByteArrayShufflePosition other = (ByteArrayShufflePosition) o; return ByteString.unsignedLexicographicalComparator().compare(position, other.position); } }
Log the exception as the last parameter in this.
public String serializeRaw(Object object) { final ClientLogger logger = new ClientLogger(JacksonAdapter.class); if (object == null) { return null; } try { return serialize(object, SerializerEncoding.JSON).replaceAll("^\"*", "").replaceAll("\"*$", ""); } catch (IOException ex) { logger.warning("Failed to serialize {} to JSON.", object.getClass()); return null; } }
logger.warning("Failed to serialize {} to JSON.", object.getClass());
public String serializeRaw(Object object) { if (object == null) { return null; } try { return serialize(object, SerializerEncoding.JSON).replaceAll("^\"*", "").replaceAll("\"*$", ""); } catch (IOException ex) { logger.warning("Failed to serialize {} to JSON.", object.getClass(), ex); return null; } }
class JacksonAdapter implements SerializerAdapter { private final ClientLogger logger = new ClientLogger(JacksonAdapter.class); /** * An instance of {@link ObjectMapper} to serialize/deserialize objects. */ private final ObjectMapper mapper; /** * An instance of {@link ObjectMapper} that does not do flattening. */ private final ObjectMapper simpleMapper; private final XmlMapper xmlMapper; /* * The lazily-created serializer for this ServiceClient. */ private static SerializerAdapter serializerAdapter; /* * BOM header from some response bodies. To be removed in deserialization. */ private static final String BOM = "\uFEFF"; /** * Creates a new JacksonAdapter instance with default mapper settings. */ public JacksonAdapter() { simpleMapper = initializeObjectMapper(new ObjectMapper()); xmlMapper = initializeObjectMapper(new XmlMapper()); xmlMapper.configure(ToXmlGenerator.Feature.WRITE_XML_DECLARATION, true); xmlMapper.setDefaultUseWrapper(false); ObjectMapper flatteningMapper = initializeObjectMapper(new ObjectMapper()) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); mapper = initializeObjectMapper(new ObjectMapper()) .registerModule(AdditionalPropertiesSerializer.getModule(flatteningMapper)) .registerModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper)) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); } /** * Gets a static instance of {@link ObjectMapper} that doesn't handle flattening. * * @return an instance of {@link ObjectMapper}. */ protected ObjectMapper simpleMapper() { return simpleMapper; } /** * maintain singleton instance of the default serializer adapter. 
* * @return the default serializer */ public static synchronized SerializerAdapter createDefaultSerializerAdapter() { if (serializerAdapter == null) { serializerAdapter = new JacksonAdapter(); } return serializerAdapter; } /** * @return the original serializer type */ public ObjectMapper serializer() { return mapper; } @Override public String serialize(Object object, SerializerEncoding encoding) throws IOException { if (object == null) { return null; } StringWriter writer = new StringWriter(); if (encoding == SerializerEncoding.XML) { xmlMapper.writeValue(writer, object); } else { serializer().writeValue(writer, object); } return writer.toString(); } @Override @Override public String serializeList(List<?> list, CollectionFormat format) { if (list == null) { return null; } List<String> serialized = new ArrayList<>(); for (Object element : list) { String raw = serializeRaw(element); serialized.add(raw != null ? raw : ""); } return String.join(format.getDelimiter(), serialized); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, final Type type, SerializerEncoding encoding) throws IOException { if (value == null || value.isEmpty() || value.equals(BOM)) { return null; } if (value.startsWith(BOM)) { value = value.replaceFirst(BOM, ""); } final JavaType javaType = createJavaType(type); try { if (encoding == SerializerEncoding.XML) { return (T) xmlMapper.readValue(value, javaType); } else { return (T) serializer().readValue(value, javaType); } } catch (JsonParseException jpe) { throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe)); } } /** * Initializes an instance of JacksonMapperAdapter with default configurations * applied to the object mapper. * * @param mapper the object mapper to use. 
*/ private static <T extends ObjectMapper> T initializeObjectMapper(T mapper) { mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false) .configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, true) .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false) .configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true) .setSerializationInclusion(JsonInclude.Include.NON_NULL) .registerModule(new JavaTimeModule()) .registerModule(ByteArraySerializer.getModule()) .registerModule(Base64UrlSerializer.getModule()) .registerModule(DateTimeSerializer.getModule()) .registerModule(DateTimeRfc1123Serializer.getModule()) .registerModule(DurationSerializer.getModule()) .registerModule(HttpHeadersSerializer.getModule()); mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker() .withFieldVisibility(JsonAutoDetect.Visibility.ANY) .withSetterVisibility(JsonAutoDetect.Visibility.NONE) .withGetterVisibility(JsonAutoDetect.Visibility.NONE) .withIsGetterVisibility(JsonAutoDetect.Visibility.NONE)); return mapper; } private JavaType createJavaType(Type type) { JavaType result; if (type == null) { result = null; } else if (type instanceof JavaType) { result = (JavaType) type; } else if (type instanceof ParameterizedType) { final ParameterizedType parameterizedType = (ParameterizedType) type; final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length]; for (int i = 0; i != actualTypeArguments.length; i++) { javaTypeArguments[i] = createJavaType(actualTypeArguments[i]); } result = mapper .getTypeFactory().constructParametricType((Class<?>) parameterizedType.getRawType(), javaTypeArguments); } else { result = mapper .getTypeFactory().constructType(type); } return result; } }
class JacksonAdapter implements SerializerAdapter { private final ClientLogger logger = new ClientLogger(JacksonAdapter.class); /** * An instance of {@link ObjectMapper} to serialize/deserialize objects. */ private final ObjectMapper mapper; /** * An instance of {@link ObjectMapper} that does not do flattening. */ private final ObjectMapper simpleMapper; private final XmlMapper xmlMapper; private final ObjectMapper headerMapper; /* * The lazily-created serializer for this ServiceClient. */ private static SerializerAdapter serializerAdapter; /* * BOM header from some response bodies. To be removed in deserialization. */ private static final String BOM = "\uFEFF"; /** * Creates a new JacksonAdapter instance with default mapper settings. */ public JacksonAdapter() { simpleMapper = initializeObjectMapper(new ObjectMapper()); xmlMapper = initializeObjectMapper(new XmlMapper()); xmlMapper.configure(ToXmlGenerator.Feature.WRITE_XML_DECLARATION, true); xmlMapper.setDefaultUseWrapper(false); ObjectMapper flatteningMapper = initializeObjectMapper(new ObjectMapper()) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); mapper = initializeObjectMapper(new ObjectMapper()) .registerModule(AdditionalPropertiesSerializer.getModule(flatteningMapper)) .registerModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper)) .registerModule(FlatteningSerializer.getModule(simpleMapper())) .registerModule(FlatteningDeserializer.getModule(simpleMapper())); headerMapper = simpleMapper .copy() .configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true); } /** * Gets a static instance of {@link ObjectMapper} that doesn't handle flattening. * * @return an instance of {@link ObjectMapper}. */ protected ObjectMapper simpleMapper() { return simpleMapper; } /** * maintain singleton instance of the default serializer adapter. 
* * @return the default serializer */ public static synchronized SerializerAdapter createDefaultSerializerAdapter() { if (serializerAdapter == null) { serializerAdapter = new JacksonAdapter(); } return serializerAdapter; } /** * @return the original serializer type */ public ObjectMapper serializer() { return mapper; } @Override public String serialize(Object object, SerializerEncoding encoding) throws IOException { if (object == null) { return null; } StringWriter writer = new StringWriter(); if (encoding == SerializerEncoding.XML) { xmlMapper.writeValue(writer, object); } else { serializer().writeValue(writer, object); } return writer.toString(); } @Override @Override public String serializeList(List<?> list, CollectionFormat format) { if (list == null) { return null; } List<String> serialized = new ArrayList<>(); for (Object element : list) { String raw = serializeRaw(element); serialized.add(raw != null ? raw : ""); } return String.join(format.getDelimiter(), serialized); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, final Type type, SerializerEncoding encoding) throws IOException { if (value == null || value.isEmpty() || value.equals(BOM)) { return null; } if (value.startsWith(BOM)) { value = value.replaceFirst(BOM, ""); } final JavaType javaType = createJavaType(type); try { if (encoding == SerializerEncoding.XML) { return (T) xmlMapper.readValue(value, javaType); } else { return (T) serializer().readValue(value, javaType); } } catch (JsonParseException jpe) { throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe)); } } @Override public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException { if (deserializedHeadersType == null) { return null; } final String headersJsonString = headerMapper.writeValueAsString(headers); T deserializedHeaders = headerMapper.readValue(headersJsonString, createJavaType(deserializedHeadersType)); final Class<?> 
deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType); final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields(); for (final Field declaredField : declaredFields) { if (declaredField.isAnnotationPresent(HeaderCollection.class)) { final Type declaredFieldType = declaredField.getGenericType(); if (TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) { final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType); if (mapTypeArguments.length == 2 && mapTypeArguments[0] == String.class && mapTypeArguments[1] == String.class) { final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class); final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT); final int headerCollectionPrefixLength = headerCollectionPrefix.length(); if (headerCollectionPrefixLength > 0) { final Map<String, String> headerCollection = new HashMap<>(); for (final HttpHeader header : headers) { final String headerName = header.getName(); if (headerName.toLowerCase(Locale.ROOT).startsWith(headerCollectionPrefix)) { headerCollection.put(headerName.substring(headerCollectionPrefixLength), header.getValue()); } } final boolean declaredFieldAccessibleBackup = declaredField.isAccessible(); try { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaredField.setAccessible(true); return null; }); } declaredField.set(deserializedHeaders, headerCollection); } catch (IllegalAccessException ignored) { } finally { if (!declaredFieldAccessibleBackup) { AccessController.doPrivileged((PrivilegedAction<Object>) () -> { declaredField.setAccessible(declaredFieldAccessibleBackup); return null; }); } } } } } } } return deserializedHeaders; } /** * Initializes an instance of JacksonMapperAdapter with default configurations * applied to the object mapper. * * @param mapper the object mapper to use. 
*/ private static <T extends ObjectMapper> T initializeObjectMapper(T mapper) { mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false) .configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, true) .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false) .configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true) .setSerializationInclusion(JsonInclude.Include.NON_NULL) .registerModule(new JavaTimeModule()) .registerModule(ByteArraySerializer.getModule()) .registerModule(Base64UrlSerializer.getModule()) .registerModule(DateTimeSerializer.getModule()) .registerModule(DateTimeRfc1123Serializer.getModule()) .registerModule(DurationSerializer.getModule()) .registerModule(HttpHeadersSerializer.getModule()); mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker() .withFieldVisibility(JsonAutoDetect.Visibility.ANY) .withSetterVisibility(JsonAutoDetect.Visibility.NONE) .withGetterVisibility(JsonAutoDetect.Visibility.NONE) .withIsGetterVisibility(JsonAutoDetect.Visibility.NONE)); return mapper; } private JavaType createJavaType(Type type) { JavaType result; if (type == null) { result = null; } else if (type instanceof JavaType) { result = (JavaType) type; } else if (type instanceof ParameterizedType) { final ParameterizedType parameterizedType = (ParameterizedType) type; final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length]; for (int i = 0; i != actualTypeArguments.length; i++) { javaTypeArguments[i] = createJavaType(actualTypeArguments[i]); } result = mapper .getTypeFactory().constructParametricType((Class<?>) parameterizedType.getRawType(), javaTypeArguments); } else { result = mapper .getTypeFactory().constructType(type); } return result; } }
I modified this logic to work regardless of the position of the delimiter.
public void handle(Buffer buffer) { try { byte[] bytes = buffer.getBytes(); MediaType mediaType = response.getMediaType(); if (isNewlineDelimited) { String charset = mediaType.getParameters().get(MediaType.CHARSET_PARAMETER); charset = charset == null ? "UTF-8" : charset; byte[] separator = "\n".getBytes(charset); int start = 0; while (start < bytes.length) { int end = bytes.length; for (int i = start; i < end; i++) { if (bytes[i] == separator[0]) { int j; boolean matches = true; for (j = 1; j < separator.length; j++) { if (bytes[i + j] != separator[j]) { matches = false; break; } } if (matches) { end = i; break; } } } if (start < end) { ByteArrayInputStream in = new ByteArrayInputStream(bytes, start, end); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } start = end + separator.length; } } else { ByteArrayInputStream in = new ByteArrayInputStream(bytes); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } } catch (Throwable t) { multiRequest.emitter.fail(t); } }
String charset = mediaType.getParameters().get(MediaType.CHARSET_PARAMETER);
public void handle(Buffer buffer) { try { byte[] bytes = buffer.getBytes(); MediaType mediaType = response.getMediaType(); if (isNewlineDelimited) { String charset = mediaType.getParameters().get(MediaType.CHARSET_PARAMETER); charset = charset == null ? "UTF-8" : charset; byte[] separator = "\n".getBytes(charset); int start = 0; while (start < bytes.length) { int end = bytes.length; for (int i = start; i < end; i++) { if (bytes[i] == separator[0]) { int j; boolean matches = true; for (j = 1; j < separator.length; j++) { if (bytes[i + j] != separator[j]) { matches = false; break; } } if (matches) { end = i; break; } } } if (start < end) { ByteArrayInputStream in = new ByteArrayInputStream(bytes, start, end); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } start = end + separator.length; } } else { ByteArrayInputStream in = new ByteArrayInputStream(bytes); R item = restClientRequestContext.readEntity(in, responseType, mediaType, response.getMetadata()); multiRequest.emitter.emit(item); } } catch (Throwable t) { multiRequest.emitter.fail(t); } }
class MultiRequest<R> { private final AtomicReference<Runnable> onCancel = new AtomicReference<>(); private final MultiEmitter<? super R> emitter; private static final Runnable CLEARED = () -> { }; public MultiRequest(MultiEmitter<? super R> emitter) { this.emitter = emitter; emitter.onTermination(() -> { if (emitter.isCancelled()) { this.cancel(); } }); } void emit(R item) { if (!isCancelled()) { emitter.emit(item); } } void fail(Throwable t) { if (!isCancelled()) { emitter.fail(t); cancel(); } } void complete() { if (!isCancelled()) { emitter.complete(); cancel(); } } public boolean isCancelled() { return onCancel.get() == CLEARED; } private void cancel() { Runnable action = onCancel.getAndSet(CLEARED); if (action != null && action != CLEARED) { action.run(); } } public void onCancel(Runnable onCancel) { if (this.onCancel.compareAndSet(null, onCancel)) { } else if (this.onCancel.get() == CLEARED) { if (onCancel != null) onCancel.run(); } else { throw new IllegalArgumentException("onCancel was already called"); } } }
class MultiRequest<R> { private final AtomicReference<Runnable> onCancel = new AtomicReference<>(); private final MultiEmitter<? super R> emitter; private static final Runnable CLEARED = () -> { }; public MultiRequest(MultiEmitter<? super R> emitter) { this.emitter = emitter; emitter.onTermination(() -> { if (emitter.isCancelled()) { this.cancel(); } }); } void emit(R item) { if (!isCancelled()) { emitter.emit(item); } } void fail(Throwable t) { if (!isCancelled()) { emitter.fail(t); cancel(); } } void complete() { if (!isCancelled()) { emitter.complete(); cancel(); } } public boolean isCancelled() { return onCancel.get() == CLEARED; } private void cancel() { Runnable action = onCancel.getAndSet(CLEARED); if (action != null && action != CLEARED) { action.run(); } } public void onCancel(Runnable onCancel) { if (this.onCancel.compareAndSet(null, onCancel)) { } else if (this.onCancel.get() == CLEARED) { if (onCancel != null) onCancel.run(); } else { throw new IllegalArgumentException("onCancel was already called"); } } }
This is an improvement. I'm a bit sceptical of the method API: Usually, some condition has led to the caller wanting to set this node to dirty. I would guess that whole condition needs to be reevaluated under the unallocated lock? More than just the presence of the node may have changed between testing the condition and coming to this point where the lock is held.
private List<Node> performOn(NodeFilter filter, BiFunction<Node, Mutex, Node> action) { List<Node> unallocatedNodes = new ArrayList<>(); ListMap<ApplicationId, Node> allocatedNodes = new ListMap<>(); for (Node node : db.readNodes()) { if ( ! filter.matches(node)) continue; if (node.allocation().isPresent()) allocatedNodes.put(node.allocation().get().owner(), node); else unallocatedNodes.add(node); } List<Node> resultingNodes = new ArrayList<>(); try (Mutex lock = lockUnallocated()) { for (Node node : unallocatedNodes) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } for (Map.Entry<ApplicationId, List<Node>> applicationNodes : allocatedNodes.entrySet()) { try (Mutex lock = lock(applicationNodes.getKey())) { for (Node node : applicationNodes.getValue()) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } } return resultingNodes; }
Optional<Node> currentNode = db.readNode(node.hostname());
private List<Node> performOn(NodeFilter filter, BiFunction<Node, Mutex, Node> action) { List<Node> unallocatedNodes = new ArrayList<>(); ListMap<ApplicationId, Node> allocatedNodes = new ListMap<>(); for (Node node : db.readNodes()) { if ( ! filter.matches(node)) continue; if (node.allocation().isPresent()) allocatedNodes.put(node.allocation().get().owner(), node); else unallocatedNodes.add(node); } List<Node> resultingNodes = new ArrayList<>(); try (Mutex lock = lockUnallocated()) { for (Node node : unallocatedNodes) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } for (Map.Entry<ApplicationId, List<Node>> applicationNodes : allocatedNodes.entrySet()) { try (Mutex lock = lock(applicationNodes.getKey())) { for (Node node : applicationNodes.getValue()) { Optional<Node> currentNode = db.readNode(node.hostname()); if (currentNode.isEmpty()) continue; resultingNodes.add(action.apply(currentNode.get(), lock)); } } } return resultingNodes; }
/**
 * The top-level service of the node repository: keeps track of all nodes and their state,
 * backed by a {@link CuratorDatabaseClient}, and mediates all state transitions
 * (ready, reserved, active, inactive, dirty, failed, parked, deprovisioned).
 */
class NodeRepository extends AbstractComponent {

    private static final Logger log = Logger.getLogger(NodeRepository.class.getName());

    private final CuratorDatabaseClient db;
    private final Clock clock;
    private final Zone zone;
    private final NodeFlavors flavors;
    private final HostResourcesCalculator resourcesCalculator;
    private final NameResolver nameResolver;
    private final OsVersions osVersions;
    private final InfrastructureVersions infrastructureVersions;
    private final FirmwareChecks firmwareChecks;
    private final DockerImages dockerImages;
    private final JobControl jobControl;
    private final Applications applications;
    private final boolean canProvisionHosts;
    private final int spareCount;

    /**
     * Creates a node repository from a zookeeper provider.
     * This will use the system time to make time-sensitive decisions
     */
    @Inject
    public NodeRepository(NodeRepositoryConfig config,
                          NodeFlavors flavors,
                          ProvisionServiceProvider provisionServiceProvider,
                          Curator curator,
                          Zone zone,
                          FlagSource flagSource) {
        // Spares are only kept in production zones where hosts cannot be provisioned on demand
        this(flavors,
             provisionServiceProvider.getHostResourcesCalculator(),
             curator,
             Clock.systemUTC(),
             zone,
             new DnsNameResolver(),
             DockerImage.fromString(config.dockerImage()),
             flagSource,
             config.useCuratorClientCache(),
             provisionServiceProvider.getHostProvisioner().isPresent(),
             zone.environment().isProduction() && provisionServiceProvider.getHostProvisioner().isEmpty() ? 1 : 0,
             config.nodeCacheSize());
    }

    /**
     * Creates a node repository from a zookeeper provider and a clock instance
     * which will be used for time-sensitive decisions.
     */
    public NodeRepository(NodeFlavors flavors,
                          HostResourcesCalculator resourcesCalculator,
                          Curator curator,
                          Clock clock,
                          Zone zone,
                          NameResolver nameResolver,
                          DockerImage dockerImage,
                          FlagSource flagSource,
                          boolean useCuratorClientCache,
                          boolean canProvisionHosts,
                          int spareCount,
                          long nodeCacheSize) {
        this.db = new CuratorDatabaseClient(flavors, curator, clock, zone, useCuratorClientCache, nodeCacheSize);
        this.zone = zone;
        this.clock = clock;
        this.flavors = flavors;
        this.resourcesCalculator = resourcesCalculator;
        this.nameResolver = nameResolver;
        this.osVersions = new OsVersions(this);
        this.infrastructureVersions = new InfrastructureVersions(db);
        this.firmwareChecks = new FirmwareChecks(db, clock);
        this.dockerImages = new DockerImages(db, dockerImage);
        this.jobControl = new JobControl(new JobControlFlags(db, flagSource));
        this.applications = new Applications(db);
        this.canProvisionHosts = canProvisionHosts;
        this.spareCount = spareCount;
        rewriteNodes(); // Migrate stored nodes to the current serialization format on startup
    }

    /** Read and write all nodes to make sure they are stored in the latest version of the serialized format */
    private void rewriteNodes() {
        Instant start = clock.instant();
        int nodesWritten = 0;
        for (State state : State.values()) {
            List<Node> nodes = db.readNodes(state);
            db.writeTo(state, nodes, Agent.system, Optional.empty());
            nodesWritten += nodes.size();
        }
        Instant end = clock.instant();
        log.log(Level.INFO, String.format("Rewrote %d nodes in %s", nodesWritten, Duration.between(start, end)));
    }

    /** Returns the curator database client used by this */
    public CuratorDatabaseClient database() { return db; }

    /** Returns the Docker image to use for given node */
    public DockerImage dockerImage(Node node) { return dockerImages.dockerImageFor(node.type()); }

    /** @return The name resolver used to resolve hostname and ip addresses */
    public NameResolver nameResolver() { return nameResolver; }

    /** Returns the OS versions to use for nodes in this */
    public OsVersions osVersions() { return osVersions; }

    /** Returns the infrastructure versions to use for nodes in this */
    public InfrastructureVersions infrastructureVersions() { return infrastructureVersions; }

    /** Returns the status of firmware checks for hosts managed by this. */
    public FirmwareChecks firmwareChecks() { return firmwareChecks; }

    /** Returns the docker images to use for nodes in this. */
    public DockerImages dockerImages() { return dockerImages; }

    /** Returns the status of maintenance jobs managed by this. */
    public JobControl jobControl() { return jobControl; }

    /** Returns this node repo's view of the applications deployed to it */
    public Applications applications() { return applications; }

    /** Returns the flavors known to this */
    public NodeFlavors flavors() { return flavors; }

    /** Returns the calculator used to translate between advertised and real host resources */
    public HostResourcesCalculator resourcesCalculator() { return resourcesCalculator; }

    /** The number of nodes we should ensure has free capacity for node failures whenever possible */
    public int spareCount() { return spareCount; }

    /**
     * Finds and returns the node with the hostname in any of the given states, or empty if not found
     *
     * @param hostname the full host name of the node
     * @param inState the states the node may be in. If no states are given, it will be returned from any state
     * @return the node, or empty if it was not found in any of the given states
     */
    public Optional<Node> getNode(String hostname, State ... inState) { return db.readNode(hostname, inState); }

    /**
     * Returns all nodes in any of the given states.
     *
     * @param inState the states to return nodes from. If no states are given, all nodes of the given type are returned
     * @return the nodes found in the given states (possibly empty, never null)
     */
    public List<Node> getNodes(State ... inState) { return new ArrayList<>(db.readNodes(inState)); }

    /**
     * Finds and returns the nodes of the given type in any of the given states.
     *
     * @param type the node type to return
     * @param inState the states to return nodes from. If no states are given, all nodes of the given type are returned
     * @return the nodes of the given type found in the given states (possibly empty, never null)
     */
    public List<Node> getNodes(NodeType type, State ... inState) {
        return db.readNodes(inState).stream().filter(node -> node.type().equals(type)).collect(Collectors.toList());
    }

    /** Returns a filterable list of nodes in this repository in any of the given states */
    public NodeList list(State ... inState) { return NodeList.copyOf(getNodes(inState)); }

    /** Returns a filterable list of all nodes of an application */
    public NodeList list(ApplicationId application) { return NodeList.copyOf(getNodes(application)); }

    /** Returns a locked list of all nodes in this repository */
    public LockedNodeList list(Mutex lock) { return new LockedNodeList(getNodes(), lock); }

    /** Returns a filterable list of all load balancers in this repository */
    public LoadBalancerList loadBalancers() { return loadBalancers((ignored) -> true); }

    /** Returns a filterable list of load balancers belonging to given application */
    public LoadBalancerList loadBalancers(ApplicationId application) {
        return loadBalancers((id) -> id.application().equals(application));
    }

    /** Returns the load balancers whose id matches the given predicate */
    private LoadBalancerList loadBalancers(Predicate<LoadBalancerId> predicate) {
        return LoadBalancerList.copyOf(db.readLoadBalancers(predicate).values());
    }

    /** Returns the nodes of the given application in any of the given states */
    public List<Node> getNodes(ApplicationId id, State ... inState) { return db.readNodes(id, inState); }

    /** Returns all nodes in the inactive state */
    public List<Node> getInactive() { return db.readNodes(State.inactive); }

    /** Returns all nodes in the failed state */
    public List<Node> getFailed() { return db.readNodes(State.failed); }

    /**
     * Returns the ACL for the node (trusted nodes, networks and ports)
     */
    private NodeAcl getNodeAcl(Node node, NodeList candidates) {
        Set<Node> trustedNodes = new TreeSet<>(Comparator.comparing(Node::hostname));
        Set<Integer> trustedPorts = new LinkedHashSet<>();
        Set<String> trustedNetworks = new LinkedHashSet<>();

        // SSH is always trusted
        trustedPorts.add(22);

        // A node's parent host is always trusted
        candidates.parentOf(node).ifPresent(trustedNodes::add);

        // An allocated node trusts its co-allocated nodes and its application's load balancer networks
        node.allocation().ifPresent(allocation -> {
            trustedNodes.addAll(candidates.owner(allocation.owner()).asList());
            loadBalancers(allocation.owner()).asList().stream()
                    .map(LoadBalancer::instance)
                    .map(LoadBalancerInstance::networks)
                    .forEach(trustedNetworks::addAll);
        });

        switch (node.type()) {
            case tenant:
                // Tenant nodes trust config servers and proxies, plus the parents of their application's nodes
                trustedNodes.addAll(candidates.nodeType(NodeType.config).asList());
                trustedNodes.addAll(candidates.nodeType(NodeType.proxy).asList());
                node.allocation().ifPresent(allocation ->
                        trustedNodes.addAll(candidates.parentsOf(candidates.owner(allocation.owner())).asList()));
                if (node.state() == State.ready) {
                    // Ready tenant nodes additionally trust all other tenant nodes
                    trustedNodes.addAll(candidates.nodeType(NodeType.tenant).asList());
                }
                break;

            case config:
                // Config servers trust all nodes, and expose their config port
                trustedNodes.addAll(candidates.asList());
                trustedPorts.add(4443);
                break;

            case proxy:
                // Proxies trust config servers and expose their serving ports
                trustedNodes.addAll(candidates.nodeType(NodeType.config).asList());
                trustedPorts.add(443);
                trustedPorts.add(4080);
                trustedPorts.add(4443);
                break;

            case controller:
                trustedPorts.add(4443);
                trustedPorts.add(443);
                trustedPorts.add(80);
                break;

            default:
                illegal("Don't know how to create ACL for " + node + " of type " + node.type());
        }

        return new NodeAcl(node, trustedNodes, trustedNetworks, trustedPorts);
    }

    /**
     * Creates a list of node ACLs which identify which nodes the given node should trust
     *
     * @param node Node for which to generate ACLs
     * @param children Return ACLs for the children of the given node (e.g. containers on a Docker host)
     * @return List of node ACLs
     */
    public List<NodeAcl> getNodeAcls(Node node, boolean children) {
        NodeList candidates = list();
        if (children) {
            return candidates.childrenOf(node).asList().stream()
                    .map(childNode -> getNodeAcl(childNode, candidates))
                    .collect(Collectors.collectingAndThen(Collectors.toList(), Collections::unmodifiableList));
        }
        return Collections.singletonList(getNodeAcl(node, candidates));
    }

    /** Creates a new node object, without adding it to the node repo. If no IP address is given, it will be resolved */
    public Node createNode(String openStackId, String hostname, IP.Config ipConfig, Optional<String> parentHostname,
                           Flavor flavor, Optional<TenantName> reservedTo, NodeType type) {
        if (ipConfig.primary().isEmpty()) // no IP addresses given, resolve them from the hostname
            ipConfig = ipConfig.with(nameResolver.getAllByNameOrThrow(hostname));
        return Node.create(openStackId, ipConfig, hostname, parentHostname, Optional.empty(), flavor, reservedTo, type, Optional.empty());
    }

    /** Creates a new node object with no IP config, without adding it to the node repo */
    public Node createNode(String openStackId, String hostname, Optional<String> parentHostname, Flavor flavor, NodeType type) {
        return createNode(openStackId, hostname, IP.Config.EMPTY, parentHostname, flavor, Optional.empty(), type);
    }

    /** Adds a list of newly created docker container nodes to the node repository as <i>reserved</i> nodes */
    public List<Node> addDockerNodes(LockedNodeList nodes) {
        for (Node node : nodes) {
            if ( ! node.flavor().getType().equals(Flavor.Type.DOCKER_CONTAINER))
                illegal("Cannot add " + node + ": This is not a docker node");
            if ( ! node.allocation().isPresent())
                illegal("Cannot add " + node + ": Docker containers needs to be allocated");
            Optional<Node> existing = getNode(node.hostname());
            if (existing.isPresent())
                illegal("Cannot add " + node + ": A node with this name already exists (" +
                        existing.get() + ", " + existing.get().history() + "). Node to be added: " +
                        node + ", " + node.history());
        }
        return db.addNodesInState(nodes.asList(), State.reserved, Agent.system);
    }

    /**
     * Adds a list of (newly created) nodes to the node repository as <i>provisioned</i> nodes.
     * If any of the nodes already exists in the deprovisioned state, the new node will be merged
     * with the history of that node.
     */
    public List<Node> addNodes(List<Node> nodes, Agent agent) {
        try (Mutex lock = lockUnallocated()) {
            List<Node> nodesToAdd =  new ArrayList<>();
            List<Node> nodesToRemove = new ArrayList<>();
            for (int i = 0; i < nodes.size(); i++) {
                var node = nodes.get(i);

                // Check for duplicates in the argument list itself
                for (int j = 0; j < i; j++) {
                    if (node.equals(nodes.get(j)))
                        illegal("Cannot add nodes: " + node + " is duplicated in the argument list");
                }

                Optional<Node> existing = getNode(node.hostname());
                if (existing.isPresent()) {
                    if (existing.get().state() != State.deprovisioned)
                        illegal("Cannot add " + node + ": A node with this name already exists");
                    // Carry over history, reports, fail count and firmware check from the deprovisioned node
                    node = node.with(existing.get().history());
                    node = node.with(existing.get().reports());
                    node = node.with(node.status().withFailCount(existing.get().status().failCount()));
                    if (existing.get().status().firmwareVerifiedAt().isPresent())
                        node = node.with(node.status().withFirmwareVerifiedAt(existing.get().status().firmwareVerifiedAt().get()));
                    nodesToRemove.add(existing.get());
                }
                nodesToAdd.add(node);
            }
            List<Node> resultingNodes = db.addNodesInState(IP.Config.verify(nodesToAdd, list(lock)), State.provisioned, agent);
            db.removeNodes(nodesToRemove); // remove the deprovisioned incarnations that were merged
            return resultingNodes;
        }
    }

    /** Sets a list of nodes ready and returns the nodes in the ready state */
    public List<Node> setReady(List<Node> nodes, Agent agent, String reason) {
        try (Mutex lock = lockUnallocated()) {
            List<Node> nodesWithResetFields = nodes.stream()
                    .map(node -> {
                        if (node.state() != State.provisioned && node.state() != State.dirty)
                            illegal("Can not set " + node + " ready. It is not provisioned or dirty.");
                        return node.withWantToRetire(false, false, Agent.system, clock.instant());
                    })
                    .collect(Collectors.toList());
            return db.writeTo(State.ready, nodesWithResetFields, agent, Optional.of(reason));
        }
    }

    /** Sets the node with the given hostname ready, or returns it unchanged if it is already ready */
    public Node setReady(String hostname, Agent agent, String reason) {
        Node nodeToReady = getNode(hostname).orElseThrow(() ->
                new NoSuchNodeException("Could not move " + hostname + " to ready: Node not found"));
        if (nodeToReady.state() == State.ready) return nodeToReady;
        return setReady(Collections.singletonList(nodeToReady), agent, reason).get(0);
    }

    /** Reserve nodes. This method does <b>not</b> lock the node repository */
    public List<Node> reserve(List<Node> nodes) { return db.writeTo(State.reserved, nodes, Agent.application, Optional.empty()); }

    /** Activate nodes. This method does <b>not</b> lock the node repository */
    public List<Node> activate(List<Node> nodes, NestedTransaction transaction) {
        return db.writeTo(State.active, nodes, Agent.application, Optional.empty(), transaction);
    }

    /**
     * Sets a list of nodes to have their allocation removable (active to inactive) in the node repository.
     *
     * @param application the application the nodes belong to
     * @param nodes the nodes to make removable. These nodes MUST be in the active state.
     */
    public void setRemovable(ApplicationId application, List<Node> nodes) {
        try (Mutex lock = lock(application)) {
            List<Node> removableNodes = nodes.stream().map(node -> node.with(node.allocation().get().removable(true)))
                                             .collect(Collectors.toList());
            write(removableNodes, lock);
        }
    }

    /** Deactivate nodes owned by application guarded by given lock */
    public void deactivate(NestedTransaction transaction, ProvisionLock lock) {
        deactivate(db.readNodes(lock.application(), State.reserved, State.active), transaction, lock);
        applications.remove(lock.application(), transaction, lock);
    }

    /**
     * Deactivates these nodes in a transaction and returns the nodes in the new state which will hold if the
     * transaction commits.
     */
    public List<Node> deactivate(List<Node> nodes, NestedTransaction transaction, @SuppressWarnings("unused") ProvisionLock lock) {
        return db.writeTo(State.inactive, nodes, Agent.application, Optional.empty(), transaction);
    }

    /** Move nodes to the dirty state */
    public List<Node> setDirty(List<Node> nodes, Agent agent, String reason) {
        return performOn(NodeListFilter.from(nodes), (node, lock) -> setDirty(node, agent, reason));
    }

    /**
     * Set a node dirty, allowed if it is in the provisioned, inactive, failed or parked state.
     * Use this to clean newly provisioned nodes or to recycle failed nodes which have been repaired or put on hold.
     *
     * @throws IllegalArgumentException if the node has hardware failure
     */
    public Node setDirty(Node node, Agent agent, String reason) {
        return db.writeTo(State.dirty, node, agent, Optional.of(reason));
    }

    /** Moves the node with the given hostname, and for hosts also its children, to the dirty state */
    public List<Node> dirtyRecursively(String hostname, Agent agent, String reason) {
        Node nodeToDirty = getNode(hostname).orElseThrow(() ->
                new IllegalArgumentException("Could not deallocate " + hostname + ": Node not found"));

        // For a host, include its children; skip nodes already dirty
        List<Node> nodesToDirty =
                (nodeToDirty.type().isHost() ?
                        Stream.concat(list().childrenOf(hostname).asList().stream(), Stream.of(nodeToDirty)) :
                        Stream.of(nodeToDirty))
                .filter(node -> node.state() != State.dirty)
                .collect(Collectors.toList());

        List<String> hostnamesNotAllowedToDirty = nodesToDirty.stream()
                .filter(node -> node.state() != State.provisioned)
                .filter(node -> node.state() != State.failed)
                .filter(node -> node.state() != State.parked)
                .map(Node::hostname)
                .collect(Collectors.toList());
        if ( ! hostnamesNotAllowedToDirty.isEmpty())
            illegal("Could not deallocate " + nodeToDirty + ": " +
                    hostnamesNotAllowedToDirty + " are not in states [provisioned, failed, parked]");

        return nodesToDirty.stream().map(node -> setDirty(node, agent, reason)).collect(Collectors.toList());
    }

    /**
     * Fails this node and returns it in its new state.
     *
     * @return the node in its new state
     * @throws NoSuchNodeException if the node is not found
     */
    public Node fail(String hostname, Agent agent, String reason) {
        return move(hostname, true, State.failed, agent, Optional.of(reason));
    }

    /**
     * Fails all the nodes that are children of hostname before finally failing the hostname itself.
     *
     * @return List of all the failed nodes in their new state
     */
    public List<Node> failRecursively(String hostname, Agent agent, String reason) {
        return moveRecursively(hostname, State.failed, agent, Optional.of(reason));
    }

    /**
     * Parks this node and returns it in its new state.
     *
     * @return the node in its new state
     * @throws NoSuchNodeException if the node is not found
     */
    public Node park(String hostname, boolean keepAllocation, Agent agent, String reason) {
        return move(hostname, keepAllocation, State.parked, agent, Optional.of(reason));
    }

    /**
     * Parks all the nodes that are children of hostname before finally parking the hostname itself.
     *
     * @return List of all the parked nodes in their new state
     */
    public List<Node> parkRecursively(String hostname, Agent agent, String reason) {
        return moveRecursively(hostname, State.parked, agent, Optional.of(reason));
    }

    /**
     * Moves a previously failed or parked node back to the active state.
     *
     * @return the node in its new state
     * @throws NoSuchNodeException if the node is not found
     */
    public Node reactivate(String hostname, Agent agent, String reason) {
        return move(hostname, true, State.active, agent, Optional.of(reason));
    }

    /** Moves all children of the given hostname, then the hostname itself, to the given state */
    private List<Node> moveRecursively(String hostname, State toState, Agent agent, Optional<String> reason) {
        List<Node> moved = list().childrenOf(hostname).asList().stream()
                                 .map(child -> move(child, toState, agent, reason))
                                 .collect(Collectors.toList());
        moved.add(move(hostname, true, toState, agent, reason));
        return moved;
    }

    /** Moves the node with the given hostname to the given state, optionally dropping its allocation first */
    private Node move(String hostname, boolean keepAllocation, State toState, Agent agent, Optional<String> reason) {
        Node node = getNode(hostname).orElseThrow(() ->
                new NoSuchNodeException("Could not move " + hostname + " to " + toState + ": Node not found"));
        if (!keepAllocation && node.allocation().isPresent()) {
            node = node.withoutAllocation();
        }
        return move(node, toState, agent, reason);
    }

    /** Moves a node to the given state, under the appropriate lock */
    private Node move(Node node, State toState, Agent agent, Optional<String> reason) {
        if (toState == Node.State.active && node.allocation().isEmpty())
            illegal("Could not set " + node + " active. It has no allocation.");

        try (Mutex lock = lock(node)) {
            if (toState == State.active) {
                // Guard against two active nodes with the same cluster and index in the same application
                for (Node currentActive : getNodes(node.allocation().get().owner(), State.active)) {
                    if (node.allocation().get().membership().cluster().equals(currentActive.allocation().get().membership().cluster())
                        && node.allocation().get().membership().index() == currentActive.allocation().get().membership().index())
                        illegal("Could not set " + node + " active: Same cluster and index as " + currentActive);
                }
            }
            return db.writeTo(toState, node, agent, reason);
        }
    }

    /**
     * This method is used by the REST API to handle readying nodes for new allocations. For tenant docker
     * containers this will remove the node from node repository, otherwise the node will be moved to state ready.
     */
    public Node markNodeAvailableForNewAllocation(String hostname, Agent agent, String reason) {
        Node node = getNode(hostname).orElseThrow(() -> new NotFoundException("No node with hostname '" + hostname + "'"));
        if (node.flavor().getType() == Flavor.Type.DOCKER_CONTAINER && node.type() == NodeType.tenant) {
            if (node.state() != State.dirty)
                illegal("Cannot make " + node + " available for new allocation as it is not in state [dirty]");
            return removeRecursively(node, true).get(0);
        }

        if (node.state() == State.ready) return node;

        // A node cannot be readied if its parent host has hard failures
        Node parentHost = node.parentHostname().flatMap(this::getNode).orElse(node);
        List<String> failureReasons = NodeFailer.reasonsToFailParentHost(parentHost);
        if ( ! failureReasons.isEmpty())
            illegal(node + " cannot be readied because it has hard failures: " + failureReasons);

        return setReady(Collections.singletonList(node), agent, reason).get(0);
    }

    /**
     * Removes all the nodes that are children of hostname before finally removing the hostname itself.
     *
     * @return a List of all the nodes that have been removed or (for hosts) deprovisioned
     */
    public List<Node> removeRecursively(String hostname) {
        Node node = getNode(hostname).orElseThrow(() -> new NotFoundException("No node with hostname '" + hostname + "'"));
        return removeRecursively(node, false);
    }

    /** Removes the given node and (for hosts) its children; with force, state checks are skipped */
    public List<Node> removeRecursively(Node node, boolean force) {
        try (Mutex lock = lockUnallocated()) {
            requireRemovable(node, false, force);

            if (node.type().isHost()) {
                List<Node> children = list().childrenOf(node).asList();
                children.forEach(child -> requireRemovable(child, true, force));
                db.removeNodes(children);
                List<Node> removed = new ArrayList<>(children);
                // Statically provisioned hosts are deprovisioned (kept with history), others removed entirely
                if (zone.getCloud().dynamicProvisioning() || node.type() != NodeType.host)
                    db.removeNodes(List.of(node));
                else {
                    node = node.with(IP.Config.EMPTY);
                    move(node, State.deprovisioned, Agent.system, Optional.empty());
                }
                removed.add(node);
                return removed;
            }
            else {
                List<Node> removed = List.of(node);
                db.removeNodes(removed);
                return removed;
            }
        }
    }

    /** Forgets a deprovisioned node. This removes all traces of the node in the node repository. */
    public void forget(Node node) {
        if (node.state() != State.deprovisioned)
            throw new IllegalArgumentException(node + " must be deprovisioned before it can be forgotten");
        db.removeNodes(List.of(node));
    }

    /**
     * Throws if the given node cannot be removed. Removal is allowed if:
     *  - Tenant node: node is unallocated
     *  - Host node: iff in state provisioned|failed|parked
     *  - Child node:
     *      If only removing the container node: node in state ready
     *      If also removing the parent node: child is in state provisioned|failed|parked|dirty|ready
     */
    private void requireRemovable(Node node, boolean removingAsChild, boolean force) {
        if (force) return;

        if (node.type() == NodeType.tenant && node.allocation().isPresent())
            illegal(node + " is currently allocated and cannot be removed");

        if (!node.type().isHost() && !removingAsChild) {
            if (node.state() != State.ready)
                illegal(node + " can not be removed as it is not in the state " + State.ready);
        }
        else if (!node.type().isHost()) { // removing a child node
            Set<State> legalStates = EnumSet.of(State.provisioned, State.failed, State.parked, State.dirty, State.ready);
            if ( ! legalStates.contains(node.state()))
                illegal(node + " can not be removed as it is not in the states " + legalStates);
        }
        else { // a host
            Set<State> legalStates = EnumSet.of(State.provisioned, State.failed, State.parked);
            if (! legalStates.contains(node.state()))
                illegal(node + " can not be removed as it is not in the states " + legalStates);
        }
    }

    /**
     * Increases the restart generation of the active nodes matching the filter.
     *
     * @return the nodes in their new state.
     */
    public List<Node> restart(NodeFilter filter) {
        return performOn(StateFilter.from(State.active, filter),
                         (node, lock) -> write(node.withRestart(node.allocation().get().restartGeneration().withIncreasedWanted()), lock));
    }

    /**
     * Increases the reboot generation of the nodes matching the filter.
     *
     * @return the nodes in their new state.
     */
    public List<Node> reboot(NodeFilter filter) {
        return performOn(filter, (node, lock) -> write(node.withReboot(node.status().reboot().withIncreasedWanted()), lock));
    }

    /**
     * Set target OS version of all nodes matching given filter.
     *
     * @return the nodes in their new state.
     */
    public List<Node> upgradeOs(NodeFilter filter, Optional<Version> version) {
        return performOn(filter, (node, lock) -> {
            var newStatus = node.status().withOsVersion(node.status().osVersion().withWanted(version));
            return write(node.with(newStatus), lock);
        });
    }

    /** Retire nodes matching given filter */
    public List<Node> retire(NodeFilter filter, Agent agent, Instant instant) {
        return performOn(filter, (node, lock) -> write(node.withWantToRetire(true, agent, instant), lock));
    }

    /**
     * Writes this node after it has changed some internal state but NOT changed its state field.
     * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock.
     *
     * @param lock Already acquired lock
     * @return the written node for convenience
     */
    public Node write(Node node, Mutex lock) { return write(List.of(node), lock).get(0); }

    /**
     * Writes these nodes after they have changed some internal state but NOT changed their state field.
     * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock.
     *
     * @param lock already acquired lock
     * @return the written nodes for convenience
     */
    public List<Node> write(List<Node> nodes, @SuppressWarnings("unused") Mutex lock) {
        return db.writeTo(nodes, Agent.system, Optional.empty());
    }

    /**
     * Performs an operation requiring locking on all nodes matching some filter.
     *
     * @param filter the filter determining the set of nodes where the operation will be performed
     * @param action the action to perform
     * @return the set of nodes on which the action was performed, as they became as a result of the operation
     */
    // NOTE(review): the Javadoc above documents performOn(NodeFilter, BiFunction), whose body is not
    // present at this point in the source as shown — verify placement against the full file.

    /** Returns whether a tenant node can be allocated to the given host, based on its type, retirement and state */
    public boolean canAllocateTenantNodeTo(Node host) {
        if ( ! host.type().canRun(NodeType.tenant)) return false;
        if (host.status().wantToRetire()) return false;
        if (host.allocation().map(alloc -> alloc.membership().retired()).orElse(false)) return false;

        // When hosts can be provisioned on demand, hosts not yet active may also receive allocations
        if ( canProvisionHosts())
            return EnumSet.of(State.active, State.ready, State.provisioned).contains(host.state());
        else
            return host.state() == State.active;
    }

    /** Returns whether this repository can provision hosts on demand */
    public boolean canProvisionHosts() { return canProvisionHosts; }

    /** Returns the time keeper of this system */
    public Clock clock() { return clock; }

    /** Returns the zone of this system */
    public Zone zone() { return zone; }

    /** Create a lock which provides exclusive rights to making changes to the given application */
    public Mutex lock(ApplicationId application) { return db.lock(application); }

    /** Create a lock with a timeout which provides exclusive rights to making changes to the given application */
    public Mutex lock(ApplicationId application, Duration timeout) { return db.lock(application, timeout); }

    /** Create a lock which provides exclusive rights to modifying unallocated nodes */
    public Mutex lockUnallocated() { return db.lockInactive(); }

    /** Acquires the appropriate lock for this node */
    public Mutex lock(Node node) {
        return node.allocation().isPresent() ? lock(node.allocation().get().owner()) : lockUnallocated();
    }

    private void illegal(String message) { throw new IllegalArgumentException(message); }

}
class NodeRepository extends AbstractComponent { private static final Logger log = Logger.getLogger(NodeRepository.class.getName()); private final CuratorDatabaseClient db; private final Clock clock; private final Zone zone; private final NodeFlavors flavors; private final HostResourcesCalculator resourcesCalculator; private final NameResolver nameResolver; private final OsVersions osVersions; private final InfrastructureVersions infrastructureVersions; private final FirmwareChecks firmwareChecks; private final DockerImages dockerImages; private final JobControl jobControl; private final Applications applications; private final boolean canProvisionHosts; private final int spareCount; /** * Creates a node repository from a zookeeper provider. * This will use the system time to make time-sensitive decisions */ @Inject public NodeRepository(NodeRepositoryConfig config, NodeFlavors flavors, ProvisionServiceProvider provisionServiceProvider, Curator curator, Zone zone, FlagSource flagSource) { this(flavors, provisionServiceProvider.getHostResourcesCalculator(), curator, Clock.systemUTC(), zone, new DnsNameResolver(), DockerImage.fromString(config.dockerImage()), flagSource, config.useCuratorClientCache(), provisionServiceProvider.getHostProvisioner().isPresent(), zone.environment().isProduction() && provisionServiceProvider.getHostProvisioner().isEmpty() ? 1 : 0, config.nodeCacheSize()); } /** * Creates a node repository from a zookeeper provider and a clock instance * which will be used for time-sensitive decisions. 
*/ public NodeRepository(NodeFlavors flavors, HostResourcesCalculator resourcesCalculator, Curator curator, Clock clock, Zone zone, NameResolver nameResolver, DockerImage dockerImage, FlagSource flagSource, boolean useCuratorClientCache, boolean canProvisionHosts, int spareCount, long nodeCacheSize) { this.db = new CuratorDatabaseClient(flavors, curator, clock, zone, useCuratorClientCache, nodeCacheSize); this.zone = zone; this.clock = clock; this.flavors = flavors; this.resourcesCalculator = resourcesCalculator; this.nameResolver = nameResolver; this.osVersions = new OsVersions(this); this.infrastructureVersions = new InfrastructureVersions(db); this.firmwareChecks = new FirmwareChecks(db, clock); this.dockerImages = new DockerImages(db, dockerImage); this.jobControl = new JobControl(new JobControlFlags(db, flagSource)); this.applications = new Applications(db); this.canProvisionHosts = canProvisionHosts; this.spareCount = spareCount; rewriteNodes(); } /** Read and write all nodes to make sure they are stored in the latest version of the serialized format */ private void rewriteNodes() { Instant start = clock.instant(); int nodesWritten = 0; for (State state : State.values()) { List<Node> nodes = db.readNodes(state); db.writeTo(state, nodes, Agent.system, Optional.empty()); nodesWritten += nodes.size(); } Instant end = clock.instant(); log.log(Level.INFO, String.format("Rewrote %d nodes in %s", nodesWritten, Duration.between(start, end))); } /** Returns the curator database client used by this */ public CuratorDatabaseClient database() { return db; } /** Returns the Docker image to use for given node */ public DockerImage dockerImage(Node node) { return dockerImages.dockerImageFor(node.type()); } /** @return The name resolver used to resolve hostname and ip addresses */ public NameResolver nameResolver() { return nameResolver; } /** Returns the OS versions to use for nodes in this */ public OsVersions osVersions() { return osVersions; } /** Returns the 
infrastructure versions to use for nodes in this */ public InfrastructureVersions infrastructureVersions() { return infrastructureVersions; } /** Returns the status of firmware checks for hosts managed by this. */ public FirmwareChecks firmwareChecks() { return firmwareChecks; } /** Returns the docker images to use for nodes in this. */ public DockerImages dockerImages() { return dockerImages; } /** Returns the status of maintenance jobs managed by this. */ public JobControl jobControl() { return jobControl; } /** Returns this node repo's view of the applications deployed to it */ public Applications applications() { return applications; } public NodeFlavors flavors() { return flavors; } public HostResourcesCalculator resourcesCalculator() { return resourcesCalculator; } /** The number of nodes we should ensure has free capacity for node failures whenever possible */ public int spareCount() { return spareCount; } /** * Finds and returns the node with the hostname in any of the given states, or empty if not found * * @param hostname the full host name of the node * @param inState the states the node may be in. If no states are given, it will be returned from any state * @return the node, or empty if it was not found in any of the given states */ public Optional<Node> getNode(String hostname, State ... inState) { return db.readNode(hostname, inState); } /** * Returns all nodes in any of the given states. * * @param inState the states to return nodes from. If no states are given, all nodes of the given type are returned * @return the node, or empty if it was not found in any of the given states */ public List<Node> getNodes(State ... inState) { return new ArrayList<>(db.readNodes(inState)); } /** * Finds and returns the nodes of the given type in any of the given states. * * @param type the node type to return * @param inState the states to return nodes from. 
If no states are given, all nodes of the given type are returned * @return the node, or empty if it was not found in any of the given states */ public List<Node> getNodes(NodeType type, State ... inState) { return db.readNodes(inState).stream().filter(node -> node.type().equals(type)).collect(Collectors.toList()); } /** Returns a filterable list of nodes in this repository in any of the given states */ public NodeList list(State ... inState) { return NodeList.copyOf(getNodes(inState)); } /** Returns a filterable list of all nodes of an application */ public NodeList list(ApplicationId application) { return NodeList.copyOf(getNodes(application)); } /** Returns a locked list of all nodes in this repository */ public LockedNodeList list(Mutex lock) { return new LockedNodeList(getNodes(), lock); } /** Returns a filterable list of all load balancers in this repository */ public LoadBalancerList loadBalancers() { return loadBalancers((ignored) -> true); } /** Returns a filterable list of load balancers belonging to given application */ public LoadBalancerList loadBalancers(ApplicationId application) { return loadBalancers((id) -> id.application().equals(application)); } private LoadBalancerList loadBalancers(Predicate<LoadBalancerId> predicate) { return LoadBalancerList.copyOf(db.readLoadBalancers(predicate).values()); } public List<Node> getNodes(ApplicationId id, State ... 
inState) { return db.readNodes(id, inState); } public List<Node> getInactive() { return db.readNodes(State.inactive); } public List<Node> getFailed() { return db.readNodes(State.failed); } /** * Returns the ACL for the node (trusted nodes, networks and ports) */ private NodeAcl getNodeAcl(Node node, NodeList candidates) { Set<Node> trustedNodes = new TreeSet<>(Comparator.comparing(Node::hostname)); Set<Integer> trustedPorts = new LinkedHashSet<>(); Set<String> trustedNetworks = new LinkedHashSet<>(); trustedPorts.add(22); candidates.parentOf(node).ifPresent(trustedNodes::add); node.allocation().ifPresent(allocation -> { trustedNodes.addAll(candidates.owner(allocation.owner()).asList()); loadBalancers(allocation.owner()).asList().stream() .map(LoadBalancer::instance) .map(LoadBalancerInstance::networks) .forEach(trustedNetworks::addAll); }); switch (node.type()) { case tenant: trustedNodes.addAll(candidates.nodeType(NodeType.config).asList()); trustedNodes.addAll(candidates.nodeType(NodeType.proxy).asList()); node.allocation().ifPresent(allocation -> trustedNodes.addAll(candidates.parentsOf(candidates.owner(allocation.owner())).asList())); if (node.state() == State.ready) { trustedNodes.addAll(candidates.nodeType(NodeType.tenant).asList()); } break; case config: trustedNodes.addAll(candidates.asList()); trustedPorts.add(4443); break; case proxy: trustedNodes.addAll(candidates.nodeType(NodeType.config).asList()); trustedPorts.add(443); trustedPorts.add(4080); trustedPorts.add(4443); break; case controller: trustedPorts.add(4443); trustedPorts.add(443); trustedPorts.add(80); break; default: illegal("Don't know how to create ACL for " + node + " of type " + node.type()); } return new NodeAcl(node, trustedNodes, trustedNetworks, trustedPorts); } /** * Creates a list of node ACLs which identify which nodes the given node should trust * * @param node Node for which to generate ACLs * @param children Return ACLs for the children of the given node (e.g. 
containers on a Docker host) * @return List of node ACLs */ public List<NodeAcl> getNodeAcls(Node node, boolean children) { NodeList candidates = list(); if (children) { return candidates.childrenOf(node).asList().stream() .map(childNode -> getNodeAcl(childNode, candidates)) .collect(Collectors.collectingAndThen(Collectors.toList(), Collections::unmodifiableList)); } return Collections.singletonList(getNodeAcl(node, candidates)); } /** Creates a new node object, without adding it to the node repo. If no IP address is given, it will be resolved */ public Node createNode(String openStackId, String hostname, IP.Config ipConfig, Optional<String> parentHostname, Flavor flavor, Optional<TenantName> reservedTo, NodeType type) { if (ipConfig.primary().isEmpty()) ipConfig = ipConfig.with(nameResolver.getAllByNameOrThrow(hostname)); return Node.create(openStackId, ipConfig, hostname, parentHostname, Optional.empty(), flavor, reservedTo, type, Optional.empty()); } public Node createNode(String openStackId, String hostname, Optional<String> parentHostname, Flavor flavor, NodeType type) { return createNode(openStackId, hostname, IP.Config.EMPTY, parentHostname, flavor, Optional.empty(), type); } /** Adds a list of newly created docker container nodes to the node repository as <i>reserved</i> nodes */ public List<Node> addDockerNodes(LockedNodeList nodes) { for (Node node : nodes) { if ( ! node.flavor().getType().equals(Flavor.Type.DOCKER_CONTAINER)) illegal("Cannot add " + node + ": This is not a docker node"); if ( ! node.allocation().isPresent()) illegal("Cannot add " + node + ": Docker containers needs to be allocated"); Optional<Node> existing = getNode(node.hostname()); if (existing.isPresent()) illegal("Cannot add " + node + ": A node with this name already exists (" + existing.get() + ", " + existing.get().history() + "). 
Node to be added: " + node + ", " + node.history()); } return db.addNodesInState(nodes.asList(), State.reserved, Agent.system); } /** * Adds a list of (newly created) nodes to the node repository as <i>provisioned</i> nodes. * If any of the nodes already exists in the deprovisioned state, the new node will be merged * with the history of that node. */ public List<Node> addNodes(List<Node> nodes, Agent agent) { try (Mutex lock = lockUnallocated()) { List<Node> nodesToAdd = new ArrayList<>(); List<Node> nodesToRemove = new ArrayList<>(); for (int i = 0; i < nodes.size(); i++) { var node = nodes.get(i); for (int j = 0; j < i; j++) { if (node.equals(nodes.get(j))) illegal("Cannot add nodes: " + node + " is duplicated in the argument list"); } Optional<Node> existing = getNode(node.hostname()); if (existing.isPresent()) { if (existing.get().state() != State.deprovisioned) illegal("Cannot add " + node + ": A node with this name already exists"); node = node.with(existing.get().history()); node = node.with(existing.get().reports()); node = node.with(node.status().withFailCount(existing.get().status().failCount())); if (existing.get().status().firmwareVerifiedAt().isPresent()) node = node.with(node.status().withFirmwareVerifiedAt(existing.get().status().firmwareVerifiedAt().get())); nodesToRemove.add(existing.get()); } nodesToAdd.add(node); } List<Node> resultingNodes = db.addNodesInState(IP.Config.verify(nodesToAdd, list(lock)), State.provisioned, agent); db.removeNodes(nodesToRemove); return resultingNodes; } } /** Sets a list of nodes ready and returns the nodes in the ready state */ public List<Node> setReady(List<Node> nodes, Agent agent, String reason) { try (Mutex lock = lockUnallocated()) { List<Node> nodesWithResetFields = nodes.stream() .map(node -> { if (node.state() != State.provisioned && node.state() != State.dirty) illegal("Can not set " + node + " ready. 
It is not provisioned or dirty."); return node.withWantToRetire(false, false, Agent.system, clock.instant()); }) .collect(Collectors.toList()); return db.writeTo(State.ready, nodesWithResetFields, agent, Optional.of(reason)); } } public Node setReady(String hostname, Agent agent, String reason) { Node nodeToReady = getNode(hostname).orElseThrow(() -> new NoSuchNodeException("Could not move " + hostname + " to ready: Node not found")); if (nodeToReady.state() == State.ready) return nodeToReady; return setReady(Collections.singletonList(nodeToReady), agent, reason).get(0); } /** Reserve nodes. This method does <b>not</b> lock the node repository */ public List<Node> reserve(List<Node> nodes) { return db.writeTo(State.reserved, nodes, Agent.application, Optional.empty()); } /** Activate nodes. This method does <b>not</b> lock the node repository */ public List<Node> activate(List<Node> nodes, NestedTransaction transaction) { return db.writeTo(State.active, nodes, Agent.application, Optional.empty(), transaction); } /** * Sets a list of nodes to have their allocation removable (active to inactive) in the node repository. * * @param application the application the nodes belong to * @param nodes the nodes to make removable. These nodes MUST be in the active state. */ public void setRemovable(ApplicationId application, List<Node> nodes) { try (Mutex lock = lock(application)) { List<Node> removableNodes = nodes.stream().map(node -> node.with(node.allocation().get().removable(true))) .collect(Collectors.toList()); write(removableNodes, lock); } } /** Deactivate nodes owned by application guarded by given lock */ public void deactivate(NestedTransaction transaction, ProvisionLock lock) { deactivate(db.readNodes(lock.application(), State.reserved, State.active), transaction, lock); applications.remove(lock.application(), transaction, lock); } /** * Deactivates these nodes in a transaction and returns the nodes in the new state which will hold if the * transaction commits. 
*/ public List<Node> deactivate(List<Node> nodes, NestedTransaction transaction, @SuppressWarnings("unused") ProvisionLock lock) { return db.writeTo(State.inactive, nodes, Agent.application, Optional.empty(), transaction); } /** Move nodes to the dirty state */ public List<Node> setDirty(List<Node> nodes, Agent agent, String reason) { return performOn(NodeListFilter.from(nodes), (node, lock) -> setDirty(node, agent, reason)); } /** * Set a node dirty, allowed if it is in the provisioned, inactive, failed or parked state. * Use this to clean newly provisioned nodes or to recycle failed nodes which have been repaired or put on hold. * * @throws IllegalArgumentException if the node has hardware failure */ public Node setDirty(Node node, Agent agent, String reason) { return db.writeTo(State.dirty, node, agent, Optional.of(reason)); } public List<Node> dirtyRecursively(String hostname, Agent agent, String reason) { Node nodeToDirty = getNode(hostname).orElseThrow(() -> new IllegalArgumentException("Could not deallocate " + hostname + ": Node not found")); List<Node> nodesToDirty = (nodeToDirty.type().isHost() ? Stream.concat(list().childrenOf(hostname).asList().stream(), Stream.of(nodeToDirty)) : Stream.of(nodeToDirty)) .filter(node -> node.state() != State.dirty) .collect(Collectors.toList()); List<String> hostnamesNotAllowedToDirty = nodesToDirty.stream() .filter(node -> node.state() != State.provisioned) .filter(node -> node.state() != State.failed) .filter(node -> node.state() != State.parked) .map(Node::hostname) .collect(Collectors.toList()); if ( ! hostnamesNotAllowedToDirty.isEmpty()) illegal("Could not deallocate " + nodeToDirty + ": " + hostnamesNotAllowedToDirty + " are not in states [provisioned, failed, parked]"); return nodesToDirty.stream().map(node -> setDirty(node, agent, reason)).collect(Collectors.toList()); } /** * Fails this node and returns it in its new state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node fail(String hostname, Agent agent, String reason) { return move(hostname, true, State.failed, agent, Optional.of(reason)); } /** * Fails all the nodes that are children of hostname before finally failing the hostname itself. * * @return List of all the failed nodes in their new state */ public List<Node> failRecursively(String hostname, Agent agent, String reason) { return moveRecursively(hostname, State.failed, agent, Optional.of(reason)); } /** * Parks this node and returns it in its new state. * * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node park(String hostname, boolean keepAllocation, Agent agent, String reason) { return move(hostname, keepAllocation, State.parked, agent, Optional.of(reason)); } /** * Parks all the nodes that are children of hostname before finally parking the hostname itself. * * @return List of all the parked nodes in their new state */ public List<Node> parkRecursively(String hostname, Agent agent, String reason) { return moveRecursively(hostname, State.parked, agent, Optional.of(reason)); } /** * Moves a previously failed or parked node back to the active state. 
* * @return the node in its new state * @throws NoSuchNodeException if the node is not found */ public Node reactivate(String hostname, Agent agent, String reason) { return move(hostname, true, State.active, agent, Optional.of(reason)); } private List<Node> moveRecursively(String hostname, State toState, Agent agent, Optional<String> reason) { List<Node> moved = list().childrenOf(hostname).asList().stream() .map(child -> move(child, toState, agent, reason)) .collect(Collectors.toList()); moved.add(move(hostname, true, toState, agent, reason)); return moved; } private Node move(String hostname, boolean keepAllocation, State toState, Agent agent, Optional<String> reason) { Node node = getNode(hostname).orElseThrow(() -> new NoSuchNodeException("Could not move " + hostname + " to " + toState + ": Node not found")); if (!keepAllocation && node.allocation().isPresent()) { node = node.withoutAllocation(); } return move(node, toState, agent, reason); } private Node move(Node node, State toState, Agent agent, Optional<String> reason) { if (toState == Node.State.active && node.allocation().isEmpty()) illegal("Could not set " + node + " active. It has no allocation."); try (Mutex lock = lock(node)) { if (toState == State.active) { for (Node currentActive : getNodes(node.allocation().get().owner(), State.active)) { if (node.allocation().get().membership().cluster().equals(currentActive.allocation().get().membership().cluster()) && node.allocation().get().membership().index() == currentActive.allocation().get().membership().index()) illegal("Could not set " + node + " active: Same cluster and index as " + currentActive); } } return db.writeTo(toState, node, agent, reason); } } /* * This method is used by the REST API to handle readying nodes for new allocations. For tenant docker * containers this will remove the node from node repository, otherwise the node will be moved to state ready. 
*/ public Node markNodeAvailableForNewAllocation(String hostname, Agent agent, String reason) { Node node = getNode(hostname).orElseThrow(() -> new NotFoundException("No node with hostname '" + hostname + "'")); if (node.flavor().getType() == Flavor.Type.DOCKER_CONTAINER && node.type() == NodeType.tenant) { if (node.state() != State.dirty) illegal("Cannot make " + node + " available for new allocation as it is not in state [dirty]"); return removeRecursively(node, true).get(0); } if (node.state() == State.ready) return node; Node parentHost = node.parentHostname().flatMap(this::getNode).orElse(node); List<String> failureReasons = NodeFailer.reasonsToFailParentHost(parentHost); if ( ! failureReasons.isEmpty()) illegal(node + " cannot be readied because it has hard failures: " + failureReasons); return setReady(Collections.singletonList(node), agent, reason).get(0); } /** * Removes all the nodes that are children of hostname before finally removing the hostname itself. * * @return a List of all the nodes that have been removed or (for hosts) deprovisioned */ public List<Node> removeRecursively(String hostname) { Node node = getNode(hostname).orElseThrow(() -> new NotFoundException("No node with hostname '" + hostname + "'")); return removeRecursively(node, false); } public List<Node> removeRecursively(Node node, boolean force) { try (Mutex lock = lockUnallocated()) { requireRemovable(node, false, force); if (node.type().isHost()) { List<Node> children = list().childrenOf(node).asList(); children.forEach(child -> requireRemovable(child, true, force)); db.removeNodes(children); List<Node> removed = new ArrayList<>(children); if (zone.getCloud().dynamicProvisioning() || node.type() != NodeType.host) db.removeNodes(List.of(node)); else { node = node.with(IP.Config.EMPTY); move(node, State.deprovisioned, Agent.system, Optional.empty()); } removed.add(node); return removed; } else { List<Node> removed = List.of(node); db.removeNodes(removed); return removed; } } } /** 
Forgets a deprovisioned node. This removes all traces of the node in the node repository. */ public void forget(Node node) { if (node.state() != State.deprovisioned) throw new IllegalArgumentException(node + " must be deprovisioned before it can be forgotten"); db.removeNodes(List.of(node)); } /** * Throws if the given node cannot be removed. Removal is allowed if: * - Tenant node: node is unallocated * - Host node: iff in state provisioned|failed|parked * - Child node: * If only removing the container node: node in state ready * If also removing the parent node: child is in state provisioned|failed|parked|dirty|ready */ private void requireRemovable(Node node, boolean removingAsChild, boolean force) { if (force) return; if (node.type() == NodeType.tenant && node.allocation().isPresent()) illegal(node + " is currently allocated and cannot be removed"); if (!node.type().isHost() && !removingAsChild) { if (node.state() != State.ready) illegal(node + " can not be removed as it is not in the state " + State.ready); } else if (!node.type().isHost()) { Set<State> legalStates = EnumSet.of(State.provisioned, State.failed, State.parked, State.dirty, State.ready); if ( ! legalStates.contains(node.state())) illegal(node + " can not be removed as it is not in the states " + legalStates); } else { Set<State> legalStates = EnumSet.of(State.provisioned, State.failed, State.parked); if (! legalStates.contains(node.state())) illegal(node + " can not be removed as it is not in the states " + legalStates); } } /** * Increases the restart generation of the active nodes matching the filter. * * @return the nodes in their new state. */ public List<Node> restart(NodeFilter filter) { return performOn(StateFilter.from(State.active, filter), (node, lock) -> write(node.withRestart(node.allocation().get().restartGeneration().withIncreasedWanted()), lock)); } /** * Increases the reboot generation of the nodes matching the filter. * @return the nodes in their new state. 
*/ public List<Node> reboot(NodeFilter filter) { return performOn(filter, (node, lock) -> write(node.withReboot(node.status().reboot().withIncreasedWanted()), lock)); } /** * Set target OS version of all nodes matching given filter. * * @return the nodes in their new state. */ public List<Node> upgradeOs(NodeFilter filter, Optional<Version> version) { return performOn(filter, (node, lock) -> { var newStatus = node.status().withOsVersion(node.status().osVersion().withWanted(version)); return write(node.with(newStatus), lock); }); } /** Retire nodes matching given filter */ public List<Node> retire(NodeFilter filter, Agent agent, Instant instant) { return performOn(filter, (node, lock) -> write(node.withWantToRetire(true, agent, instant), lock)); } /** * Writes this node after it has changed some internal state but NOT changed its state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock Already acquired lock * @return the written node for convenience */ public Node write(Node node, Mutex lock) { return write(List.of(node), lock).get(0); } /** * Writes these nodes after they have changed some internal state but NOT changed their state field. * This does NOT lock the node repository implicitly, but callers are expected to already hold the lock. * * @param lock already acquired lock * @return the written nodes for convenience */ public List<Node> write(List<Node> nodes, @SuppressWarnings("unused") Mutex lock) { return db.writeTo(nodes, Agent.system, Optional.empty()); } /** * Performs an operation requiring locking on all nodes matching some filter. * * @param filter the filter determining the set of nodes where the operation will be performed * @param action the action to perform * @return the set of nodes on which the action was performed, as they became as a result of the operation */ public boolean canAllocateTenantNodeTo(Node host) { if ( ! 
host.type().canRun(NodeType.tenant)) return false; if (host.status().wantToRetire()) return false; if (host.allocation().map(alloc -> alloc.membership().retired()).orElse(false)) return false; if ( canProvisionHosts()) return EnumSet.of(State.active, State.ready, State.provisioned).contains(host.state()); else return host.state() == State.active; } /** Returns whether this repository can provision hosts on demand */ public boolean canProvisionHosts() { return canProvisionHosts; } /** Returns the time keeper of this system */ public Clock clock() { return clock; } /** Returns the zone of this system */ public Zone zone() { return zone; } /** Create a lock which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application) { return db.lock(application); } /** Create a lock with a timeout which provides exclusive rights to making changes to the given application */ public Mutex lock(ApplicationId application, Duration timeout) { return db.lock(application, timeout); } /** Create a lock which provides exclusive rights to modifying unallocated nodes */ public Mutex lockUnallocated() { return db.lockInactive(); } /** Acquires the appropriate lock for this node */ public Mutex lock(Node node) { return node.allocation().isPresent() ? lock(node.allocation().get().owner()) : lockUnallocated(); } private void illegal(String message) { throw new IllegalArgumentException(message); } }
I think you should set this to null.
public PubsubMessage(byte[] payload, Map<String, String> attributes) { this.message = payload; this.attributes = attributes; this.messageId = ""; }
this.messageId = "";
public PubsubMessage(byte[] payload, Map<String, String> attributes) { this.message = payload; this.attributes = attributes; this.messageId = null; }
class PubsubMessage { private byte[] message; private Map<String, String> attributes; private String messageId; public PubsubMessage(byte[] payload, Map<String, String> attributes, String messageId) { this.message = payload; this.attributes = attributes; this.messageId = messageId; } /** Returns the main PubSub message. */ public byte[] getPayload() { return message; } /** Returns the given attribute value. If not such attribute exists, returns null. */ @Nullable public String getAttribute(String attribute) { checkNotNull(attribute, "attribute"); return attributes.get(attribute); } /** Returns the full map of attributes. This is an unmodifiable map. */ public Map<String, String> getAttributeMap() { return attributes; } /** Returns the messageId of the message. */ public String getMessageId() { return messageId; } }
class PubsubMessage { private byte[] message; private Map<String, String> attributes; private String messageId; public PubsubMessage(byte[] payload, Map<String, String> attributes, String messageId) { this.message = payload; this.attributes = attributes; this.messageId = messageId; } /** Returns the main PubSub message. */ public byte[] getPayload() { return message; } /** Returns the given attribute value. If not such attribute exists, returns null. */ @Nullable public String getAttribute(String attribute) { checkNotNull(attribute, "attribute"); return attributes.get(attribute); } /** Returns the full map of attributes. This is an unmodifiable map. */ public Map<String, String> getAttributeMap() { return attributes; } /** Returns the messageId of the message populated by Cloud Pub/Sub. */ @Nullable public String getMessageId() { return messageId; } }
If you disable additivity, then the lines are not forwarded to the parent logger. The consequence is that the lines disappear for the user.
protected void before() throws Throwable { loggingEvents = new ConcurrentLinkedQueue<>(); final LoggerConfig previousLoggerConfig = LOGGER_CONTEXT.getConfiguration().getLoggerConfig(loggerName); final Level previousLevel = previousLoggerConfig.getLevel(); final Level userDefinedLevel = Level.getLevel(level.name()); final Level newLevel = userDefinedLevel.isMoreSpecificThan(previousLevel) ? previousLevel : userDefinedLevel; final Filter levelFilter = ThresholdFilter.createFilter( userDefinedLevel, Filter.Result.ACCEPT, Filter.Result.DENY); Appender testAppender = new AbstractAppender( "test-appender-" + generateRandomString(), levelFilter, null, false) { @Override public void append(LogEvent event) { loggingEvents.add(event.getMessage().getFormattedMessage()); } }; testAppender.start(); LoggerConfig loggerConfig = LoggerConfig.createLogger( true, newLevel, loggerName, null, new AppenderRef[] {}, null, LOGGER_CONTEXT.getConfiguration(), null); loggerConfig.addAppender(testAppender, null, null); if (previousLoggerConfig.getName().equals(loggerName)) { backupLoggerConfig = previousLoggerConfig; LOGGER_CONTEXT.getConfiguration().removeLogger(loggerName); for (Appender appender : previousLoggerConfig.getAppenders().values()) { loggerConfig.addAppender(appender, null, null); } } LOGGER_CONTEXT.getConfiguration().addLogger(loggerName, loggerConfig); LOGGER_CONTEXT.updateLoggers(); }
null,
protected void before() throws Throwable { loggingEvents = new ConcurrentLinkedQueue<>(); final LoggerConfig previousLoggerConfig = LOGGER_CONTEXT.getConfiguration().getLoggerConfig(loggerName); final Level previousLevel = previousLoggerConfig.getLevel(); final Level userDefinedLevel = Level.getLevel(level.name()); final Level newLevel = userDefinedLevel.isMoreSpecificThan(previousLevel) ? previousLevel : userDefinedLevel; final Filter levelFilter = ThresholdFilter.createFilter( userDefinedLevel, Filter.Result.ACCEPT, Filter.Result.DENY); final Appender testAppender = new AbstractAppender( "test-appender-" + generateRandomString(), levelFilter, null, false) { @Override public void append(LogEvent event) { loggingEvents.add(event.getMessage().getFormattedMessage()); } }; testAppender.start(); final LoggerConfig loggerConfig = LoggerConfig.createLogger( true, newLevel, loggerName, null, new AppenderRef[] {}, null, LOGGER_CONTEXT.getConfiguration(), null); loggerConfig.addAppender(testAppender, null, null); if (previousLoggerConfig.getName().equals(loggerName)) { backupLoggerConfig = previousLoggerConfig; LOGGER_CONTEXT.getConfiguration().removeLogger(loggerName); for (Appender appender : previousLoggerConfig.getAppenders().values()) { loggerConfig.addAppender(appender, null, null); } } LOGGER_CONTEXT.getConfiguration().addLogger(loggerName, loggerConfig); LOGGER_CONTEXT.updateLoggers(); }
class TestLoggerResource extends ExternalResource { private static final LoggerContext LOGGER_CONTEXT = (LoggerContext) LogManager.getContext(false); private final String loggerName; private final org.slf4j.event.Level level; @Nullable private LoggerConfig backupLoggerConfig = null; private ConcurrentLinkedQueue<String> loggingEvents; public TestLoggerResource(Class<?> clazz, org.slf4j.event.Level level) { this(clazz.getCanonicalName(), level); } private TestLoggerResource(String loggerName, org.slf4j.event.Level level) { this.loggerName = loggerName; this.level = level; } public List<String> getMessages() { return new ArrayList<>(loggingEvents); } private static String generateRandomString() { return UUID.randomUUID().toString().replace("-", ""); } @Override @Override protected void after() { LOGGER_CONTEXT.getConfiguration().removeLogger(loggerName); if (backupLoggerConfig != null) { LOGGER_CONTEXT.getConfiguration().addLogger(loggerName, backupLoggerConfig); } LOGGER_CONTEXT.updateLoggers(); loggingEvents = null; } /** Enables the use of {@link TestLoggerResource} for try-with-resources statement. */ public static SingleTestResource asSingleTestResource( String loggerName, org.slf4j.event.Level level) throws Throwable { return new SingleTestResource(loggerName, level); } /** * SingleTestResource re-uses the code in {@link TestLoggerResource} for try-with-resources * statement. */ public static class SingleTestResource implements AutoCloseable { TestLoggerResource resource; private SingleTestResource(String loggerName, org.slf4j.event.Level level) throws Throwable { resource = new TestLoggerResource(loggerName, level); resource.before(); } @Override public void close() throws Exception { resource.after(); } public List<String> getMessages() { return resource.getMessages(); } } }
class TestLoggerResource extends ExternalResource { private static final LoggerContext LOGGER_CONTEXT = (LoggerContext) LogManager.getContext(false); private final String loggerName; private final org.slf4j.event.Level level; @Nullable private LoggerConfig backupLoggerConfig = null; private ConcurrentLinkedQueue<String> loggingEvents; public TestLoggerResource(Class<?> clazz, org.slf4j.event.Level level) { this(clazz.getCanonicalName(), level); } private TestLoggerResource(String loggerName, org.slf4j.event.Level level) { this.loggerName = loggerName; this.level = level; } public List<String> getMessages() { return new ArrayList<>(loggingEvents); } private static String generateRandomString() { return UUID.randomUUID().toString().replace("-", ""); } @Override @Override protected void after() { LOGGER_CONTEXT.getConfiguration().removeLogger(loggerName); if (backupLoggerConfig != null) { LOGGER_CONTEXT.getConfiguration().addLogger(loggerName, backupLoggerConfig); backupLoggerConfig = null; } LOGGER_CONTEXT.updateLoggers(); loggingEvents = null; } /** Enables the use of {@link TestLoggerResource} for try-with-resources statement. */ public static SingleTestResource asSingleTestResource( String loggerName, org.slf4j.event.Level level) throws Throwable { return new SingleTestResource(loggerName, level); } /** * SingleTestResource re-uses the code in {@link TestLoggerResource} for try-with-resources * statement. */ public static class SingleTestResource implements AutoCloseable { final TestLoggerResource resource; private SingleTestResource(String loggerName, org.slf4j.event.Level level) throws Throwable { resource = new TestLoggerResource(loggerName, level); resource.before(); } @Override public void close() throws Exception { resource.after(); } public List<String> getMessages() { return resource.getMessages(); } } }
Negative is an interesting situation, but you are right, it makes sense — removed the redundant check
private Duration calculateRenewalDelay(OffsetDateTime initialLockedUntil) { final OffsetDateTime now = OffsetDateTime.now(); final Duration remainingTime = Duration.between(now, initialLockedUntil); if (remainingTime.isNegative() || remainingTime.toMillis() < 400) { logger.info("Duration was negative or less than 400ms. now[{}] lockedUntil[{}]", now, initialLockedUntil); return Duration.ZERO; } else { final long bufferInMilliSec = Math.min(remainingTime.toMillis() / 2, MAX_RENEWAL_BUFFER_DURATION.toMillis()); final Duration renewAfter = Duration.ofMillis(remainingTime.toMillis() - bufferInMilliSec); if (renewAfter.isNegative()) { logger.info("Adjusted duration is negative. renewAfter: {}ms. Buffer: {}ms.", remainingTime.toMillis(), bufferInMilliSec); } return renewAfter; } }
if (remainingTime.isNegative() || remainingTime.toMillis() < 400) {
private Duration calculateRenewalDelay(OffsetDateTime initialLockedUntil) { final OffsetDateTime now = OffsetDateTime.now(); final Duration remainingTime = Duration.between(now, initialLockedUntil); if (remainingTime.toMillis() < 400) { logger.info("Duration was less than 400ms. now[{}] lockedUntil[{}]", now, initialLockedUntil); return Duration.ZERO; } else { final long bufferInMilliSec = Math.min(remainingTime.toMillis() / 2, MAX_RENEWAL_BUFFER_DURATION.toMillis()); final Duration renewAfter = Duration.ofMillis(remainingTime.toMillis() - bufferInMilliSec); if (renewAfter.isNegative()) { logger.info("Adjusted duration is negative. renewAfter: {}ms. Buffer: {}ms.", remainingTime.toMillis(), bufferInMilliSec); } return renewAfter; } }
class LockRenewalOperation implements AutoCloseable { private final ClientLogger logger = new ClientLogger(LockRenewalOperation.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicReference<OffsetDateTime> lockedUntil = new AtomicReference<>(); private final AtomicReference<Throwable> throwable = new AtomicReference<>(); private final AtomicReference<LockRenewalStatus> status = new AtomicReference<>(LockRenewalStatus.RUNNING); private final MonoProcessor<Void> cancellationProcessor = MonoProcessor.create(); private final Mono<Void> completionMono; private final String lockToken; private final boolean isSession; private final Function<String, Mono<OffsetDateTime>> renewalOperation; private final Disposable subscription; /** * Creates a new lock renewal operation. The lock is initially renewed. * * @param lockToken Message lock or session id to renew. * @param maxLockRenewalDuration The maximum duration this lock should be renewed. * @param isSession Whether the lock represents a session lock or message lock. * @param renewalOperation The renewal operation to call. */ LockRenewalOperation(String lockToken, Duration maxLockRenewalDuration, boolean isSession, Function<String, Mono<OffsetDateTime>> renewalOperation) { this(lockToken, maxLockRenewalDuration, isSession, renewalOperation, OffsetDateTime.now()); } /** * Creates a new lock renewal operation. * * @param lockToken Lock or session id to renew. * @param tokenLockedUntil The initial period the message or session is locked until. * @param maxLockRenewalDuration The maximum duration this lock should be renewed. * @param isSession Whether the lock represents a session lock or message lock. * @param renewalOperation The renewal operation to call. 
*/ LockRenewalOperation(String lockToken, Duration maxLockRenewalDuration, boolean isSession, Function<String, Mono<OffsetDateTime>> renewalOperation, OffsetDateTime tokenLockedUntil) { this.lockToken = Objects.requireNonNull(lockToken, "'lockToken' cannot be null."); this.renewalOperation = Objects.requireNonNull(renewalOperation, "'renewalOperation' cannot be null."); this.isSession = isSession; Objects.requireNonNull(tokenLockedUntil, "'lockedUntil cannot be null.'"); Objects.requireNonNull(maxLockRenewalDuration, "'maxLockRenewalDuration' cannot be null."); if (maxLockRenewalDuration.isNegative()) { throw logger.logExceptionAsError(new IllegalArgumentException( "'maxLockRenewalDuration' cannot be negative.")); } this.lockedUntil.set(tokenLockedUntil); final Flux<OffsetDateTime> renewLockOperation = getRenewLockOperation(tokenLockedUntil, maxLockRenewalDuration) .takeUntilOther(cancellationProcessor) .cache(Duration.ofMinutes(2)); this.completionMono = renewLockOperation.then(); this.subscription = renewLockOperation.subscribe(until -> this.lockedUntil.set(until), error -> { logger.error("token[{}]. Error occurred while renewing lock token.", error); status.set(LockRenewalStatus.FAILED); throwable.set(error); cancellationProcessor.onComplete(); }, () -> { if (status.compareAndSet(LockRenewalStatus.RUNNING, LockRenewalStatus.COMPLETE)) { logger.verbose("token[{}]. Renewing session lock task completed.", lockToken); } cancellationProcessor.onComplete(); }); } /** * Gets a mono that completes when the operation does. * * @return A mono that completes when the renewal operation does. */ Mono<Void> getCompletionOperation() { return completionMono; } /** * Gets the current datetime the message or session is locked until. * * @return the datetime the message or session is locked until. */ OffsetDateTime getLockedUntil() { return lockedUntil.get(); } /** * Gets the message lock token for the renewal operation. 
* * @return The message lock token or {@code null} if a session is being renewed instead. */ String getLockToken() { return isSession ? null : lockToken; } /** * Gets the session id for this lock renewal operation. * * @return The session id or {@code null} if it is not a session renewal. */ String getSessionId() { return isSession ? lockToken : null; } /** * Gets the current status of the renewal operation. * * @return The current status of the renewal operation. */ LockRenewalStatus getStatus() { return status.get(); } /** * Gets the exception if an error occurred whilst renewing the message or session lock. * * @return the exception if an error occurred whilst renewing the message or session lock, otherwise {@code null}. */ Throwable getThrowable() { return throwable.get(); } /** * Cancels the lock renewal operation. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (status.compareAndSet(LockRenewalStatus.RUNNING, LockRenewalStatus.CANCELLED)) { logger.verbose("token[{}] Cancelled operation.", lockToken); } cancellationProcessor.onComplete(); subscription.dispose(); } /** * Gets the lock renewal operation. if the {@code maxLockRenewalDuration} is {@link Duration * lock is never renewed. * * @param initialLockedUntil When the initial call is locked until. * @param maxLockRenewalDuration Duration to renew lock for. * @return The subscription for the operation. 
*/ private Flux<OffsetDateTime> getRenewLockOperation(OffsetDateTime initialLockedUntil, Duration maxLockRenewalDuration) { if (maxLockRenewalDuration.isZero()) { status.set(LockRenewalStatus.COMPLETE); return Flux.empty(); } final EmitterProcessor<Duration> emitterProcessor = EmitterProcessor.create(); final FluxSink<Duration> sink = emitterProcessor.sink(); sink.next(calculateRenewalDelay(initialLockedUntil)); final Flux<Object> cancellationSignals = Flux.first(cancellationProcessor, Mono.delay(maxLockRenewalDuration)); return Flux.switchOnNext(emitterProcessor.map(interval -> Mono.delay(interval) .thenReturn(Flux.create(s -> s.next(interval))))) .takeUntilOther(cancellationSignals) .flatMap(delay -> { logger.info("token[{}]. now[{}]. Starting lock renewal.", lockToken, OffsetDateTime.now()); return renewalOperation.apply(lockToken); }) .map(offsetDateTime -> { final Duration next = Duration.between(OffsetDateTime.now(), offsetDateTime); logger.info("token[{}]. nextExpiration[{}]. next: [{}]. isSession[{}]", lockToken, offsetDateTime, next, isSession); sink.next(calculateRenewalDelay(offsetDateTime)); return offsetDateTime; }); } }
class LockRenewalOperation implements AutoCloseable { private final ClientLogger logger = new ClientLogger(LockRenewalOperation.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final AtomicReference<OffsetDateTime> lockedUntil = new AtomicReference<>(); private final AtomicReference<Throwable> throwable = new AtomicReference<>(); private final AtomicReference<LockRenewalStatus> status = new AtomicReference<>(LockRenewalStatus.RUNNING); private final MonoProcessor<Void> cancellationProcessor = MonoProcessor.create(); private final Mono<Void> completionMono; private final String lockToken; private final boolean isSession; private final Function<String, Mono<OffsetDateTime>> renewalOperation; private final Disposable subscription; /** * Creates a new lock renewal operation. The lock is initially renewed. * * @param lockToken Message lock or session id to renew. * @param maxLockRenewalDuration The maximum duration this lock should be renewed. * @param isSession Whether the lock represents a session lock or message lock. * @param renewalOperation The renewal operation to call. */ LockRenewalOperation(String lockToken, Duration maxLockRenewalDuration, boolean isSession, Function<String, Mono<OffsetDateTime>> renewalOperation) { this(lockToken, maxLockRenewalDuration, isSession, renewalOperation, OffsetDateTime.now()); } /** * Creates a new lock renewal operation. * * @param lockToken Lock or session id to renew. * @param tokenLockedUntil The initial period the message or session is locked until. * @param maxLockRenewalDuration The maximum duration this lock should be renewed. * @param isSession Whether the lock represents a session lock or message lock. * @param renewalOperation The renewal operation to call. 
*/ LockRenewalOperation(String lockToken, Duration maxLockRenewalDuration, boolean isSession, Function<String, Mono<OffsetDateTime>> renewalOperation, OffsetDateTime tokenLockedUntil) { this.lockToken = Objects.requireNonNull(lockToken, "'lockToken' cannot be null."); this.renewalOperation = Objects.requireNonNull(renewalOperation, "'renewalOperation' cannot be null."); this.isSession = isSession; Objects.requireNonNull(tokenLockedUntil, "'lockedUntil cannot be null.'"); Objects.requireNonNull(maxLockRenewalDuration, "'maxLockRenewalDuration' cannot be null."); if (maxLockRenewalDuration.isNegative()) { throw logger.logExceptionAsError(new IllegalArgumentException( "'maxLockRenewalDuration' cannot be negative.")); } this.lockedUntil.set(tokenLockedUntil); final Flux<OffsetDateTime> renewLockOperation = getRenewLockOperation(tokenLockedUntil, maxLockRenewalDuration) .takeUntilOther(cancellationProcessor) .cache(Duration.ofMinutes(2)); this.completionMono = renewLockOperation.then(); this.subscription = renewLockOperation.subscribe(until -> this.lockedUntil.set(until), error -> { logger.error("token[{}]. Error occurred while renewing lock token.", error); status.set(LockRenewalStatus.FAILED); throwable.set(error); cancellationProcessor.onComplete(); }, () -> { if (status.compareAndSet(LockRenewalStatus.RUNNING, LockRenewalStatus.COMPLETE)) { logger.verbose("token[{}]. Renewing session lock task completed.", lockToken); } cancellationProcessor.onComplete(); }); } /** * Gets a mono that completes when the operation does. * * @return A mono that completes when the renewal operation does. */ Mono<Void> getCompletionOperation() { return completionMono; } /** * Gets the current datetime the message or session is locked until. * * @return the datetime the message or session is locked until. */ OffsetDateTime getLockedUntil() { return lockedUntil.get(); } /** * Gets the message lock token for the renewal operation. 
* * @return The message lock token or {@code null} if a session is being renewed instead. */ String getLockToken() { return isSession ? null : lockToken; } /** * Gets the session id for this lock renewal operation. * * @return The session id or {@code null} if it is not a session renewal. */ String getSessionId() { return isSession ? lockToken : null; } /** * Gets the current status of the renewal operation. * * @return The current status of the renewal operation. */ LockRenewalStatus getStatus() { return status.get(); } /** * Gets the exception if an error occurred whilst renewing the message or session lock. * * @return the exception if an error occurred whilst renewing the message or session lock, otherwise {@code null}. */ Throwable getThrowable() { return throwable.get(); } /** * Cancels the lock renewal operation. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (status.compareAndSet(LockRenewalStatus.RUNNING, LockRenewalStatus.CANCELLED)) { logger.verbose("token[{}] Cancelled operation.", lockToken); } cancellationProcessor.onComplete(); subscription.dispose(); } /** * Gets the lock renewal operation. if the {@code maxLockRenewalDuration} is {@link Duration * lock is never renewed. * * @param initialLockedUntil When the initial call is locked until. * @param maxLockRenewalDuration Duration to renew lock for. * @return The subscription for the operation. 
*/ private Flux<OffsetDateTime> getRenewLockOperation(OffsetDateTime initialLockedUntil, Duration maxLockRenewalDuration) { if (maxLockRenewalDuration.isZero()) { status.set(LockRenewalStatus.COMPLETE); return Flux.empty(); } final EmitterProcessor<Duration> emitterProcessor = EmitterProcessor.create(); final FluxSink<Duration> sink = emitterProcessor.sink(); sink.next(calculateRenewalDelay(initialLockedUntil)); final Flux<Object> cancellationSignals = Flux.first(cancellationProcessor, Mono.delay(maxLockRenewalDuration)); return Flux.switchOnNext(emitterProcessor.map(interval -> Mono.delay(interval) .thenReturn(Flux.create(s -> s.next(interval))))) .takeUntilOther(cancellationSignals) .flatMap(delay -> { logger.info("token[{}]. now[{}]. Starting lock renewal.", lockToken, OffsetDateTime.now()); return renewalOperation.apply(lockToken); }) .map(offsetDateTime -> { final Duration next = Duration.between(OffsetDateTime.now(), offsetDateTime); logger.info("token[{}]. nextExpiration[{}]. next: [{}]. isSession[{}]", lockToken, offsetDateTime, next, isSession); sink.next(calculateRenewalDelay(offsetDateTime)); return offsetDateTime; }); } }
```suggestion sqlStr = op.toString() + " " + getChild(0).toSql(); ```
public ArithmeticExpr(Operator op, Expr e1, Expr e2) { super(); this.op = op; Preconditions.checkNotNull(e1); children.add(e1); Preconditions.checkArgument( op == Operator.BITNOT && e2 == null || op != Operator.BITNOT && e2 != null); if (e2 != null) { children.add(e2); } if (children.size() == 1) { sqlStr=op.toString() + " " + getChild(0).toSql(); } else { sqlStr = getChild(0).toSql() + " " + op.toString() + " " + getChild(1).toSql(); } }
sqlStr=op.toString() + " " + getChild(0).toSql();
public ArithmeticExpr(Operator op, Expr e1, Expr e2) { super(); this.op = op; Preconditions.checkNotNull(e1); children.add(e1); Preconditions.checkArgument( op == Operator.BITNOT && e2 == null || op != Operator.BITNOT && e2 != null); if (e2 != null) { children.add(e2); } sqlStr = null; }
class ArithmeticExpr extends Expr { private static final Logger LOG = LogManager.getLogger(ArithmeticExpr.class); enum OperatorPosition { BINARY_INFIX, UNARY_PREFIX, UNARY_POSTFIX, } public enum Operator { MULTIPLY("*", "multiply", OperatorPosition.BINARY_INFIX, TExprOpcode.MULTIPLY), DIVIDE("/", "divide", OperatorPosition.BINARY_INFIX, TExprOpcode.DIVIDE), MOD("%", "mod", OperatorPosition.BINARY_INFIX, TExprOpcode.MOD), INT_DIVIDE("DIV", "int_divide", OperatorPosition.BINARY_INFIX, TExprOpcode.INT_DIVIDE), ADD("+", "add", OperatorPosition.BINARY_INFIX, TExprOpcode.ADD), SUBTRACT("-", "subtract", OperatorPosition.BINARY_INFIX, TExprOpcode.SUBTRACT), BITAND("&", "bitand", OperatorPosition.BINARY_INFIX, TExprOpcode.BITAND), BITOR("|", "bitor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITOR), BITXOR("^", "bitxor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITXOR), BITNOT("~", "bitnot", OperatorPosition.UNARY_PREFIX, TExprOpcode.BITNOT), FACTORIAL("!", "factorial", OperatorPosition.UNARY_POSTFIX, TExprOpcode.FACTORIAL); private final String description; private final String name; private final OperatorPosition pos; private final TExprOpcode opcode; Operator(String description, String name, OperatorPosition pos, TExprOpcode opcode) { this.description = description; this.name = name; this.pos = pos; this.opcode = opcode; } @Override public String toString() { return description; } public String getName() { return name; } public OperatorPosition getPos() { return pos; } public TExprOpcode getOpcode() { return opcode; } public boolean isUnary() { return pos == OperatorPosition.UNARY_PREFIX || pos == OperatorPosition.UNARY_POSTFIX; } public boolean isBinary() { return pos == OperatorPosition.BINARY_INFIX; } } public static void initBuiltins(FunctionSet functionSet) { for (Type t : Type.getNumericTypes()) { functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.MULTIPLY.getName(), Lists.newArrayList(t, t), t)); 
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.ADD.getName(), Lists.newArrayList(t, t), t)); functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.SUBTRACT.getName(), Lists.newArrayList(t, t), t)); } functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.DIVIDE.getName(), Lists.<Type>newArrayList(Type.DOUBLE, Type.DOUBLE), Type.DOUBLE)); functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.DIVIDE.getName(), Lists.<Type>newArrayList(Type.DECIMAL, Type.DECIMAL), Type.DECIMAL)); functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.DIVIDE.getName(), Lists.<Type>newArrayList(Type.DECIMALV2, Type.DECIMALV2), Type.DECIMALV2)); for (Type t : Type.getIntegerTypes()) { functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.INT_DIVIDE.getName(), Lists.newArrayList(t, t), t)); } } private final Operator op; private String sqlStr; /** * Copy c'tor used in clone(). */ protected ArithmeticExpr(ArithmeticExpr other) { super(other); this.op = other.op; } @Override public String toString() { return toSql(); } @Override public Expr clone() { return new ArithmeticExpr(this); } @Override public String toSqlImpl() { return sqlStr; } @Override protected void toThrift(TExprNode msg) { msg.node_type = TExprNodeType.ARITHMETIC_EXPR; if (!type.isDecimal() && !type.isDecimalV2()) { msg.setOpcode(op.getOpcode()); msg.setOutput_column(outputColumn); } } @Override public boolean equals(Object obj) { if (!super.equals(obj)) { return false; } return ((ArithmeticExpr) obj).opcode == opcode; } @Override public void computeOutputColumn(Analyzer analyzer) { super.computeOutputColumn(analyzer); List<TupleId> tupleIds = Lists.newArrayList(); getIds(tupleIds, null); Preconditions.checkArgument(tupleIds.size() == 1); } private Type findCommonType(Type t1, Type t2) { PrimitiveType pt1 = t1.getPrimitiveType(); PrimitiveType pt2 = t2.getPrimitiveType(); if (pt1 == PrimitiveType.DOUBLE || pt2 == 
PrimitiveType.DOUBLE) { return Type.DOUBLE; } else if (pt1 == PrimitiveType.DECIMALV2 || pt2 == PrimitiveType.DECIMALV2) { return Type.DECIMALV2; } else if (pt1 == PrimitiveType.DECIMAL || pt2 == PrimitiveType.DECIMAL) { return Type.DECIMAL; } else if (pt1 == PrimitiveType.LARGEINT || pt2 == PrimitiveType.LARGEINT) { return Type.LARGEINT; } else { if (pt1 != PrimitiveType.BIGINT && pt2 != PrimitiveType.BIGINT) { return Type.INVALID; } return Type.BIGINT; } } @Override public void analyzeImpl(Analyzer analyzer) throws AnalysisException { if (op == Operator.BITNOT) { type = Type.BIGINT; if (getChild(0).getType().getPrimitiveType() != PrimitiveType.BIGINT) { castChild(type, 0); } fn = getBuiltinFunction( analyzer, op.getName(), collectChildReturnTypes(), Function.CompareMode.IS_SUPERTYPE_OF); if (fn == null) { Preconditions.checkState(false, String.format("No match for op with operand types", toSql())); } return; } Type t1 = getChild(0).getType().getNumResultType(); Type t2 = getChild(1).getType().getNumResultType(); Type commonType = Type.INVALID; String fnName = op.getName(); switch (op) { case MULTIPLY: case ADD: case SUBTRACT: case MOD: commonType = findCommonType(t1, t2); break; case DIVIDE: commonType = findCommonType(t1, t2); if (commonType.getPrimitiveType() == PrimitiveType.BIGINT || commonType.getPrimitiveType() == PrimitiveType.LARGEINT) { commonType = Type.DOUBLE; } break; case INT_DIVIDE: case BITAND: case BITOR: case BITXOR: commonType = Type.BIGINT; break; default: Preconditions.checkState(false, "Unknown arithmetic operation " + op.toString() + " in: " + this.toSql()); break; } type = castBinaryOp(commonType); fn = getBuiltinFunction(analyzer, fnName, collectChildReturnTypes(), Function.CompareMode.IS_IDENTICAL); if (fn == null) { Preconditions.checkState(false, String.format( "No match for '%s' with operand types %s and %s", toSql(), t1, t2)); } } }
class ArithmeticExpr extends Expr { private static final Logger LOG = LogManager.getLogger(ArithmeticExpr.class); enum OperatorPosition { BINARY_INFIX, UNARY_PREFIX, UNARY_POSTFIX, } public enum Operator { MULTIPLY("*", "multiply", OperatorPosition.BINARY_INFIX, TExprOpcode.MULTIPLY), DIVIDE("/", "divide", OperatorPosition.BINARY_INFIX, TExprOpcode.DIVIDE), MOD("%", "mod", OperatorPosition.BINARY_INFIX, TExprOpcode.MOD), INT_DIVIDE("DIV", "int_divide", OperatorPosition.BINARY_INFIX, TExprOpcode.INT_DIVIDE), ADD("+", "add", OperatorPosition.BINARY_INFIX, TExprOpcode.ADD), SUBTRACT("-", "subtract", OperatorPosition.BINARY_INFIX, TExprOpcode.SUBTRACT), BITAND("&", "bitand", OperatorPosition.BINARY_INFIX, TExprOpcode.BITAND), BITOR("|", "bitor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITOR), BITXOR("^", "bitxor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITXOR), BITNOT("~", "bitnot", OperatorPosition.UNARY_PREFIX, TExprOpcode.BITNOT), FACTORIAL("!", "factorial", OperatorPosition.UNARY_POSTFIX, TExprOpcode.FACTORIAL); private final String description; private final String name; private final OperatorPosition pos; private final TExprOpcode opcode; Operator(String description, String name, OperatorPosition pos, TExprOpcode opcode) { this.description = description; this.name = name; this.pos = pos; this.opcode = opcode; } @Override public String toString() { return description; } public String getName() { return name; } public OperatorPosition getPos() { return pos; } public TExprOpcode getOpcode() { return opcode; } public boolean isUnary() { return pos == OperatorPosition.UNARY_PREFIX || pos == OperatorPosition.UNARY_POSTFIX; } public boolean isBinary() { return pos == OperatorPosition.BINARY_INFIX; } } public static void initBuiltins(FunctionSet functionSet) { for (Type t : Type.getNumericTypes()) { functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.MULTIPLY.getName(), Lists.newArrayList(t, t), t)); 
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.ADD.getName(), Lists.newArrayList(t, t), t)); functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.SUBTRACT.getName(), Lists.newArrayList(t, t), t)); } functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.DIVIDE.getName(), Lists.<Type>newArrayList(Type.DOUBLE, Type.DOUBLE), Type.DOUBLE)); functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.DIVIDE.getName(), Lists.<Type>newArrayList(Type.DECIMAL, Type.DECIMAL), Type.DECIMAL)); functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.DIVIDE.getName(), Lists.<Type>newArrayList(Type.DECIMALV2, Type.DECIMALV2), Type.DECIMALV2)); for (Type t : Type.getIntegerTypes()) { functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.INT_DIVIDE.getName(), Lists.newArrayList(t, t), t)); } } private final Operator op; private String sqlStr; /** * Copy c'tor used in clone(). */ protected ArithmeticExpr(ArithmeticExpr other) { super(other); this.op = other.op; this.sqlStr = other.sqlStr; } @Override public String toString() { return toSql(); } @Override public Expr clone() { return new ArithmeticExpr(this); } @Override public String toSqlImpl() { if (sqlStr != null) { return sqlStr; } else if (children.size() == 1) { sqlStr = op.toString() + " " + getChild(0).toSql(); } else { sqlStr = getChild(0).toSql() + " " + op.toString() + " " + getChild(1).toSql(); } return sqlStr; } @Override protected void toThrift(TExprNode msg) { msg.node_type = TExprNodeType.ARITHMETIC_EXPR; if (!type.isDecimal() && !type.isDecimalV2()) { msg.setOpcode(op.getOpcode()); msg.setOutput_column(outputColumn); } } @Override public boolean equals(Object obj) { if (!super.equals(obj)) { return false; } return ((ArithmeticExpr) obj).opcode == opcode; } @Override public void computeOutputColumn(Analyzer analyzer) { super.computeOutputColumn(analyzer); List<TupleId> tupleIds = Lists.newArrayList(); getIds(tupleIds, 
null); Preconditions.checkArgument(tupleIds.size() == 1); } private Type findCommonType(Type t1, Type t2) { PrimitiveType pt1 = t1.getPrimitiveType(); PrimitiveType pt2 = t2.getPrimitiveType(); if (pt1 == PrimitiveType.DOUBLE || pt2 == PrimitiveType.DOUBLE) { return Type.DOUBLE; } else if (pt1 == PrimitiveType.DECIMALV2 || pt2 == PrimitiveType.DECIMALV2) { return Type.DECIMALV2; } else if (pt1 == PrimitiveType.DECIMAL || pt2 == PrimitiveType.DECIMAL) { return Type.DECIMAL; } else if (pt1 == PrimitiveType.LARGEINT || pt2 == PrimitiveType.LARGEINT) { return Type.LARGEINT; } else { if (pt1 != PrimitiveType.BIGINT && pt2 != PrimitiveType.BIGINT) { return Type.INVALID; } return Type.BIGINT; } } @Override public void analyzeImpl(Analyzer analyzer) throws AnalysisException { if (op == Operator.BITNOT) { type = Type.BIGINT; if (getChild(0).getType().getPrimitiveType() != PrimitiveType.BIGINT) { castChild(type, 0); } fn = getBuiltinFunction( analyzer, op.getName(), collectChildReturnTypes(), Function.CompareMode.IS_SUPERTYPE_OF); if (fn == null) { Preconditions.checkState(false, String.format("No match for op with operand types", toSql())); } return; } Type t1 = getChild(0).getType().getNumResultType(); Type t2 = getChild(1).getType().getNumResultType(); Type commonType = Type.INVALID; String fnName = op.getName(); switch (op) { case MULTIPLY: case ADD: case SUBTRACT: case MOD: commonType = findCommonType(t1, t2); break; case DIVIDE: commonType = findCommonType(t1, t2); if (commonType.getPrimitiveType() == PrimitiveType.BIGINT || commonType.getPrimitiveType() == PrimitiveType.LARGEINT) { commonType = Type.DOUBLE; } break; case INT_DIVIDE: case BITAND: case BITOR: case BITXOR: commonType = Type.BIGINT; break; default: Preconditions.checkState(false, "Unknown arithmetic operation " + op.toString() + " in: " + this.toSql()); break; } type = castBinaryOp(commonType); fn = getBuiltinFunction(analyzer, fnName, collectChildReturnTypes(), Function.CompareMode.IS_IDENTICAL); if (fn 
== null) { Preconditions.checkState(false, String.format( "No match for '%s' with operand types %s and %s", toSql(), t1, t2)); } } }
I don't know much about the `jboss.xnio` library; I have only read its documentation. I have a question here: will this run in the `WORKER THREAD pool`, which was configured to 16 in `NMysqlServer` (via `Options.WORKER_TASK_MAX_THREADS`)? If so, there may be a problem. For example: if we have 16 slow queries, each running for 5 minutes, all 16 threads become occupied. Because no threads are then available for subsequent query requests, the server will hang for 5 minutes.
public void handleEvent(ConduitStreamSourceChannel channel) { XnioIoThread.requireCurrentThread(); ctx.suspendAcceptQuery(); channel.getWorker().execute(() -> { ctx.setThreadLocalInfo(); try { connectProcessor.processOnce(); if (!ctx.isKilled()) { ctx.resumeAcceptQuery(); } else { ctx.stopAcceptQuery(); ctx.cleanup(); } } catch (Exception e) { LOG.warn("Exception happened in one session(" + ctx + ").", e); ctx.setKilled(); ctx.cleanup(); } finally { ConnectContext.remove(); } }); }
channel.getWorker().execute(() -> {
public void handleEvent(ConduitStreamSourceChannel channel) { XnioIoThread.requireCurrentThread(); ctx.suspendAcceptQuery(); channel.getWorker().execute(() -> { ctx.setThreadLocalInfo(); try { connectProcessor.processOnce(); if (!ctx.isKilled()) { ctx.resumeAcceptQuery(); } else { ctx.stopAcceptQuery(); ctx.cleanup(); } } catch (Exception e) { LOG.warn("Exception happened in one session(" + ctx + ").", e); ctx.setKilled(); ctx.cleanup(); } finally { ConnectContext.remove(); } }); }
class ReadListener implements ChannelListener<ConduitStreamSourceChannel> { private final Logger LOG = LogManager.getLogger(this.getClass()); private NConnectContext ctx; private ConnectProcessor connectProcessor; public ReadListener(NConnectContext nConnectContext, ConnectProcessor connectProcessor) { this.ctx = nConnectContext; this.connectProcessor = connectProcessor; } @Override }
class ReadListener implements ChannelListener<ConduitStreamSourceChannel> { private final Logger LOG = LogManager.getLogger(this.getClass()); private NConnectContext ctx; private ConnectProcessor connectProcessor; public ReadListener(NConnectContext nConnectContext, ConnectProcessor connectProcessor) { this.ctx = nConnectContext; this.connectProcessor = connectProcessor; } @Override }
I'd rather not selectively lie, unless there's a good reason to do so.
public Node apply(MutableNetwork<Node, Edge> input) { for (Node node : input.nodes()) { if (node instanceof RemoteGrpcPortNode || node instanceof ParallelInstructionNode || node instanceof InstructionOutputNode) { continue; } throw new IllegalArgumentException( String.format("Network contains unknown type of node: %s", input)); } for (Node node : input.nodes()) { if (node instanceof InstructionOutputNode) { continue; } for (Node successor : input.successors(node)) { for (Edge edge : input.edgesConnecting(node, successor)) { if (edge instanceof DefaultEdge) { input.removeEdge(edge); input.addEdge( node, successor, MultiOutputInfoEdge.create(new MultiOutputInfo().setTag(idGenerator.getId()))); } } } } RunnerApi.Components.Builder componentsBuilder = RunnerApi.Components.newBuilder(); componentsBuilder.mergeFrom(this.pipeline.getComponents()); if (pipeline.getComponents().getEnvironmentsMap().isEmpty()) { String envId = Environments.JAVA_SDK_HARNESS_ENVIRONMENT.getUrn() + idGenerator.getId(); componentsBuilder.putEnvironments(envId, Environments.JAVA_SDK_HARNESS_ENVIRONMENT); } String fakeWindowingStrategyId = "fakeWindowingStrategy" + idGenerator.getId(); SdkComponents sdkComponents = SdkComponents.create(pipeline.getComponents()); try { RunnerApi.MessageWithComponents fakeWindowingStrategyProto = WindowingStrategyTranslation.toMessageProto( WindowingStrategy.globalDefault(), sdkComponents); componentsBuilder.putWindowingStrategies( fakeWindowingStrategyId, fakeWindowingStrategyProto.getWindowingStrategy()); componentsBuilder.putAllCoders(fakeWindowingStrategyProto.getComponents().getCodersMap()); componentsBuilder.putAllEnvironments( fakeWindowingStrategyProto.getComponents().getEnvironmentsMap()); } catch (IOException exc) { throw new RuntimeException("Could not convert default windowing stratey to proto", exc); } Map<Node, String> nodesToPCollections = new HashMap<>(); ImmutableMap.Builder<String, NameContext> ptransformIdToNameContexts = ImmutableMap.builder(); 
Set<PCollectionNode> executableStageOutputs = new HashSet<>(); Set<PCollectionNode> executableStageInputs = new HashSet<>(); for (InstructionOutputNode node : Iterables.filter(input.nodes(), InstructionOutputNode.class)) { InstructionOutput instructionOutput = node.getInstructionOutput(); if (isExecutableStageInputPCollection(input, node) || isExecutableStageOutputPCollection(input, node)) { Coder<?> javaCoder = CloudObjects.coderFromCloudObject(CloudObject.fromSpec(instructionOutput.getCodec())); if (FullWindowedValueCoder.class.isInstance(javaCoder)) { FullWindowedValueCoder<?> windowedValueCoder = (FullWindowedValueCoder<?>) javaCoder; Coder<?> windowCoder = windowedValueCoder.getWindowCoder(); if (IntervalWindowCoder.class.isInstance(windowCoder)) { fakeWindowingStrategyId = "generatedFixedWindowingStrategy" + idGenerator.getId(); try { RunnerApi.MessageWithComponents windowingStrategyProto = WindowingStrategyTranslation.toMessageProto( WindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1))), sdkComponents); componentsBuilder.putWindowingStrategies( fakeWindowingStrategyId, windowingStrategyProto.getWindowingStrategy()); componentsBuilder.putAllCoders(windowingStrategyProto.getComponents().getCodersMap()); componentsBuilder.putAllEnvironments( windowingStrategyProto.getComponents().getEnvironmentsMap()); } catch (IOException exc) { throw new RuntimeException("Could not convert FixedWindow stratey to proto", exc); } } } } String coderId = "generatedCoder" + idGenerator.getId(); try (ByteString.Output output = ByteString.newOutput()) { try { Coder<?> javaCoder = CloudObjects.coderFromCloudObject(CloudObject.fromSpec(instructionOutput.getCodec())); Coder<?> elementCoder = ((WindowedValueCoder<?>) javaCoder).getValueCoder(); sdkComponents.registerCoder(elementCoder); RunnerApi.Coder coderProto = CoderTranslation.toProto(elementCoder, sdkComponents); componentsBuilder.putCoders(coderId, coderProto); } catch (IOException e) { throw new 
IllegalArgumentException( String.format( "Unable to encode coder %s for output %s", instructionOutput.getCodec(), instructionOutput), e); } catch (Exception e) { OBJECT_MAPPER.writeValue(output, instructionOutput.getCodec()); componentsBuilder.putCoders( coderId, RunnerApi.Coder.newBuilder() .setSpec( RunnerApi.SdkFunctionSpec.newBuilder() .setSpec( RunnerApi.FunctionSpec.newBuilder() .setPayload(output.toByteString()))) .build()); } } catch (IOException e) { throw new IllegalArgumentException( String.format( "Unable to encode coder %s for output %s", instructionOutput.getCodec(), instructionOutput), e); } String pcollectionId = node.getPcollectionId(); RunnerApi.PCollection pCollection = RunnerApi.PCollection.newBuilder() .setCoderId(coderId) .setWindowingStrategyId(fakeWindowingStrategyId) .build(); nodesToPCollections.put(node, pcollectionId); componentsBuilder.putPcollections(pcollectionId, pCollection); if (isExecutableStageOutputPCollection(input, node)) { executableStageOutputs.add(PipelineNode.pCollection(pcollectionId, pCollection)); } if (isExecutableStageInputPCollection(input, node)) { executableStageInputs.add(PipelineNode.pCollection(pcollectionId, pCollection)); } } componentsBuilder.putAllCoders(sdkComponents.toComponents().getCodersMap()); Set<PTransformNode> executableStageTransforms = new HashSet<>(); for (ParallelInstructionNode node : Iterables.filter(input.nodes(), ParallelInstructionNode.class)) { ParallelInstruction parallelInstruction = node.getParallelInstruction(); String ptransformId = "generatedPtransform" + idGenerator.getId(); ptransformIdToNameContexts.put( ptransformId, NameContext.create( null, parallelInstruction.getOriginalName(), parallelInstruction.getSystemName(), parallelInstruction.getName())); RunnerApi.PTransform.Builder pTransform = RunnerApi.PTransform.newBuilder(); RunnerApi.FunctionSpec.Builder transformSpec = RunnerApi.FunctionSpec.newBuilder(); if (parallelInstruction.getParDo() != null) { ParDoInstruction 
parDoInstruction = parallelInstruction.getParDo(); CloudObject userFnSpec = CloudObject.fromSpec(parDoInstruction.getUserFn()); String userFnClassName = userFnSpec.getClassName(); if (userFnClassName.equals("CombineValuesFn") || userFnClassName.equals("KeyedCombineFn")) { transformSpec = transformCombineValuesFnToFunctionSpec(userFnSpec); } else { String parDoPTransformId = getString(userFnSpec, PropertyNames.SERIALIZED_FN); RunnerApi.PTransform parDoPTransform = pipeline == null ? null : pipeline.getComponents().getTransformsOrDefault(parDoPTransformId, null); if (parDoPTransform != null) { checkArgument( parDoPTransform .getSpec() .getUrn() .equals(PTransformTranslation.PAR_DO_TRANSFORM_URN), "Found transform \"%s\" for ParallelDo instruction, " + " but that transform had unexpected URN \"%s\" (expected \"%s\")", parDoPTransformId, parDoPTransform.getSpec().getUrn(), PTransformTranslation.PAR_DO_TRANSFORM_URN); RunnerApi.ParDoPayload parDoPayload; try { parDoPayload = RunnerApi.ParDoPayload.parseFrom(parDoPTransform.getSpec().getPayload()); } catch (InvalidProtocolBufferException exc) { throw new RuntimeException("ParDo did not have a ParDoPayload", exc); } transformSpec .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .setPayload(parDoPayload.toByteString()); } else { byte[] userFnBytes = getBytes(userFnSpec, PropertyNames.SERIALIZED_FN); transformSpec .setUrn(ParDoTranslation.CUSTOM_JAVA_DO_FN_URN) .setPayload(ByteString.copyFrom(userFnBytes)); } } } else if (parallelInstruction.getRead() != null) { ReadInstruction readInstruction = parallelInstruction.getRead(); CloudObject sourceSpec = CloudObject.fromSpec( CloudSourceUtils.flattenBaseSpecs(readInstruction.getSource()).getSpec()); transformSpec.setUrn(JAVA_SOURCE_URN); try { byte[] serializedSource = Base64.getDecoder().decode(getString(sourceSpec, SERIALIZED_SOURCE)); ByteString sourceByteString = ByteString.copyFrom(serializedSource); transformSpec.setPayload(sourceByteString); } catch (Exception e) { 
throw new IllegalArgumentException( String.format("Unable to process Read %s", parallelInstruction), e); } } else if (parallelInstruction.getFlatten() != null) { transformSpec.setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN); } else { throw new IllegalArgumentException( String.format("Unknown type of ParallelInstruction %s", parallelInstruction)); } for (Node predecessorOutput : input.predecessors(node)) { pTransform.putInputs( "generatedInput" + idGenerator.getId(), nodesToPCollections.get(predecessorOutput)); } for (Edge edge : input.outEdges(node)) { Node nodeOutput = input.incidentNodes(edge).target(); MultiOutputInfoEdge edge2 = (MultiOutputInfoEdge) edge; pTransform.putOutputs( edge2.getMultiOutputInfo().getTag(), nodesToPCollections.get(nodeOutput)); } pTransform.setSpec(transformSpec); executableStageTransforms.add(PipelineNode.pTransform(ptransformId, pTransform.build())); } if (executableStageInputs.size() != 1) { throw new UnsupportedOperationException("ExecutableStage only support one input PCollection"); } PCollectionNode executableInput = executableStageInputs.iterator().next(); RunnerApi.Components executableStageComponents = componentsBuilder.build(); Environment executableStageEnv = getEnvironmentFromPTransform(executableStageComponents, executableStageTransforms); if (executableStageEnv == null) { executableStageEnv = Environments.JAVA_SDK_HARNESS_ENVIRONMENT; } Set<SideInputReference> executableStageSideInputs = new HashSet<>(); Set<TimerReference> executableStageTimers = new HashSet<>(); Set<UserStateReference> executableStageUserStateReference = new HashSet<>(); ExecutableStage executableStage = ImmutableExecutableStage.ofFullComponents( executableStageComponents, executableStageEnv, executableInput, executableStageSideInputs, executableStageUserStateReference, executableStageTimers, executableStageTransforms, executableStageOutputs); return ExecutableStageNode.create(executableStage, ptransformIdToNameContexts.build()); }
public Node apply(MutableNetwork<Node, Edge> input) { for (Node node : input.nodes()) { if (node instanceof RemoteGrpcPortNode || node instanceof ParallelInstructionNode || node instanceof InstructionOutputNode) { continue; } throw new IllegalArgumentException( String.format("Network contains unknown type of node: %s", input)); } for (Node node : input.nodes()) { if (node instanceof InstructionOutputNode) { continue; } for (Node successor : input.successors(node)) { for (Edge edge : input.edgesConnecting(node, successor)) { if (edge instanceof DefaultEdge) { input.removeEdge(edge); input.addEdge( node, successor, MultiOutputInfoEdge.create(new MultiOutputInfo().setTag(idGenerator.getId()))); } } } } RunnerApi.Components.Builder componentsBuilder = RunnerApi.Components.newBuilder(); componentsBuilder.mergeFrom(this.pipeline.getComponents()); if (pipeline.getComponents().getEnvironmentsMap().isEmpty()) { String envId = Environments.JAVA_SDK_HARNESS_ENVIRONMENT.getUrn() + idGenerator.getId(); componentsBuilder.putEnvironments(envId, Environments.JAVA_SDK_HARNESS_ENVIRONMENT); } String globalWindowingStrategyId = "generatedGlobalWindowingStrategy" + idGenerator.getId(); String intervalWindowEncodingWindowingStrategyId = "generatedIntervalWindowEncodingWindowingStrategy" + idGenerator.getId(); SdkComponents sdkComponents = SdkComponents.create(pipeline.getComponents()); try { registerWindowingStrategy( globalWindowingStrategyId, WindowingStrategy.globalDefault(), componentsBuilder, sdkComponents); registerWindowingStrategy( intervalWindowEncodingWindowingStrategyId, WindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1))), componentsBuilder, sdkComponents); } catch (IOException exc) { throw new RuntimeException("Could not convert default windowing stratey to proto", exc); } Map<Node, String> nodesToPCollections = new HashMap<>(); ImmutableMap.Builder<String, NameContext> ptransformIdToNameContexts = ImmutableMap.builder(); Set<PCollectionNode> 
executableStageOutputs = new HashSet<>(); Set<PCollectionNode> executableStageInputs = new HashSet<>(); for (InstructionOutputNode node : Iterables.filter(input.nodes(), InstructionOutputNode.class)) { InstructionOutput instructionOutput = node.getInstructionOutput(); String coderId = "generatedCoder" + idGenerator.getId(); String windowingStrategyId; try (ByteString.Output output = ByteString.newOutput()) { try { Coder<?> javaCoder = CloudObjects.coderFromCloudObject(CloudObject.fromSpec(instructionOutput.getCodec())); Coder<?> elementCoder = ((WindowedValueCoder<?>) javaCoder).getValueCoder(); sdkComponents.registerCoder(elementCoder); RunnerApi.Coder coderProto = CoderTranslation.toProto(elementCoder, sdkComponents); componentsBuilder.putCoders(coderId, coderProto); if (javaCoder instanceof FullWindowedValueCoder) { FullWindowedValueCoder<?> windowedValueCoder = (FullWindowedValueCoder<?>) javaCoder; Coder<?> windowCoder = windowedValueCoder.getWindowCoder(); if (windowCoder instanceof IntervalWindowCoder) { windowingStrategyId = intervalWindowEncodingWindowingStrategyId; } else if (windowCoder instanceof GlobalWindow.Coder) { windowingStrategyId = globalWindowingStrategyId; } else { throw new UnsupportedOperationException( String.format( "Dataflow portable runner harness doesn't support windowing with %s", windowCoder)); } } else { throw new UnsupportedOperationException( "Dataflow portable runner harness only supports FullWindowedValueCoder"); } } catch (IOException e) { throw new IllegalArgumentException( String.format( "Unable to encode coder %s for output %s", instructionOutput.getCodec(), instructionOutput), e); } catch (Exception e) { OBJECT_MAPPER.writeValue(output, instructionOutput.getCodec()); componentsBuilder.putCoders( coderId, RunnerApi.Coder.newBuilder() .setSpec( RunnerApi.SdkFunctionSpec.newBuilder() .setSpec( RunnerApi.FunctionSpec.newBuilder() .setPayload(output.toByteString()))) .build()); windowingStrategyId = globalWindowingStrategyId; } } 
catch (IOException e) { throw new IllegalArgumentException( String.format( "Unable to encode coder %s for output %s", instructionOutput.getCodec(), instructionOutput), e); } String pcollectionId = node.getPcollectionId(); RunnerApi.PCollection pCollection = RunnerApi.PCollection.newBuilder() .setCoderId(coderId) .setWindowingStrategyId(windowingStrategyId) .build(); nodesToPCollections.put(node, pcollectionId); componentsBuilder.putPcollections(pcollectionId, pCollection); if (isExecutableStageOutputPCollection(input, node)) { executableStageOutputs.add(PipelineNode.pCollection(pcollectionId, pCollection)); } if (isExecutableStageInputPCollection(input, node)) { executableStageInputs.add(PipelineNode.pCollection(pcollectionId, pCollection)); } } componentsBuilder.putAllCoders(sdkComponents.toComponents().getCodersMap()); Set<PTransformNode> executableStageTransforms = new HashSet<>(); for (ParallelInstructionNode node : Iterables.filter(input.nodes(), ParallelInstructionNode.class)) { ParallelInstruction parallelInstruction = node.getParallelInstruction(); String ptransformId = "generatedPtransform" + idGenerator.getId(); ptransformIdToNameContexts.put( ptransformId, NameContext.create( null, parallelInstruction.getOriginalName(), parallelInstruction.getSystemName(), parallelInstruction.getName())); RunnerApi.PTransform.Builder pTransform = RunnerApi.PTransform.newBuilder(); RunnerApi.FunctionSpec.Builder transformSpec = RunnerApi.FunctionSpec.newBuilder(); if (parallelInstruction.getParDo() != null) { ParDoInstruction parDoInstruction = parallelInstruction.getParDo(); CloudObject userFnSpec = CloudObject.fromSpec(parDoInstruction.getUserFn()); String userFnClassName = userFnSpec.getClassName(); if (userFnClassName.equals("CombineValuesFn") || userFnClassName.equals("KeyedCombineFn")) { transformSpec = transformCombineValuesFnToFunctionSpec(userFnSpec); } else { String parDoPTransformId = getString(userFnSpec, PropertyNames.SERIALIZED_FN); RunnerApi.PTransform 
parDoPTransform = pipeline == null ? null : pipeline.getComponents().getTransformsOrDefault(parDoPTransformId, null); if (parDoPTransform != null) { checkArgument( parDoPTransform .getSpec() .getUrn() .equals(PTransformTranslation.PAR_DO_TRANSFORM_URN), "Found transform \"%s\" for ParallelDo instruction, " + " but that transform had unexpected URN \"%s\" (expected \"%s\")", parDoPTransformId, parDoPTransform.getSpec().getUrn(), PTransformTranslation.PAR_DO_TRANSFORM_URN); RunnerApi.ParDoPayload parDoPayload; try { parDoPayload = RunnerApi.ParDoPayload.parseFrom(parDoPTransform.getSpec().getPayload()); } catch (InvalidProtocolBufferException exc) { throw new RuntimeException("ParDo did not have a ParDoPayload", exc); } transformSpec .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .setPayload(parDoPayload.toByteString()); } else { byte[] userFnBytes = getBytes(userFnSpec, PropertyNames.SERIALIZED_FN); transformSpec .setUrn(ParDoTranslation.CUSTOM_JAVA_DO_FN_URN) .setPayload(ByteString.copyFrom(userFnBytes)); } } } else if (parallelInstruction.getRead() != null) { ReadInstruction readInstruction = parallelInstruction.getRead(); CloudObject sourceSpec = CloudObject.fromSpec( CloudSourceUtils.flattenBaseSpecs(readInstruction.getSource()).getSpec()); transformSpec.setUrn(JAVA_SOURCE_URN); try { byte[] serializedSource = Base64.getDecoder().decode(getString(sourceSpec, SERIALIZED_SOURCE)); ByteString sourceByteString = ByteString.copyFrom(serializedSource); transformSpec.setPayload(sourceByteString); } catch (Exception e) { throw new IllegalArgumentException( String.format("Unable to process Read %s", parallelInstruction), e); } } else if (parallelInstruction.getFlatten() != null) { transformSpec.setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN); } else { throw new IllegalArgumentException( String.format("Unknown type of ParallelInstruction %s", parallelInstruction)); } for (Node predecessorOutput : input.predecessors(node)) { pTransform.putInputs( "generatedInput" 
+ idGenerator.getId(), nodesToPCollections.get(predecessorOutput)); } for (Edge edge : input.outEdges(node)) { Node nodeOutput = input.incidentNodes(edge).target(); MultiOutputInfoEdge edge2 = (MultiOutputInfoEdge) edge; pTransform.putOutputs( edge2.getMultiOutputInfo().getTag(), nodesToPCollections.get(nodeOutput)); } pTransform.setSpec(transformSpec); executableStageTransforms.add(PipelineNode.pTransform(ptransformId, pTransform.build())); } if (executableStageInputs.size() != 1) { throw new UnsupportedOperationException("ExecutableStage only support one input PCollection"); } PCollectionNode executableInput = executableStageInputs.iterator().next(); RunnerApi.Components executableStageComponents = componentsBuilder.build(); Environment executableStageEnv = getEnvironmentFromPTransform(executableStageComponents, executableStageTransforms); if (executableStageEnv == null) { executableStageEnv = Environments.JAVA_SDK_HARNESS_ENVIRONMENT; } Set<SideInputReference> executableStageSideInputs = new HashSet<>(); Set<TimerReference> executableStageTimers = new HashSet<>(); Set<UserStateReference> executableStageUserStateReference = new HashSet<>(); ExecutableStage executableStage = ImmutableExecutableStage.ofFullComponents( executableStageComponents, executableStageEnv, executableInput, executableStageSideInputs, executableStageUserStateReference, executableStageTimers, executableStageTransforms, executableStageOutputs); return ExecutableStageNode.create(executableStage, ptransformIdToNameContexts.build()); }
class CreateExecutableStageNodeFunction implements Function<MutableNetwork<Node, Edge>, Node> { private static final String DATA_INPUT_URN = "urn:org.apache.beam:source:runner:0.1"; private static final String DATA_OUTPUT_URN = "urn:org.apache.beam:sink:runner:0.1"; private static final String JAVA_SOURCE_URN = "urn:org.apache.beam:source:java:0.1"; public static final String COMBINE_PER_KEY_URN = BeamUrns.getUrn(StandardPTransforms.Composites.COMBINE_PER_KEY); public static final String COMBINE_PRECOMBINE_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_PER_KEY_PRECOMBINE); public static final String COMBINE_MERGE_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_PER_KEY_MERGE_ACCUMULATORS); public static final String COMBINE_EXTRACT_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_PER_KEY_EXTRACT_OUTPUTS); public static final String COMBINE_GROUPED_VALUES_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_GROUPED_VALUES); private static final String SERIALIZED_SOURCE = "serialized_source"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final IdGenerator idGenerator; private final @Nullable RunnerApi.Pipeline pipeline; public CreateExecutableStageNodeFunction(RunnerApi.Pipeline pipeline, IdGenerator idGenerator) { this.pipeline = pipeline; this.idGenerator = idGenerator; } @Override private Environment getEnvironmentFromPTransform( RunnerApi.Components components, Set<PTransformNode> sdkTransforms) { RehydratedComponents sdkComponents = RehydratedComponents.forComponents(components); Environment env = null; for (PTransformNode pTransformNode : sdkTransforms) { env = Environments.getEnvironment(pTransformNode.getTransform(), sdkComponents).orElse(null); if (env != null) { break; } } return env; } /** * Transforms a CombineValuesFn {@link ParDoInstruction} to an Apache Beam {@link * RunnerApi.FunctionSpec}. 
*/ private RunnerApi.FunctionSpec.Builder transformCombineValuesFnToFunctionSpec( CloudObject userFn) { String combinePTransformId = getString(userFn, PropertyNames.SERIALIZED_FN); RunnerApi.PTransform combinePerKeyPTransform = pipeline.getComponents().getTransformsOrDefault(combinePTransformId, null); checkArgument( combinePerKeyPTransform != null, "Transform with id \"%s\" not found in pipeline.", combinePTransformId); checkArgument( combinePerKeyPTransform.getSpec().getUrn().equals(COMBINE_PER_KEY_URN), "Found transform \"%s\" for Combine instruction, " + "but that transform had unexpected URN \"%s\" (expected \"%s\")", combinePerKeyPTransform, combinePerKeyPTransform.getSpec().getUrn(), COMBINE_PER_KEY_URN); RunnerApi.CombinePayload combinePayload; try { combinePayload = RunnerApi.CombinePayload.parseFrom(combinePerKeyPTransform.getSpec().getPayload()); } catch (InvalidProtocolBufferException exc) { throw new RuntimeException("Combine did not have a CombinePayload", exc); } String phase = getString(userFn, WorkerPropertyNames.PHASE, CombinePhase.ALL); String urn; switch (phase) { case CombinePhase.ALL: urn = COMBINE_GROUPED_VALUES_URN; break; case CombinePhase.ADD: urn = COMBINE_PRECOMBINE_URN; break; case CombinePhase.MERGE: urn = COMBINE_MERGE_URN; break; case CombinePhase.EXTRACT: urn = COMBINE_EXTRACT_URN; break; default: throw new RuntimeException("Encountered unknown Combine Phase: " + phase); } return RunnerApi.FunctionSpec.newBuilder() .setUrn(urn) .setPayload(combinePayload.toByteString()); } private boolean isExecutableStageInputPCollection( MutableNetwork<Node, Edge> input, InstructionOutputNode node) { return input.predecessors(node).stream().anyMatch(RemoteGrpcPortNode.class::isInstance); } private boolean isExecutableStageOutputPCollection( MutableNetwork<Node, Edge> input, InstructionOutputNode node) { return input.successors(node).stream().anyMatch(RemoteGrpcPortNode.class::isInstance); } }
class CreateExecutableStageNodeFunction implements Function<MutableNetwork<Node, Edge>, Node> { private static final String DATA_INPUT_URN = "urn:org.apache.beam:source:runner:0.1"; private static final String DATA_OUTPUT_URN = "urn:org.apache.beam:sink:runner:0.1"; private static final String JAVA_SOURCE_URN = "urn:org.apache.beam:source:java:0.1"; public static final String COMBINE_PER_KEY_URN = BeamUrns.getUrn(StandardPTransforms.Composites.COMBINE_PER_KEY); public static final String COMBINE_PRECOMBINE_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_PER_KEY_PRECOMBINE); public static final String COMBINE_MERGE_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_PER_KEY_MERGE_ACCUMULATORS); public static final String COMBINE_EXTRACT_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_PER_KEY_EXTRACT_OUTPUTS); public static final String COMBINE_GROUPED_VALUES_URN = BeamUrns.getUrn(StandardPTransforms.CombineComponents.COMBINE_GROUPED_VALUES); private static final String SERIALIZED_SOURCE = "serialized_source"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final IdGenerator idGenerator; private final @Nullable RunnerApi.Pipeline pipeline; public CreateExecutableStageNodeFunction(RunnerApi.Pipeline pipeline, IdGenerator idGenerator) { this.pipeline = pipeline; this.idGenerator = idGenerator; } @Override private Environment getEnvironmentFromPTransform( RunnerApi.Components components, Set<PTransformNode> sdkTransforms) { RehydratedComponents sdkComponents = RehydratedComponents.forComponents(components); Environment env = null; for (PTransformNode pTransformNode : sdkTransforms) { env = Environments.getEnvironment(pTransformNode.getTransform(), sdkComponents).orElse(null); if (env != null) { break; } } return env; } /** * Transforms a CombineValuesFn {@link ParDoInstruction} to an Apache Beam {@link * RunnerApi.FunctionSpec}. 
*/ private RunnerApi.FunctionSpec.Builder transformCombineValuesFnToFunctionSpec( CloudObject userFn) { String combinePTransformId = getString(userFn, PropertyNames.SERIALIZED_FN); RunnerApi.PTransform combinePerKeyPTransform = pipeline.getComponents().getTransformsOrDefault(combinePTransformId, null); checkArgument( combinePerKeyPTransform != null, "Transform with id \"%s\" not found in pipeline.", combinePTransformId); checkArgument( combinePerKeyPTransform.getSpec().getUrn().equals(COMBINE_PER_KEY_URN), "Found transform \"%s\" for Combine instruction, " + "but that transform had unexpected URN \"%s\" (expected \"%s\")", combinePerKeyPTransform, combinePerKeyPTransform.getSpec().getUrn(), COMBINE_PER_KEY_URN); RunnerApi.CombinePayload combinePayload; try { combinePayload = RunnerApi.CombinePayload.parseFrom(combinePerKeyPTransform.getSpec().getPayload()); } catch (InvalidProtocolBufferException exc) { throw new RuntimeException("Combine did not have a CombinePayload", exc); } String phase = getString(userFn, WorkerPropertyNames.PHASE, CombinePhase.ALL); String urn; switch (phase) { case CombinePhase.ALL: urn = COMBINE_GROUPED_VALUES_URN; break; case CombinePhase.ADD: urn = COMBINE_PRECOMBINE_URN; break; case CombinePhase.MERGE: urn = COMBINE_MERGE_URN; break; case CombinePhase.EXTRACT: urn = COMBINE_EXTRACT_URN; break; default: throw new RuntimeException("Encountered unknown Combine Phase: " + phase); } return RunnerApi.FunctionSpec.newBuilder() .setUrn(urn) .setPayload(combinePayload.toByteString()); } private boolean isExecutableStageInputPCollection( MutableNetwork<Node, Edge> input, InstructionOutputNode node) { return input.predecessors(node).stream().anyMatch(RemoteGrpcPortNode.class::isInstance); } private boolean isExecutableStageOutputPCollection( MutableNetwork<Node, Edge> input, InstructionOutputNode node) { return input.successors(node).stream().anyMatch(RemoteGrpcPortNode.class::isInstance); } private void registerWindowingStrategy( String 
windowingStrategyId, WindowingStrategy<?, ?> windowingStrategy, RunnerApi.Components.Builder componentsBuilder, SdkComponents sdkComponents) throws IOException { RunnerApi.MessageWithComponents fakeWindowingStrategyProto = WindowingStrategyTranslation.toMessageProto(windowingStrategy, sdkComponents); componentsBuilder.putWindowingStrategies( windowingStrategyId, fakeWindowingStrategyProto.getWindowingStrategy()); componentsBuilder.putAllCoders(fakeWindowingStrategyProto.getComponents().getCodersMap()); componentsBuilder.putAllEnvironments( fakeWindowingStrategyProto.getComponents().getEnvironmentsMap()); } }
@sd-f This null is taken care of a bit later [here](https://github.com/quarkusio/quarkus/blob/351778d3ce0aa8a44cc45328ebdc44c293d2f1bb/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientRecorder.java#L119). We could throw the exception now, but then we'd have to refactor the Uni code that catches it, so if you don't mind let's keep returning null here :-)
private static Uni<String> discoverTokenEndpoint(WebClient client, String authServerUrl) {
    // Fetch the standard OIDC discovery document and extract the token
    // endpoint URL; fails the Uni with OidcClientException on a non-200 reply.
    String discoveryUrl = authServerUrl + "/.well-known/openid-configuration";
    return client.getAbs(discoveryUrl).send().onItem().transform(resp -> {
        if (resp.statusCode() != 200) {
            LOG.tracef("Discovery has failed, status code: %d", resp.statusCode());
            throw new OidcClientException("Token endpoint discovery has failed");
        }
        JsonObject json = resp.bodyAsJsonObject();
        return json.getString("token_endpoint");
    });
}
throw new OidcClientException("Token endpoint discovery has failed");
private static Uni<String> discoverTokenEndpoint(WebClient client, String authServerUrl) {
    // Fetch the standard OIDC discovery document and extract the token
    // endpoint URL. Returns null on a non-200 reply; the caller is expected
    // to handle the null result later.
    String discoveryUrl = authServerUrl + "/.well-known/openid-configuration";
    return client.getAbs(discoveryUrl).send().onItem().transform(resp -> {
        if (resp.statusCode() != 200) {
            LOG.tracef("Discovery has failed, status code: %d", resp.statusCode());
            return null;
        }
        return resp.bodyAsJsonObject().getString("token_endpoint");
    });
}
/**
 * Quarkus runtime recorder that builds {@code OidcClient} instances from the
 * {@code quarkus.oidc-client.*} configuration, optionally discovering the token
 * endpoint from the OpenID Connect provider.
 */
class OidcClientRecorder {

    private static final Logger LOG = Logger.getLogger(OidcClientRecorder.class);
    // Fallback id used when the default client has no explicit id configured.
    private static final String DEFAULT_OIDC_CLIENT_ID = "Default";

    /**
     * Creates the default client and all statically configured named clients, and wraps them
     * in an {@code OidcClientsImpl}; the supplied function lazily builds further clients on demand.
     */
    public OidcClients setup(OidcClientsConfig oidcClientsConfig, TlsConfig tlsConfig, Supplier<Vertx> vertx) {
        String defaultClientId = oidcClientsConfig.defaultClient.getId().orElse(DEFAULT_OIDC_CLIENT_ID);
        OidcClient defaultClient = createOidcClient(oidcClientsConfig.defaultClient, defaultClientId, tlsConfig, vertx);
        Map<String, OidcClient> staticOidcClients = new HashMap<>();
        for (Map.Entry<String, OidcClientConfig> config : oidcClientsConfig.namedClients.entrySet()) {
            // Each named client id must not clash with the default client id.
            OidcCommonUtils.verifyConfigurationId(defaultClientId, config.getKey(), config.getValue().getId());
            staticOidcClients.put(config.getKey(), createOidcClient(config.getValue(), config.getKey(), tlsConfig, vertx));
        }
        return new OidcClientsImpl(defaultClient, staticOidcClients, new Function<OidcClientConfig, Uni<OidcClient>>() {
            @Override
            public Uni<OidcClient> apply(OidcClientConfig config) {
                return createOidcClientUni(config, config.getId().get(), tlsConfig, vertx);
            }
        });
    }

    /** Supplier bean exposing the default {@code OidcClient}. */
    public Supplier<OidcClient> createOidcClientBean(OidcClients clients) {
        return new Supplier<OidcClient>() {
            @Override
            public OidcClient get() {
                return clients.getClient();
            }
        };
    }

    /** Supplier bean exposing the {@code OidcClients} container itself. */
    public Supplier<OidcClients> createOidcClientsBean(OidcClients clients) {
        return new Supplier<OidcClients>() {
            @Override
            public OidcClients get() {
                return clients;
            }
        };
    }

    /** Blocking convenience wrapper around {@link #createOidcClientUni}. */
    protected static OidcClient createOidcClient(OidcClientConfig oidcConfig, String oidcClientId, TlsConfig tlsConfig,
            Supplier<Vertx> vertx) {
        return createOidcClientUni(oidcConfig, oidcClientId, tlsConfig, vertx).await().indefinitely();
    }

    /**
     * Asynchronously builds a single {@code OidcClient}: verifies configuration, resolves the
     * token endpoint (statically or via discovery) and assembles the grant parameters.
     * Disabled configurations yield a {@code DisabledOidcClient} that throws on every call.
     */
    protected static Uni<OidcClient> createOidcClientUni(OidcClientConfig oidcConfig, String oidcClientId,
            TlsConfig tlsConfig, Supplier<Vertx> vertx) {
        if (!oidcConfig.isClientEnabled()) {
            String message = String.format("'%s' client configuration is disabled", oidcClientId);
            LOG.debug(message);
            return Uni.createFrom().item(new DisabledOidcClient(message));
        }
        if (!oidcConfig.getId().isPresent()) {
            oidcConfig.setId(oidcClientId);
        }
        OidcCommonUtils.verifyCommonConfiguration(oidcConfig);
        String authServerUriString = OidcCommonUtils.getAuthServerUrl(oidcConfig);
        WebClientOptions options = new WebClientOptions();
        URI authServerUri = URI.create(authServerUriString);
        OidcCommonUtils.setHttpClientOptions(oidcConfig, tlsConfig, options);
        WebClient client = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx.get()), options);
        Uni<String> tokenRequestUriUni = null;
        if (!oidcConfig.discoveryEnabled) {
            // Discovery disabled: derive the token endpoint from the configured path.
            tokenRequestUriUni = Uni.createFrom()
                    .item(OidcCommonUtils.getOidcEndpointUrl(authServerUri.toString(), oidcConfig.tokenPath));
        } else {
            tokenRequestUriUni = discoverTokenRequestUri(client, authServerUri.toString(), oidcConfig);
        }
        return tokenRequestUriUni.onItem().transform(new Function<String, OidcClient>() {
            @Override
            public OidcClient apply(String tokenRequestUri) {
                // A null endpoint (discovery failed) is surfaced as a configuration error here.
                if (tokenRequestUri == null) {
                    throw new ConfigurationException(
                            "OpenId Connect Provider token endpoint URL is not configured and can not be discovered");
                }
                MultiMap tokenGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
                String grantType = oidcConfig.grant.getType() == Grant.Type.CLIENT
                        ? OidcConstants.CLIENT_CREDENTIALS_GRANT
                        : OidcConstants.PASSWORD_GRANT;
                setGrantClientParams(oidcConfig, tokenGrantParams, grantType);
                if (oidcConfig.grant.getType() == Grant.Type.PASSWORD) {
                    // Password grant additionally carries the configured username/password.
                    Map<String, String> passwordGrantOptions = oidcConfig.getGrantOptions()
                            .get(OidcConstants.PASSWORD_GRANT);
                    tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_USERNAME,
                            passwordGrantOptions.get(OidcConstants.PASSWORD_GRANT_USERNAME));
                    tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_PASSWORD,
                            passwordGrantOptions.get(OidcConstants.PASSWORD_GRANT_PASSWORD));
                }
                MultiMap commonRefreshGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
                setGrantClientParams(oidcConfig, commonRefreshGrantParams, OidcConstants.REFRESH_TOKEN_GRANT);
                return new OidcClientImpl(client, tokenRequestUri, grantType, tokenGrantParams,
                        commonRefreshGrantParams, oidcConfig);
            }
        });
    }

    /** Populates the grant parameters shared by token and refresh requests. */
    private static void setGrantClientParams(OidcClientConfig oidcConfig, MultiMap grantParams, String grantType) {
        grantParams.add(OidcConstants.GRANT_TYPE, grantType);
        Credentials creds = oidcConfig.getCredentials();
        if (OidcCommonUtils.isClientSecretPostAuthRequired(creds)) {
            // client_secret_post authentication: credentials travel in the form body.
            grantParams.add(OidcConstants.CLIENT_ID, oidcConfig.clientId.get());
            grantParams.add(OidcConstants.CLIENT_SECRET, OidcCommonUtils.clientSecret(creds));
        }
        if (oidcConfig.getScopes().isPresent()) {
            grantParams.add(OidcConstants.TOKEN_SCOPE,
                    oidcConfig.getScopes().get().stream().collect(Collectors.joining(" ")));
        }
    }

    /**
     * Attempts token-endpoint discovery, retrying up to the configured connection retry count
     * with a 2-second pause between attempts. Yields a null item when discovery is not enabled
     * or no attempt succeeds synchronously.
     *
     * <p>NOTE(review): if {@code discoverTokenEndpoint} returns a lazy Uni, failures may only
     * surface on subscription rather than inside this try/catch — verify the retry loop actually
     * observes them.
     */
    private static Uni<String> discoverTokenRequestUri(WebClient client, String authServerUrl,
            OidcClientConfig oidcConfig) {
        final long connectionRetryCount = OidcCommonUtils.getConnectionRetryCount(oidcConfig);
        if (connectionRetryCount > 1) {
            LOG.infof("Connecting to IDP for up to %d times every 2 seconds", connectionRetryCount);
        }
        for (long i = 0; i < connectionRetryCount; i++) {
            try {
                if (oidcConfig.discoveryEnabled) {
                    return discoverTokenEndpoint(client, authServerUrl);
                }
                break;
            } catch (Throwable throwable) {
                // Unwrap CompletionException layers to reach the real cause.
                while (throwable instanceof CompletionException && throwable.getCause() != null) {
                    throwable = throwable.getCause();
                }
                if (throwable instanceof OidcClientException) {
                    if (i + 1 < connectionRetryCount) {
                        try {
                            Thread.sleep(2000);
                        } catch (InterruptedException iex) {
                            // NOTE(review): interrupt is swallowed without re-interrupting the
                            // thread — consider Thread.currentThread().interrupt().
                        }
                    } else {
                        // Retries exhausted: propagate the original failure.
                        throw (OidcClientException) throwable;
                    }
                } else {
                    throw new OidcClientException(throwable);
                }
            }
        }
        return Uni.createFrom().nullItem();
    }

    /** Wraps a connection failure into an {@code OidcClientException} with a formatted message. */
    protected static OidcClientException toOidcClientException(String authServerUrlString, Throwable cause) {
        return new OidcClientException(OidcCommonUtils.formatConnectionErrorMessage(authServerUrlString), cause);
    }

    /**
     * Placeholder client returned for disabled configurations; every operation throws an
     * {@code OidcClientException} carrying the reason.
     */
    private static class DisabledOidcClient implements OidcClient {
        String message;

        DisabledOidcClient(String message) {
            this.message = message;
        }

        @Override
        public Uni<Tokens> getTokens() {
            throw new OidcClientException(message);
        }

        @Override
        public Uni<Tokens> refreshTokens(String refreshToken) {
            throw new OidcClientException(message);
        }

        @Override
        public void close() throws IOException {
            throw new OidcClientException(message);
        }
    }
}
/**
 * Quarkus runtime recorder that builds {@code OidcClient} instances from the
 * {@code quarkus.oidc-client.*} configuration, optionally discovering the token
 * endpoint from the OpenID Connect provider.
 */
class OidcClientRecorder {

    private static final Logger LOG = Logger.getLogger(OidcClientRecorder.class);
    // Fallback id used when the default client has no explicit id configured.
    private static final String DEFAULT_OIDC_CLIENT_ID = "Default";

    /**
     * Creates the default client and all statically configured named clients, and wraps them
     * in an {@code OidcClientsImpl}; the supplied function lazily builds further clients on demand.
     */
    public OidcClients setup(OidcClientsConfig oidcClientsConfig, TlsConfig tlsConfig, Supplier<Vertx> vertx) {
        String defaultClientId = oidcClientsConfig.defaultClient.getId().orElse(DEFAULT_OIDC_CLIENT_ID);
        OidcClient defaultClient = createOidcClient(oidcClientsConfig.defaultClient, defaultClientId, tlsConfig, vertx);
        Map<String, OidcClient> staticOidcClients = new HashMap<>();
        for (Map.Entry<String, OidcClientConfig> config : oidcClientsConfig.namedClients.entrySet()) {
            // Each named client id must not clash with the default client id.
            OidcCommonUtils.verifyConfigurationId(defaultClientId, config.getKey(), config.getValue().getId());
            staticOidcClients.put(config.getKey(), createOidcClient(config.getValue(), config.getKey(), tlsConfig, vertx));
        }
        return new OidcClientsImpl(defaultClient, staticOidcClients, new Function<OidcClientConfig, Uni<OidcClient>>() {
            @Override
            public Uni<OidcClient> apply(OidcClientConfig config) {
                return createOidcClientUni(config, config.getId().get(), tlsConfig, vertx);
            }
        });
    }

    /** Supplier bean exposing the default {@code OidcClient}. */
    public Supplier<OidcClient> createOidcClientBean(OidcClients clients) {
        return new Supplier<OidcClient>() {
            @Override
            public OidcClient get() {
                return clients.getClient();
            }
        };
    }

    /** Supplier bean exposing the {@code OidcClients} container itself. */
    public Supplier<OidcClients> createOidcClientsBean(OidcClients clients) {
        return new Supplier<OidcClients>() {
            @Override
            public OidcClients get() {
                return clients;
            }
        };
    }

    /** Blocking convenience wrapper around {@link #createOidcClientUni}. */
    protected static OidcClient createOidcClient(OidcClientConfig oidcConfig, String oidcClientId, TlsConfig tlsConfig,
            Supplier<Vertx> vertx) {
        return createOidcClientUni(oidcConfig, oidcClientId, tlsConfig, vertx).await().indefinitely();
    }

    /**
     * Asynchronously builds a single {@code OidcClient}: verifies configuration, resolves the
     * token endpoint (statically or via discovery) and assembles the grant parameters.
     * Disabled configurations yield a {@code DisabledOidcClient} that throws on every call.
     */
    protected static Uni<OidcClient> createOidcClientUni(OidcClientConfig oidcConfig, String oidcClientId,
            TlsConfig tlsConfig, Supplier<Vertx> vertx) {
        if (!oidcConfig.isClientEnabled()) {
            String message = String.format("'%s' client configuration is disabled", oidcClientId);
            LOG.debug(message);
            return Uni.createFrom().item(new DisabledOidcClient(message));
        }
        if (!oidcConfig.getId().isPresent()) {
            oidcConfig.setId(oidcClientId);
        }
        OidcCommonUtils.verifyCommonConfiguration(oidcConfig);
        String authServerUriString = OidcCommonUtils.getAuthServerUrl(oidcConfig);
        WebClientOptions options = new WebClientOptions();
        URI authServerUri = URI.create(authServerUriString);
        OidcCommonUtils.setHttpClientOptions(oidcConfig, tlsConfig, options);
        WebClient client = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx.get()), options);
        Uni<String> tokenRequestUriUni = null;
        if (!oidcConfig.discoveryEnabled) {
            // Discovery disabled: derive the token endpoint from the configured path.
            tokenRequestUriUni = Uni.createFrom()
                    .item(OidcCommonUtils.getOidcEndpointUrl(authServerUri.toString(), oidcConfig.tokenPath));
        } else {
            tokenRequestUriUni = discoverTokenRequestUri(client, authServerUri.toString(), oidcConfig);
        }
        return tokenRequestUriUni.onItem().transform(new Function<String, OidcClient>() {
            @Override
            public OidcClient apply(String tokenRequestUri) {
                // A null endpoint (discovery failed) is surfaced as a configuration error here.
                if (tokenRequestUri == null) {
                    throw new ConfigurationException(
                            "OpenId Connect Provider token endpoint URL is not configured and can not be discovered");
                }
                MultiMap tokenGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
                String grantType = oidcConfig.grant.getType() == Grant.Type.CLIENT
                        ? OidcConstants.CLIENT_CREDENTIALS_GRANT
                        : OidcConstants.PASSWORD_GRANT;
                setGrantClientParams(oidcConfig, tokenGrantParams, grantType);
                if (oidcConfig.grant.getType() == Grant.Type.PASSWORD) {
                    // Password grant additionally carries the configured username/password.
                    Map<String, String> passwordGrantOptions = oidcConfig.getGrantOptions()
                            .get(OidcConstants.PASSWORD_GRANT);
                    tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_USERNAME,
                            passwordGrantOptions.get(OidcConstants.PASSWORD_GRANT_USERNAME));
                    tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_PASSWORD,
                            passwordGrantOptions.get(OidcConstants.PASSWORD_GRANT_PASSWORD));
                }
                MultiMap commonRefreshGrantParams = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
                setGrantClientParams(oidcConfig, commonRefreshGrantParams, OidcConstants.REFRESH_TOKEN_GRANT);
                return new OidcClientImpl(client, tokenRequestUri, grantType, tokenGrantParams,
                        commonRefreshGrantParams, oidcConfig);
            }
        });
    }

    /** Populates the grant parameters shared by token and refresh requests. */
    private static void setGrantClientParams(OidcClientConfig oidcConfig, MultiMap grantParams, String grantType) {
        grantParams.add(OidcConstants.GRANT_TYPE, grantType);
        Credentials creds = oidcConfig.getCredentials();
        if (OidcCommonUtils.isClientSecretPostAuthRequired(creds)) {
            // client_secret_post authentication: credentials travel in the form body.
            grantParams.add(OidcConstants.CLIENT_ID, oidcConfig.clientId.get());
            grantParams.add(OidcConstants.CLIENT_SECRET, OidcCommonUtils.clientSecret(creds));
        }
        if (oidcConfig.getScopes().isPresent()) {
            grantParams.add(OidcConstants.TOKEN_SCOPE,
                    oidcConfig.getScopes().get().stream().collect(Collectors.joining(" ")));
        }
    }

    /**
     * Attempts token-endpoint discovery, retrying up to the configured connection retry count
     * with a 2-second pause between attempts. Yields a null item when discovery is not enabled
     * or no attempt succeeds synchronously.
     *
     * <p>NOTE(review): if {@code discoverTokenEndpoint} returns a lazy Uni, failures may only
     * surface on subscription rather than inside this try/catch — verify the retry loop actually
     * observes them.
     */
    private static Uni<String> discoverTokenRequestUri(WebClient client, String authServerUrl,
            OidcClientConfig oidcConfig) {
        final long connectionRetryCount = OidcCommonUtils.getConnectionRetryCount(oidcConfig);
        if (connectionRetryCount > 1) {
            LOG.infof("Connecting to IDP for up to %d times every 2 seconds", connectionRetryCount);
        }
        for (long i = 0; i < connectionRetryCount; i++) {
            try {
                if (oidcConfig.discoveryEnabled) {
                    return discoverTokenEndpoint(client, authServerUrl);
                }
                break;
            } catch (Throwable throwable) {
                // Unwrap CompletionException layers to reach the real cause.
                while (throwable instanceof CompletionException && throwable.getCause() != null) {
                    throwable = throwable.getCause();
                }
                if (throwable instanceof OidcClientException) {
                    if (i + 1 < connectionRetryCount) {
                        try {
                            Thread.sleep(2000);
                        } catch (InterruptedException iex) {
                            // NOTE(review): interrupt is swallowed without re-interrupting the
                            // thread — consider Thread.currentThread().interrupt().
                        }
                    } else {
                        // Retries exhausted: propagate the original failure.
                        throw (OidcClientException) throwable;
                    }
                } else {
                    throw new OidcClientException(throwable);
                }
            }
        }
        return Uni.createFrom().nullItem();
    }

    /** Wraps a connection failure into an {@code OidcClientException} with a formatted message. */
    protected static OidcClientException toOidcClientException(String authServerUrlString, Throwable cause) {
        return new OidcClientException(OidcCommonUtils.formatConnectionErrorMessage(authServerUrlString), cause);
    }

    /**
     * Placeholder client returned for disabled configurations; every operation throws an
     * {@code OidcClientException} carrying the reason.
     */
    private static class DisabledOidcClient implements OidcClient {
        String message;

        DisabledOidcClient(String message) {
            this.message = message;
        }

        @Override
        public Uni<Tokens> getTokens() {
            throw new OidcClientException(message);
        }

        @Override
        public Uni<Tokens> refreshTokens(String refreshToken) {
            throw new OidcClientException(message);
        }

        @Override
        public void close() throws IOException {
            throw new OidcClientException(message);
        }
    }
}
Thank you, I agree it doesn't hold a reference to a variable here. Implemented!
/**
 * Drains up to {@code maxBatchSize} buffered entries and submits them as one asynchronous
 * request via {@code submitRequestEntries}. Yields to the mailbox while the in-flight
 * request limit is saturated, so this call may block cooperatively.
 */
private void flush() {
    // Wait (cooperatively) for an in-flight slot to free up.
    while (inFlightRequestsCount >= maxInFlightRequests) {
        mailboxExecutor.tryYield();
    }

    int entriesToDrain = Math.min(maxBatchSize, bufferedRequestEntries.size());
    List<RequestEntryT> nextBatch = new ArrayList<>(maxBatchSize);
    int drainedBytes = 0;
    for (int drained = 0; drained < entriesToDrain; drained++) {
        RequestEntryWrapper<RequestEntryT> wrapper = bufferedRequestEntries.remove();
        nextBatch.add(wrapper.getRequestEntry());
        bufferedRequestEntriesTotalSizeInBytes -= wrapper.getSize();
        drainedBytes += wrapper.getSize();
    }
    if (nextBatch.isEmpty()) {
        return;
    }

    long timestampOfRequest = System.currentTimeMillis();
    // Completion callback: re-enters the mailbox thread to requeue any failed entries.
    Consumer<Collection<RequestEntryT>> requestResult =
            failedRequestEntries ->
                    mailboxExecutor.execute(
                            () -> completeRequest(failedRequestEntries, timestampOfRequest),
                            "Mark in-flight request as completed and requeue %d request entries",
                            failedRequestEntries.size());
    // Fatal callback: rethrows on the mailbox thread so the sink fails instead of retrying.
    Consumer<Exception> fatalExceptionCons =
            exception ->
                    mailboxExecutor.execute(
                            () -> {
                                throw exception;
                            },
                            "A fatal exception occurred in the sink that cannot be recovered from or should not be retried.");

    inFlightRequestsCount++;
    submitRequestEntries(nextBatch, requestResult, fatalExceptionCons);
    numRecordsOutCounter.inc(entriesToDrain);
    numBytesOutCounter.inc(drainedBytes);
}
Consumer<Exception> fatalExceptionCons =
/**
 * Drains up to {@code maxBatchSize} buffered entries and submits them as one asynchronous
 * request via {@code submitRequestEntries}. Yields to the mailbox while the in-flight
 * request limit is saturated, so this call may block cooperatively.
 */
private void flush() {
    // Wait (cooperatively) for an in-flight slot to free up.
    while (inFlightRequestsCount >= maxInFlightRequests) {
        mailboxExecutor.tryYield();
    }

    int entriesToDrain = Math.min(maxBatchSize, bufferedRequestEntries.size());
    List<RequestEntryT> nextBatch = new ArrayList<>(maxBatchSize);
    int drainedBytes = 0;
    for (int drained = 0; drained < entriesToDrain; drained++) {
        RequestEntryWrapper<RequestEntryT> wrapper = bufferedRequestEntries.remove();
        nextBatch.add(wrapper.getRequestEntry());
        bufferedRequestEntriesTotalSizeInBytes -= wrapper.getSize();
        drainedBytes += wrapper.getSize();
    }
    if (nextBatch.isEmpty()) {
        return;
    }

    long timestampOfRequest = System.currentTimeMillis();
    // Completion callback: re-enters the mailbox thread to requeue any failed entries.
    Consumer<Collection<RequestEntryT>> requestResult =
            failedRequestEntries ->
                    mailboxExecutor.execute(
                            () -> completeRequest(failedRequestEntries, timestampOfRequest),
                            "Mark in-flight request as completed and requeue %d request entries",
                            failedRequestEntries.size());

    inFlightRequestsCount++;
    submitRequestEntries(nextBatch, requestResult);
    numRecordsOutCounter.inc(entriesToDrain);
    numBytesOutCounter.inc(drainedBytes);
}
/**
 * A sink writer that buffers incoming elements as destination-specific request entries and
 * submits them asynchronously in batches. Concrete destinations implement
 * {@code submitRequestEntries} and {@code getSizeInBytes}.
 *
 * <p>NOTE(review): this excerpt calls {@code flush()} but the method's definition lies outside
 * the visible region of the file.
 */
class AsyncSinkWriter<InputT, RequestEntryT extends Serializable>
        implements SinkWriter<InputT, Void, Collection<RequestEntryT>> {

    private final MailboxExecutor mailboxExecutor;
    private final Sink.ProcessingTimeService timeService;

    /* The timestamp at which the previous batch of records was sent from this sink. */
    private long lastSendTimestamp = 0;

    /* The timestamp of the response to the previous request from this sink. */
    private long ackTime = Long.MAX_VALUE;

    /* The sink writer metric group. */
    private final SinkWriterMetricGroup metrics;

    /* Counter for number of bytes this sink has attempted to send to the destination. */
    private final Counter numBytesOutCounter;

    /* Counter for number of records this sink has attempted to send to the destination. */
    private final Counter numRecordsOutCounter;

    // Buffering and flushing limits; all validated as positive in the constructor.
    private final int maxBatchSize;
    private final int maxInFlightRequests;
    private final int maxBufferedRequests;
    private final long flushOnBufferSizeInBytes;
    private final long maxTimeInBufferMS;

    /**
     * The ElementConverter provides a mapping from the elements of a stream to request
     * entries that can be sent to the destination.
     *
     * <p>The resulting request entry is buffered by the AsyncSinkWriter and sent to the
     * destination when the {@code submitRequestEntries} method is invoked.
     */
    private final ElementConverter<InputT, RequestEntryT> elementConverter;

    /**
     * Buffer to hold request entries that should be persisted into the destination, along with
     * their size in bytes.
     *
     * <p>A request entry contains all relevant details to make a call to the destination. Eg, for
     * Kinesis Data Streams a request entry contains the payload and partition key.
     *
     * <p>It seems more natural to buffer InputT, ie, the events that should be persisted, rather
     * than RequestEntryT. However, in practice, the response of a failed request call can make it
     * very hard, if not impossible, to reconstruct the original event. It is much easier, to just
     * construct a new (retry) request entry from the response and add that back to the queue for
     * later retry.
     */
    private final Deque<RequestEntryWrapper<RequestEntryT>> bufferedRequestEntries =
            new ArrayDeque<>();

    /**
     * Tracks all pending async calls that have been executed since the last checkpoint. Calls that
     * completed (successfully or unsuccessfully) are automatically decrementing the counter. Any
     * request entry that was not successfully persisted needs to be handled and retried by the
     * logic in {@code submitRequestsToApi}.
     *
     * <p>There is a limit on the number of concurrent (async) requests that can be handled by the
     * client library. This limit is enforced by checking the queue size before accepting a new
     * element into the queue.
     *
     * <p>To complete a checkpoint, we need to make sure that no requests are in flight, as they may
     * fail, which could then lead to data loss.
     */
    private int inFlightRequestsCount;

    /**
     * Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate
     * the criterion for flushing after {@code flushOnBufferSizeInBytes} is reached.
     */
    private double bufferedRequestEntriesTotalSizeInBytes;

    // True while a processing-time flush timer is pending; prevents duplicate timer registration.
    private boolean existsActiveTimerCallback = false;

    /**
     * This method specifies how to persist buffered request entries into the destination. It is
     * implemented when support for a new destination is added.
     *
     * <p>The method is invoked with a set of request entries according to the buffering hints (and
     * the valid limits of the destination). The logic then needs to create and execute the request
     * asynchronously against the destination (ideally by batching together multiple request entries
     * to increase efficiency). The logic also needs to identify individual request entries that
     * were not persisted successfully and resubmit them using the {@code requestResult} callback.
     *
     * <p>From a threading perspective, the mailbox thread will call this method and initiate the
     * asynchronous request to persist the {@code requestEntries}. NOTE: The client must support
     * asynchronous requests and the method called to persist the records must asynchronously
     * execute and return a future with the results of that request. A thread from the destination
     * client thread pool should complete the request and submit the failed entries that should be
     * retried. The {@code requestResult} will then trigger the mailbox thread to requeue the
     * unsuccessful elements.
     *
     * <p>An example implementation of this method is included:
     *
     * <pre>{@code
     * @Override
     * protected void submitRequestEntries
     *     (List<RequestEntryT> records, Consumer<Collection<RequestEntryT>> requestResult) {
     *     Future<Response> response = destinationClient.putRecords(records);
     *     response.whenComplete(
     *         (response, error) -> {
     *             if(error){
     *                 List<RequestEntryT> retryableFailedRecords = getRetryableFailed(response);
     *                 requestResult.accept(retryableFailedRecords);
     *             }else{
     *                 requestResult.accept(Collections.emptyList());
     *             }
     *         }
     *     );
     * }
     *
     * }</pre>
     *
     * <p>During checkpointing, the sink needs to ensure that there are no outstanding in-flight
     * requests.
     *
     * @param requestEntries a set of request entries that should be sent to the destination
     * @param requestResult the {@code accept} method should be called on this Consumer once the
     *     processing of the {@code requestEntries} are complete. Any entries that encountered
     *     difficulties in persisting should be re-queued through {@code requestResult} by including
     *     that element in the collection of {@code RequestEntryT}s passed to the {@code accept}
     *     method. All other elements are assumed to have been successfully persisted.
     * @param fatalException the {@code accept} method should be called on this Consumer if the
     *     processing of the {@code requestEntries} raises an exception that should not be retried.
     *     Specifically, any action that we are sure will result in the same exception no matter how
     *     many times we retry should raise a {@code RuntimeException} here. For example, wrong user
     *     credentials. However, it is possible intermittent failures will recover, e.g. flaky
     *     network connections, in which case, some other mechanism may be more appropriate.
     */
    protected abstract void submitRequestEntries(
            List<RequestEntryT> requestEntries,
            Consumer<Collection<RequestEntryT>> requestResult,
            Consumer<Exception> fatalException);

    /**
     * This method allows the getting of the size of a {@code RequestEntryT} in bytes. The size in
     * this case is measured as the total bytes that is written to the destination as a result of
     * persisting this particular {@code RequestEntryT} rather than the serialized length (which may
     * be the same).
     *
     * @param requestEntry the requestEntry for which we want to know the size
     * @return the size of the requestEntry, as defined previously
     */
    protected abstract long getSizeInBytes(RequestEntryT requestEntry);

    /**
     * Creates the writer, validating all buffering limits (each must be positive, and the buffer
     * capacity must strictly exceed the batch size) and wiring the sink metrics.
     */
    public AsyncSinkWriter(
            ElementConverter<InputT, RequestEntryT> elementConverter,
            Sink.InitContext context,
            int maxBatchSize,
            int maxInFlightRequests,
            int maxBufferedRequests,
            long flushOnBufferSizeInBytes,
            long maxTimeInBufferMS) {
        this.elementConverter = elementConverter;
        this.mailboxExecutor = context.getMailboxExecutor();
        this.timeService = context.getProcessingTimeService();
        Preconditions.checkNotNull(elementConverter);
        Preconditions.checkArgument(maxBatchSize > 0);
        Preconditions.checkArgument(maxBufferedRequests > 0);
        Preconditions.checkArgument(maxInFlightRequests > 0);
        Preconditions.checkArgument(flushOnBufferSizeInBytes > 0);
        Preconditions.checkArgument(maxTimeInBufferMS > 0);
        Preconditions.checkArgument(
                maxBufferedRequests > maxBatchSize,
                "The maximum number of requests that may be buffered should be strictly"
                        + " greater than the maximum number of requests per batch.");
        this.maxBatchSize = maxBatchSize;
        this.maxInFlightRequests = maxInFlightRequests;
        this.maxBufferedRequests = maxBufferedRequests;
        this.flushOnBufferSizeInBytes = flushOnBufferSizeInBytes;
        this.maxTimeInBufferMS = maxTimeInBufferMS;
        this.inFlightRequestsCount = 0;
        this.bufferedRequestEntriesTotalSizeInBytes = 0;
        this.metrics = context.metricGroup();
        this.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp);
        this.numBytesOutCounter = this.metrics.getIOMetricGroup().getNumBytesOutCounter();
        this.numRecordsOutCounter = this.metrics.getIOMetricGroup().getNumRecordsOutCounter();
    }

    /**
     * Registers a one-shot processing-time timer that flushes any entries still buffered after
     * {@code maxTimeInBufferMS}, so records cannot linger indefinitely in a partially-filled
     * buffer.
     */
    private void registerCallback() {
        Sink.ProcessingTimeService.ProcessingTimeCallback ptc =
                instant -> {
                    existsActiveTimerCallback = false;
                    while (!bufferedRequestEntries.isEmpty()) {
                        flush();
                    }
                };
        timeService.registerProcessingTimer(
                timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc);
        existsActiveTimerCallback = true;
    }

    /**
     * Converts the element to a request entry and buffers it, yielding to the mailbox while the
     * buffer is at capacity; then flushes if any flush criterion is met.
     */
    @Override
    public void write(InputT element, Context context) throws IOException, InterruptedException {
        while (bufferedRequestEntries.size() >= maxBufferedRequests) {
            mailboxExecutor.tryYield();
        }
        addEntryToBuffer(elementConverter.apply(element, context), false);
        flushIfAble();
    }

    /** Flushes repeatedly while either the batch-size or the byte-size threshold is exceeded. */
    private void flushIfAble() {
        while (bufferedRequestEntries.size() >= maxBatchSize
                || bufferedRequestEntriesTotalSizeInBytes >= flushOnBufferSizeInBytes) {
            flush();
        }
    }

    /**
     * Persists buffered RequestsEntries into the destination by invoking {@code
     * submitRequestEntries} with batches according to the user specified buffering hints.
     *
     * <p>The method blocks if too many async requests are in flight.
     */
    /**
     * Marks an in-flight request as completed and prepends failed requestEntries back to the
     * internal requestEntry buffer for later retry.
     *
     * @param failedRequestEntries requestEntries that need to be retried
     */
    private void completeRequest(
            Collection<RequestEntryT> failedRequestEntries, long requestStartTime) {
        lastSendTimestamp = requestStartTime;
        ackTime = System.currentTimeMillis();
        inFlightRequestsCount--;
        // Failed entries go to the head of the deque so they are retried first, preserving order.
        failedRequestEntries.forEach(failedEntry -> addEntryToBuffer(failedEntry, true));
    }

    /**
     * Wraps the entry with its byte size and appends (or, for retries, prepends) it to the
     * buffer; arms the flush timer when the buffer transitions from empty to non-empty.
     */
    private void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) {
        if (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) {
            registerCallback();
        }
        RequestEntryWrapper<RequestEntryT> wrappedEntry =
                new RequestEntryWrapper<>(entry, getSizeInBytes(entry));
        if (insertAtHead) {
            bufferedRequestEntries.addFirst(wrappedEntry);
        } else {
            bufferedRequestEntries.add(wrappedEntry);
        }
        bufferedRequestEntriesTotalSizeInBytes += wrappedEntry.getSize();
    }

    /**
     * In flight requests will be retried if the sink is still healthy. But if in-flight requests
     * fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint,
     * the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any
     * outstanding in-flight requests when a commit is initialized.
     *
     * <p>To this end, all in-flight requests need to completed before proceeding with the commit.
     */
    @Override
    public List<Void> prepareCommit(boolean flush) {
        while (inFlightRequestsCount > 0 || bufferedRequestEntries.size() > 0) {
            mailboxExecutor.tryYield();
            if (flush) {
                flush();
            }
        }
        return Collections.emptyList();
    }

    /**
     * All in-flight requests that are relevant for the snapshot have been completed, but there may
     * still be request entries in the internal buffers that are yet to be sent to the endpoint.
     * These request entries are stored in the snapshot state so that they don't get lost in case of
     * a failure/restart of the application.
     */
    @Override
    public List<Collection<RequestEntryT>> snapshotState() {
        return Arrays.asList(
                bufferedRequestEntries.stream()
                        .map(RequestEntryWrapper::getRequestEntry)
                        .collect(Collectors.toList()));
    }

    @Override
    public void close() {}
}
class AsyncSinkWriter<InputT, RequestEntryT extends Serializable> implements SinkWriter<InputT, Void, Collection<RequestEntryT>> { private final MailboxExecutor mailboxExecutor; private final Sink.ProcessingTimeService timeService; /* The timestamp of the previous batch of records was sent from this sink. */ private long lastSendTimestamp = 0; /* The timestamp of the response to the previous request from this sink. */ private long ackTime = Long.MAX_VALUE; /* The sink writer metric group. */ private final SinkWriterMetricGroup metrics; /* Counter for number of bytes this sink has attempted to send to the destination. */ private final Counter numBytesOutCounter; /* Counter for number of records this sink has attempted to send to the destination. */ private final Counter numRecordsOutCounter; private final int maxBatchSize; private final int maxInFlightRequests; private final int maxBufferedRequests; private final long flushOnBufferSizeInBytes; private final long maxTimeInBufferMS; /** * The ElementConverter provides a mapping between for the elements of a stream to request * entries that can be sent to the destination. * * <p>The resulting request entry is buffered by the AsyncSinkWriter and sent to the destination * when the {@code submitRequestEntries} method is invoked. */ private final ElementConverter<InputT, RequestEntryT> elementConverter; /** * Buffer to hold request entries that should be persisted into the destination, along with its * size in bytes. * * <p>A request entry contain all relevant details to make a call to the destination. Eg, for * Kinesis Data Streams a request entry contains the payload and partition key. * * <p>It seems more natural to buffer InputT, ie, the events that should be persisted, rather * than RequestEntryT. However, in practice, the response of a failed request call can make it * very hard, if not impossible, to reconstruct the original event. 
It is much easier, to just * construct a new (retry) request entry from the response and add that back to the queue for * later retry. */ private final Deque<RequestEntryWrapper<RequestEntryT>> bufferedRequestEntries = new ArrayDeque<>(); /** * Tracks all pending async calls that have been executed since the last checkpoint. Calls that * completed (successfully or unsuccessfully) are automatically decrementing the counter. Any * request entry that was not successfully persisted needs to be handled and retried by the * logic in {@code submitRequestsToApi}. * * <p>There is a limit on the number of concurrent (async) requests that can be handled by the * client library. This limit is enforced by checking the queue size before accepting a new * element into the queue. * * <p>To complete a checkpoint, we need to make sure that no requests are in flight, as they may * fail, which could then lead to data loss. */ private int inFlightRequestsCount; /** * Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate * the criterion for flushing after {@code flushOnBufferSizeInBytes} is reached. */ private double bufferedRequestEntriesTotalSizeInBytes; private boolean existsActiveTimerCallback = false; /** * The {@code accept} method should be called on this Consumer if the processing of the {@code * requestEntries} raises an exception that should not be retried. Specifically, any action that * we are sure will result in the same exception no matter how many times we retry should raise * a {@code RuntimeException} here. For example, wrong user credentials. However, it is possible * intermittent failures will recover, e.g. flaky network connections, in which case, some other * mechanism may be more appropriate. */ private final Consumer<Exception> fatalExceptionCons; /** * This method specifies how to persist buffered request entries into the destination. It is * implemented when support for a new destination is added. 
* * <p>The method is invoked with a set of request entries according to the buffering hints (and * the valid limits of the destination). The logic then needs to create and execute the request * asynchronously against the destination (ideally by batching together multiple request entries * to increase efficiency). The logic also needs to identify individual request entries that * were not persisted successfully and resubmit them using the {@code requestResult} callback. * * <p>From a threading perspective, the mailbox thread will call this method and initiate the * asynchronous request to persist the {@code requestEntries}. NOTE: The client must support * asynchronous requests and the method called to persist the records must asynchronously * execute and return a future with the results of that request. A thread from the destination * client thread pool should complete the request and submit the failed entries that should be * retried. The {@code requestResult} will then trigger the mailbox thread to requeue the * unsuccessful elements. * * <p>An example implementation of this method is included: * * <pre>{@code * @Override * protected void submitRequestEntries * (List<RequestEntryT> records, Consumer<Collection<RequestEntryT>> requestResult) { * Future<Response> response = destinationClient.putRecords(records); * response.whenComplete( * (response, error) -> { * if(error){ * List<RequestEntryT> retryableFailedRecords = getRetryableFailed(response); * requestResult.accept(retryableFailedRecords); * }else{ * requestResult.accept(Collections.emptyList()); * } * } * ); * } * * }</pre> * * <p>During checkpointing, the sink needs to ensure that there are no outstanding in-flight * requests. * * @param requestEntries a set of request entries that should be sent to the destination * @param requestResult the {@code accept} method should be called on this Consumer once the * processing of the {@code requestEntries} are complete. 
Any entries that encountered * difficulties in persisting should be re-queued through {@code requestResult} by including * that element in the collection of {@code RequestEntryT}s passed to the {@code accept} * method. All other elements are assumed to have been successfully persisted. */ protected abstract void submitRequestEntries( List<RequestEntryT> requestEntries, Consumer<Collection<RequestEntryT>> requestResult); /** * This method allows the getting of the size of a {@code RequestEntryT} in bytes. The size in * this case is measured as the total bytes that is written to the destination as a result of * persisting this particular {@code RequestEntryT} rather than the serialized length (which may * be the same). * * @param requestEntry the requestEntry for which we want to know the size * @return the size of the requestEntry, as defined previously */ protected abstract long getSizeInBytes(RequestEntryT requestEntry); public AsyncSinkWriter( ElementConverter<InputT, RequestEntryT> elementConverter, Sink.InitContext context, int maxBatchSize, int maxInFlightRequests, int maxBufferedRequests, long flushOnBufferSizeInBytes, long maxTimeInBufferMS) { this.elementConverter = elementConverter; this.mailboxExecutor = context.getMailboxExecutor(); this.timeService = context.getProcessingTimeService(); Preconditions.checkNotNull(elementConverter); Preconditions.checkArgument(maxBatchSize > 0); Preconditions.checkArgument(maxBufferedRequests > 0); Preconditions.checkArgument(maxInFlightRequests > 0); Preconditions.checkArgument(flushOnBufferSizeInBytes > 0); Preconditions.checkArgument(maxTimeInBufferMS > 0); Preconditions.checkArgument( maxBufferedRequests > maxBatchSize, "The maximum number of requests that may be buffered should be strictly" + " greater than the maximum number of requests per batch."); this.maxBatchSize = maxBatchSize; this.maxInFlightRequests = maxInFlightRequests; this.maxBufferedRequests = maxBufferedRequests; this.flushOnBufferSizeInBytes = 
flushOnBufferSizeInBytes; this.maxTimeInBufferMS = maxTimeInBufferMS; this.inFlightRequestsCount = 0; this.bufferedRequestEntriesTotalSizeInBytes = 0; this.metrics = context.metricGroup(); this.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp); this.numBytesOutCounter = this.metrics.getIOMetricGroup().getNumBytesOutCounter(); this.numRecordsOutCounter = this.metrics.getIOMetricGroup().getNumRecordsOutCounter(); this.fatalExceptionCons = exception -> mailboxExecutor.execute( () -> { throw exception; }, "A fatal exception occurred in the sink that cannot be recovered from or should not be retried."); } private void registerCallback() { Sink.ProcessingTimeService.ProcessingTimeCallback ptc = instant -> { existsActiveTimerCallback = false; while (!bufferedRequestEntries.isEmpty()) { flush(); } }; timeService.registerProcessingTimer( timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc); existsActiveTimerCallback = true; } @Override public void write(InputT element, Context context) throws IOException, InterruptedException { while (bufferedRequestEntries.size() >= maxBufferedRequests) { mailboxExecutor.tryYield(); } addEntryToBuffer(elementConverter.apply(element, context), false); flushIfAble(); } private void flushIfAble() { while (bufferedRequestEntries.size() >= maxBatchSize || bufferedRequestEntriesTotalSizeInBytes >= flushOnBufferSizeInBytes) { flush(); } } /** * Persists buffered RequestsEntries into the destination by invoking {@code * submitRequestEntries} with batches according to the user specified buffering hints. * * <p>The method blocks if too many async requests are in flight. */ /** * Marks an in-flight request as completed and prepends failed requestEntries back to the * internal requestEntry buffer for later retry. 
* * @param failedRequestEntries requestEntries that need to be retried */ private void completeRequest( Collection<RequestEntryT> failedRequestEntries, long requestStartTime) { lastSendTimestamp = requestStartTime; ackTime = System.currentTimeMillis(); inFlightRequestsCount--; failedRequestEntries.forEach(failedEntry -> addEntryToBuffer(failedEntry, true)); } private void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) { if (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) { registerCallback(); } RequestEntryWrapper<RequestEntryT> wrappedEntry = new RequestEntryWrapper<>(entry, getSizeInBytes(entry)); if (insertAtHead) { bufferedRequestEntries.addFirst(wrappedEntry); } else { bufferedRequestEntries.add(wrappedEntry); } bufferedRequestEntriesTotalSizeInBytes += wrappedEntry.getSize(); } /** * In flight requests will be retried if the sink is still healthy. But if in-flight requests * fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint, * the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any * outstanding in-flight requests when a commit is initialized. * * <p>To this end, all in-flight requests need to completed before proceeding with the commit. */ @Override public List<Void> prepareCommit(boolean flush) { while (inFlightRequestsCount > 0 || bufferedRequestEntries.size() > 0) { mailboxExecutor.tryYield(); if (flush) { flush(); } } return Collections.emptyList(); } /** * All in-flight requests that are relevant for the snapshot have been completed, but there may * still be request entries in the internal buffers that are yet to be sent to the endpoint. * These request entries are stored in the snapshot state so that they don't get lost in case of * a failure/restart of the application. 
*/ @Override public List<Collection<RequestEntryT>> snapshotState() { return Arrays.asList( bufferedRequestEntries.stream() .map(RequestEntryWrapper::getRequestEntry) .collect(Collectors.toList())); } @Override public void close() {} protected Consumer<Exception> getFatalExceptionCons() { return fatalExceptionCons; } }
Here will be a problem. For now, `DataProperty.DEFAULT_DATA_PROPERTY` is configuration, so here, it is all possible to write `true` or `false`. And when reading, `true` may point to HDD, may point to SSD, depends on the config.
public void write(DataOutput out) throws IOException { Text.writeString(out, type.name()); Preconditions.checkState(idToDataProperty.size() == idToReplicationNum.size()); Preconditions.checkState(idToInMemory.keySet().equals(idToReplicationNum.keySet())); out.writeInt(idToDataProperty.size()); for (Map.Entry<Long, DataProperty> entry : idToDataProperty.entrySet()) { out.writeLong(entry.getKey()); if (entry.getValue() == DataProperty.DEFAULT_DATA_PROPERTY) { out.writeBoolean(true); } else { out.writeBoolean(false); entry.getValue().write(out); } out.writeShort(idToReplicationNum.get(entry.getKey())); out.writeBoolean(idToInMemory.get(entry.getKey())); } }
if (entry.getValue() == DataProperty.DEFAULT_DATA_PROPERTY) {
public void write(DataOutput out) throws IOException { Text.writeString(out, type.name()); Preconditions.checkState(idToDataProperty.size() == idToReplicationNum.size()); Preconditions.checkState(idToInMemory.keySet().equals(idToReplicationNum.keySet())); out.writeInt(idToDataProperty.size()); for (Map.Entry<Long, DataProperty> entry : idToDataProperty.entrySet()) { out.writeLong(entry.getKey()); if (entry.getValue().equals(new DataProperty(TStorageMedium.HDD))) { out.writeBoolean(true); } else { out.writeBoolean(false); entry.getValue().write(out); } out.writeShort(idToReplicationNum.get(entry.getKey())); out.writeBoolean(idToInMemory.get(entry.getKey())); } }
class PartitionInfo implements Writable { private static final Logger LOG = LogManager.getLogger(PartitionInfo.class); protected PartitionType type; protected Map<Long, DataProperty> idToDataProperty; protected Map<Long, Short> idToReplicationNum; protected boolean isMultiColumnPartition = false; protected Map<Long, Boolean> idToInMemory; public PartitionInfo() { this.idToDataProperty = new HashMap<Long, DataProperty>(); this.idToReplicationNum = new HashMap<Long, Short>(); this.idToInMemory = new HashMap<>(); } public PartitionInfo(PartitionType type) { this.type = type; this.idToDataProperty = new HashMap<Long, DataProperty>(); this.idToReplicationNum = new HashMap<Long, Short>(); this.idToInMemory = new HashMap<>(); } public PartitionType getType() { return type; } public DataProperty getDataProperty(long partitionId) { return idToDataProperty.get(partitionId); } public void setDataProperty(long partitionId, DataProperty newDataProperty) { idToDataProperty.put(partitionId, newDataProperty); } public short getReplicationNum(long partitionId) { return idToReplicationNum.get(partitionId); } public void setReplicationNum(long partitionId, short replicationNum) { idToReplicationNum.put(partitionId, replicationNum); } public boolean getIsInMemory(long partitionId) { return idToInMemory.get(partitionId); } public void setIsInMemory(long partitionId, boolean isInMemory) { idToInMemory.put(partitionId, isInMemory); } public void dropPartition(long partitionId) { idToDataProperty.remove(partitionId); idToReplicationNum.remove(partitionId); idToInMemory.remove(partitionId); } public void addPartition(long partitionId, DataProperty dataProperty, short replicationNum, boolean isInMemory) { idToDataProperty.put(partitionId, dataProperty); idToReplicationNum.put(partitionId, replicationNum); idToInMemory.put(partitionId, isInMemory); } public static PartitionInfo read(DataInput in) throws IOException { PartitionInfo partitionInfo = new PartitionInfo(); 
partitionInfo.readFields(in); return partitionInfo; } public boolean isMultiColumnPartition() { return isMultiColumnPartition; } public String toSql(OlapTable table, List<Long> partitionId) { return ""; } @Override public void readFields(DataInput in) throws IOException { type = PartitionType.valueOf(Text.readString(in)); int counter = in.readInt(); for (int i = 0; i < counter; i++) { long partitionId = in.readLong(); boolean isDefaultDataProperty = in.readBoolean(); if (isDefaultDataProperty) { idToDataProperty.put(partitionId, DataProperty.DEFAULT_DATA_PROPERTY); } else { idToDataProperty.put(partitionId, DataProperty.read(in)); } short replicationNum = in.readShort(); idToReplicationNum.put(partitionId, replicationNum); if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_72) { idToInMemory.put(partitionId, in.readBoolean()); } else { idToInMemory.put(partitionId, false); } } } @Override public String toString() { StringBuilder buff = new StringBuilder(); buff.append("type: ").append(type.typeString).append("; "); for (Map.Entry<Long, DataProperty> entry : idToDataProperty.entrySet()) { buff.append(entry.getKey()).append("is HDD: ");; if (entry.getValue() == DataProperty.DEFAULT_DATA_PROPERTY) { buff.append(true); } else { buff.append(false); } buff.append("data_property: ").append(entry.getValue().toString()); buff.append("replica number: ").append(idToReplicationNum.get(entry.getKey())); buff.append("in memory: ").append(idToInMemory.get(entry.getKey())); } return buff.toString(); } }
class PartitionInfo implements Writable { private static final Logger LOG = LogManager.getLogger(PartitionInfo.class); protected PartitionType type; protected Map<Long, DataProperty> idToDataProperty; protected Map<Long, Short> idToReplicationNum; protected boolean isMultiColumnPartition = false; protected Map<Long, Boolean> idToInMemory; public PartitionInfo() { this.idToDataProperty = new HashMap<Long, DataProperty>(); this.idToReplicationNum = new HashMap<Long, Short>(); this.idToInMemory = new HashMap<>(); } public PartitionInfo(PartitionType type) { this.type = type; this.idToDataProperty = new HashMap<Long, DataProperty>(); this.idToReplicationNum = new HashMap<Long, Short>(); this.idToInMemory = new HashMap<>(); } public PartitionType getType() { return type; } public DataProperty getDataProperty(long partitionId) { return idToDataProperty.get(partitionId); } public void setDataProperty(long partitionId, DataProperty newDataProperty) { idToDataProperty.put(partitionId, newDataProperty); } public short getReplicationNum(long partitionId) { return idToReplicationNum.get(partitionId); } public void setReplicationNum(long partitionId, short replicationNum) { idToReplicationNum.put(partitionId, replicationNum); } public boolean getIsInMemory(long partitionId) { return idToInMemory.get(partitionId); } public void setIsInMemory(long partitionId, boolean isInMemory) { idToInMemory.put(partitionId, isInMemory); } public void dropPartition(long partitionId) { idToDataProperty.remove(partitionId); idToReplicationNum.remove(partitionId); idToInMemory.remove(partitionId); } public void addPartition(long partitionId, DataProperty dataProperty, short replicationNum, boolean isInMemory) { idToDataProperty.put(partitionId, dataProperty); idToReplicationNum.put(partitionId, replicationNum); idToInMemory.put(partitionId, isInMemory); } public static PartitionInfo read(DataInput in) throws IOException { PartitionInfo partitionInfo = new PartitionInfo(); 
partitionInfo.readFields(in); return partitionInfo; } public boolean isMultiColumnPartition() { return isMultiColumnPartition; } public String toSql(OlapTable table, List<Long> partitionId) { return ""; } @Override public void readFields(DataInput in) throws IOException { type = PartitionType.valueOf(Text.readString(in)); int counter = in.readInt(); for (int i = 0; i < counter; i++) { long partitionId = in.readLong(); boolean isDefaultHddDataProperty = in.readBoolean(); if (isDefaultHddDataProperty) { idToDataProperty.put(partitionId, new DataProperty(TStorageMedium.HDD)); } else { idToDataProperty.put(partitionId, DataProperty.read(in)); } short replicationNum = in.readShort(); idToReplicationNum.put(partitionId, replicationNum); if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_72) { idToInMemory.put(partitionId, in.readBoolean()); } else { idToInMemory.put(partitionId, false); } } } @Override public String toString() { StringBuilder buff = new StringBuilder(); buff.append("type: ").append(type.typeString).append("; "); for (Map.Entry<Long, DataProperty> entry : idToDataProperty.entrySet()) { buff.append(entry.getKey()).append("is HDD: ");; if (entry.getValue().equals(new DataProperty(TStorageMedium.HDD))) { buff.append(true); } else { buff.append(false); } buff.append("data_property: ").append(entry.getValue().toString()); buff.append("replica number: ").append(idToReplicationNum.get(entry.getKey())); buff.append("in memory: ").append(idToInMemory.get(entry.getKey())); } return buff.toString(); } }
Yes I agree `UnresolvedFieldReference` cannot be translated into RexNode and also that it should never end up in the Planner. This is also the main purpose of the `UnresolvedFieldReference` to make this distinction clear. `ExpressionVisitor` is just a way to traverse the operation tree. I am currently working on moving all the resolution of expressions into a single place in the API module, as part of FLINK-11884. I agree we should move this resolution here up to the API module as well once it's in place.
public RexNode visit(Expression other) { if (other instanceof UnresolvedFieldReferenceExpression) { return visitUnresolvedFieldReferenceExpression((UnresolvedFieldReferenceExpression) other); } else if (other instanceof ResolvedAggInputReference) { return visitResolvedAggInputReference((ResolvedAggInputReference) other); } else if (other instanceof ResolvedAggLocalReference) { return visitResolvedAggLocalReference((ResolvedAggLocalReference) other); } else if (other instanceof ResolvedDistinctKeyReference) { return visitResolvedDistinctKeyReference((ResolvedDistinctKeyReference) other); } else { throw new UnsupportedOperationException(other.getClass().getSimpleName() + ":" + other.toString()); } }
if (other instanceof UnresolvedFieldReferenceExpression) {
public RexNode visit(Expression other) { if (other instanceof UnresolvedFieldReferenceExpression) { return visitUnresolvedFieldReferenceExpression((UnresolvedFieldReferenceExpression) other); } else if (other instanceof ResolvedAggInputReference) { return visitResolvedAggInputReference((ResolvedAggInputReference) other); } else if (other instanceof ResolvedAggLocalReference) { return visitResolvedAggLocalReference((ResolvedAggLocalReference) other); } else if (other instanceof ResolvedDistinctKeyReference) { return visitResolvedDistinctKeyReference((ResolvedDistinctKeyReference) other); } else { throw new UnsupportedOperationException(other.getClass().getSimpleName() + ":" + other.toString()); } }
class RexNodeConverter implements ExpressionVisitor<RexNode> { private final RelBuilder relBuilder; private final FlinkTypeFactory typeFactory; public RexNodeConverter(RelBuilder relBuilder) { this.relBuilder = relBuilder; this.typeFactory = (FlinkTypeFactory) relBuilder.getRexBuilder().getTypeFactory(); } @Override public RexNode visitCall(CallExpression call) { List<RexNode> child = call.getChildren().stream() .map(expression -> expression.accept(RexNodeConverter.this)) .collect(Collectors.toList()); switch (call.getFunctionDefinition().getType()) { case SCALAR_FUNCTION: return visitScalarFunc(call.getFunctionDefinition(), child); default: throw new UnsupportedOperationException(); } } private RexNode visitScalarFunc(FunctionDefinition def, List<RexNode> child) { if (BuiltInFunctionDefinitions.IF.equals(def)) { return relBuilder.call(SqlStdOperatorTable.CASE, child); } else if (BuiltInFunctionDefinitions.IS_NULL.equals(def)) { return relBuilder.isNull(child.get(0)); } else if (BuiltInFunctionDefinitions.PLUS.equals(def)) { if (isString(toInternalType(child.get(0).getType()))) { return relBuilder.call( SqlStdOperatorTable.CONCAT, child.get(0), relBuilder.cast(child.get(1), VARCHAR)); } else if (isString(toInternalType(child.get(1).getType()))) { return relBuilder.call( SqlStdOperatorTable.CONCAT, relBuilder.cast(child.get(0), VARCHAR), child.get(1)); } else if (isTimeInterval(toInternalType(child.get(0).getType())) && child.get(0).getType() == child.get(1).getType()) { return relBuilder.call(SqlStdOperatorTable.PLUS, child); } else if (isTimeInterval(toInternalType(child.get(0).getType())) && isTemporal(toInternalType(child.get(1).getType()))) { return relBuilder.call(SqlStdOperatorTable.DATETIME_PLUS, child); } else if (isTemporal(toInternalType(child.get(0).getType())) && isTemporal(toInternalType(child.get(1).getType()))) { return relBuilder.call(SqlStdOperatorTable.DATETIME_PLUS, child); } else { return relBuilder.call(SqlStdOperatorTable.PLUS, child); } } 
else if (BuiltInFunctionDefinitions.MINUS.equals(def)) { return relBuilder.call(SqlStdOperatorTable.MINUS, child); } else if (BuiltInFunctionDefinitions.EQUALS.equals(def)) { return relBuilder.call(SqlStdOperatorTable.EQUALS, child); } else if (BuiltInFunctionDefinitions.DIVIDE.equals(def)) { return relBuilder.call(SqlStdOperatorTable.DIVIDE, child); } else { throw new UnsupportedOperationException(def.getName()); } } @Override public RexNode visitSymbol(SymbolExpression symbolExpression) { throw new UnsupportedOperationException(); } @Override public RexNode visitValueLiteral(ValueLiteralExpression expr) { InternalType type = createInternalTypeFromTypeInfo(expr.getType()); Object value = expr.getValue(); RexBuilder rexBuilder = relBuilder.getRexBuilder(); FlinkTypeFactory typeFactory = (FlinkTypeFactory) relBuilder.getTypeFactory(); if (value == null) { return relBuilder.getRexBuilder() .makeCast( typeFactory.createTypeFromInternalType(type, true), relBuilder.getRexBuilder().constantNull()); } if (type instanceof DecimalType) { DecimalType dt = (DecimalType) type; BigDecimal bigDecValue = (BigDecimal) value; RelDataType decType = relBuilder.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, dt.precision(), dt.scale()); return relBuilder.getRexBuilder().makeExactLiteral(bigDecValue, decType); } else if (InternalTypes.LONG.equals(type)) { BigDecimal bigint = value instanceof BigDecimal ? 
(BigDecimal) value : BigDecimal.valueOf((long) value); return relBuilder.getRexBuilder().makeBigintLiteral(bigint); } else if (InternalTypes.FLOAT.equals(type)) { return relBuilder.getRexBuilder().makeApproxLiteral( BigDecimal.valueOf(((Number) value).floatValue()), relBuilder.getTypeFactory().createSqlType(SqlTypeName.FLOAT)); } else if (InternalTypes.DOUBLE.equals(type)) { return rexBuilder.makeApproxLiteral( BigDecimal.valueOf(((Number) value).doubleValue()), relBuilder.getTypeFactory().createSqlType(SqlTypeName.DOUBLE)); } else if (InternalTypes.DATE.equals(type)) { return relBuilder.getRexBuilder().makeDateLiteral( DateString.fromCalendarFields(valueAsCalendar(value))); } else if (InternalTypes.TIME.equals(type)) { return relBuilder.getRexBuilder().makeTimeLiteral( TimeString.fromCalendarFields(valueAsCalendar(value)), 0); } else if (InternalTypes.TIMESTAMP.equals(type)) { return relBuilder.getRexBuilder().makeTimestampLiteral( TimestampString.fromCalendarFields(valueAsCalendar(value)), 3); } else if (InternalTypes.INTERVAL_MONTHS.equals(type)) { BigDecimal interval = BigDecimal.valueOf((int) value); SqlIntervalQualifier intervalQualifier = new SqlIntervalQualifier( TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO); return relBuilder.getRexBuilder().makeIntervalLiteral(interval, intervalQualifier); } else if (InternalTypes.TIMESTAMP.equals(type)) { BigDecimal interval = BigDecimal.valueOf((long) value); SqlIntervalQualifier intervalQualifier = new SqlIntervalQualifier( TimeUnit.DAY, TimeUnit.SECOND, SqlParserPos.ZERO); return relBuilder.getRexBuilder().makeIntervalLiteral(interval, intervalQualifier); } else { return relBuilder.literal(value); } } /** * Convert a Date value to a Calendar. Calcite's fromCalendarField functions use the * Calendar.get methods, so the raw values of the individual fields are preserved when * converted to the String formats. 
* * @return get the Calendar value */ private static Calendar valueAsCalendar(Object value) { Date date = (Date) value; Calendar cal = Calendar.getInstance(); cal.setTime(date); return cal; } @Override public RexNode visitFieldReference(FieldReferenceExpression fieldReference) { return relBuilder.field(fieldReference.getName()); } @Override public RexNode visitTypeLiteral(TypeLiteralExpression typeLiteral) { throw new UnsupportedOperationException(); } @Override private RexNode visitUnresolvedFieldReferenceExpression(UnresolvedFieldReferenceExpression field) { return relBuilder.field(field.getName()); } private RexNode visitResolvedAggInputReference(ResolvedAggInputReference reference) { return new RexInputRef( reference.getIndex(), typeFactory.createTypeFromInternalType(reference.getResultType(), true)); } private RexNode visitResolvedAggLocalReference(ResolvedAggLocalReference reference) { InternalType type = reference.getResultType(); return new RexAggLocalVariable( reference.getFieldTerm(), reference.getNullTerm(), typeFactory.createTypeFromInternalType(type, true), type); } private RexNode visitResolvedDistinctKeyReference(ResolvedDistinctKeyReference reference) { InternalType type = reference.getResultType(); return new RexDistinctKeyVariable( reference.getName(), typeFactory.createTypeFromInternalType(type, true), type); } }
class RexNodeConverter implements ExpressionVisitor<RexNode> { private final RelBuilder relBuilder; private final FlinkTypeFactory typeFactory; public RexNodeConverter(RelBuilder relBuilder) { this.relBuilder = relBuilder; this.typeFactory = (FlinkTypeFactory) relBuilder.getRexBuilder().getTypeFactory(); } @Override public RexNode visitCall(CallExpression call) { List<RexNode> child = call.getChildren().stream() .map(expression -> expression.accept(RexNodeConverter.this)) .collect(Collectors.toList()); switch (call.getFunctionDefinition().getType()) { case SCALAR_FUNCTION: return visitScalarFunc(call.getFunctionDefinition(), child); default: throw new UnsupportedOperationException(); } } private RexNode visitScalarFunc(FunctionDefinition def, List<RexNode> child) { if (BuiltInFunctionDefinitions.IF.equals(def)) { return relBuilder.call(SqlStdOperatorTable.CASE, child); } else if (BuiltInFunctionDefinitions.IS_NULL.equals(def)) { return relBuilder.isNull(child.get(0)); } else if (BuiltInFunctionDefinitions.PLUS.equals(def)) { if (isString(toInternalType(child.get(0).getType()))) { return relBuilder.call( SqlStdOperatorTable.CONCAT, child.get(0), relBuilder.cast(child.get(1), VARCHAR)); } else if (isString(toInternalType(child.get(1).getType()))) { return relBuilder.call( SqlStdOperatorTable.CONCAT, relBuilder.cast(child.get(0), VARCHAR), child.get(1)); } else if (isTimeInterval(toInternalType(child.get(0).getType())) && child.get(0).getType() == child.get(1).getType()) { return relBuilder.call(SqlStdOperatorTable.PLUS, child); } else if (isTimeInterval(toInternalType(child.get(0).getType())) && isTemporal(toInternalType(child.get(1).getType()))) { return relBuilder.call(SqlStdOperatorTable.DATETIME_PLUS, child); } else if (isTemporal(toInternalType(child.get(0).getType())) && isTemporal(toInternalType(child.get(1).getType()))) { return relBuilder.call(SqlStdOperatorTable.DATETIME_PLUS, child); } else { return relBuilder.call(SqlStdOperatorTable.PLUS, child); } } 
else if (BuiltInFunctionDefinitions.MINUS.equals(def)) { return relBuilder.call(SqlStdOperatorTable.MINUS, child); } else if (BuiltInFunctionDefinitions.EQUALS.equals(def)) { return relBuilder.call(SqlStdOperatorTable.EQUALS, child); } else if (BuiltInFunctionDefinitions.DIVIDE.equals(def)) { return relBuilder.call(SqlStdOperatorTable.DIVIDE, child); } else { throw new UnsupportedOperationException(def.getName()); } } @Override public RexNode visitSymbol(SymbolExpression symbolExpression) { throw new UnsupportedOperationException(); } @Override public RexNode visitValueLiteral(ValueLiteralExpression expr) { InternalType type = createInternalTypeFromTypeInfo(expr.getType()); Object value = expr.getValue(); RexBuilder rexBuilder = relBuilder.getRexBuilder(); FlinkTypeFactory typeFactory = (FlinkTypeFactory) relBuilder.getTypeFactory(); if (value == null) { return relBuilder.getRexBuilder() .makeCast( typeFactory.createTypeFromInternalType(type, true), relBuilder.getRexBuilder().constantNull()); } if (type instanceof DecimalType) { DecimalType dt = (DecimalType) type; BigDecimal bigDecValue = (BigDecimal) value; RelDataType decType = relBuilder.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, dt.precision(), dt.scale()); return relBuilder.getRexBuilder().makeExactLiteral(bigDecValue, decType); } else if (InternalTypes.LONG.equals(type)) { BigDecimal bigint = value instanceof BigDecimal ? 
(BigDecimal) value : BigDecimal.valueOf((long) value); return relBuilder.getRexBuilder().makeBigintLiteral(bigint); } else if (InternalTypes.FLOAT.equals(type)) { return relBuilder.getRexBuilder().makeApproxLiteral( BigDecimal.valueOf(((Number) value).floatValue()), relBuilder.getTypeFactory().createSqlType(SqlTypeName.FLOAT)); } else if (InternalTypes.DOUBLE.equals(type)) { return rexBuilder.makeApproxLiteral( BigDecimal.valueOf(((Number) value).doubleValue()), relBuilder.getTypeFactory().createSqlType(SqlTypeName.DOUBLE)); } else if (InternalTypes.DATE.equals(type)) { return relBuilder.getRexBuilder().makeDateLiteral( DateString.fromCalendarFields(valueAsCalendar(value))); } else if (InternalTypes.TIME.equals(type)) { return relBuilder.getRexBuilder().makeTimeLiteral( TimeString.fromCalendarFields(valueAsCalendar(value)), 0); } else if (InternalTypes.TIMESTAMP.equals(type)) { return relBuilder.getRexBuilder().makeTimestampLiteral( TimestampString.fromCalendarFields(valueAsCalendar(value)), 3); } else if (InternalTypes.INTERVAL_MONTHS.equals(type)) { BigDecimal interval = BigDecimal.valueOf((int) value); SqlIntervalQualifier intervalQualifier = new SqlIntervalQualifier( TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO); return relBuilder.getRexBuilder().makeIntervalLiteral(interval, intervalQualifier); } else if (InternalTypes.TIMESTAMP.equals(type)) { BigDecimal interval = BigDecimal.valueOf((long) value); SqlIntervalQualifier intervalQualifier = new SqlIntervalQualifier( TimeUnit.DAY, TimeUnit.SECOND, SqlParserPos.ZERO); return relBuilder.getRexBuilder().makeIntervalLiteral(interval, intervalQualifier); } else { return relBuilder.literal(value); } } /** * Convert a Date value to a Calendar. Calcite's fromCalendarField functions use the * Calendar.get methods, so the raw values of the individual fields are preserved when * converted to the String formats. 
* * @return get the Calendar value */ private static Calendar valueAsCalendar(Object value) { Date date = (Date) value; Calendar cal = Calendar.getInstance(); cal.setTime(date); return cal; } @Override public RexNode visitFieldReference(FieldReferenceExpression fieldReference) { return relBuilder.field(fieldReference.getName()); } @Override public RexNode visitTypeLiteral(TypeLiteralExpression typeLiteral) { throw new UnsupportedOperationException(); } @Override private RexNode visitUnresolvedFieldReferenceExpression(UnresolvedFieldReferenceExpression field) { return relBuilder.field(field.getName()); } private RexNode visitResolvedAggInputReference(ResolvedAggInputReference reference) { return new RexInputRef( reference.getIndex(), typeFactory.createTypeFromInternalType(reference.getResultType(), true)); } private RexNode visitResolvedAggLocalReference(ResolvedAggLocalReference reference) { InternalType type = reference.getResultType(); return new RexAggLocalVariable( reference.getFieldTerm(), reference.getNullTerm(), typeFactory.createTypeFromInternalType(type, true), type); } private RexNode visitResolvedDistinctKeyReference(ResolvedDistinctKeyReference reference) { InternalType type = reference.getResultType(); return new RexDistinctKeyVariable( reference.getName(), typeFactory.createTypeFromInternalType(type, true), type); } }
I think you can extends DefaultExpressionRewriter, and replace this line to `return super.visit(expr, substitutionMap)`
public Expression visit(Expression expr, Map<Expression, Expression> substitutionMap) { if (substitutionMap.containsKey(expr)) { return substitutionMap.get(expr); } else { List<Expression> newChildren = new ArrayList<>(); boolean hasNewChildren = false; for (Expression child : expr.children()) { Expression newChild = visit(child, substitutionMap); if (newChild != child) { hasNewChildren = true; } newChildren.add(newChild); } return hasNewChildren ? expr.withChildren(newChildren) : expr; } }
List<Expression> newChildren = new ArrayList<>();
public Expression visit(Expression expr, Map<Expression, Expression> substitutionMap) { if (substitutionMap.containsKey(expr)) { return substitutionMap.get(expr); } else { List<Expression> newChildren = new ArrayList<>(); boolean hasNewChildren = false; for (Expression child : expr.children()) { Expression newChild = visit(child, substitutionMap); if (newChild != child) { hasNewChildren = true; } newChildren.add(newChild); } return hasNewChildren ? expr.withChildren(newChildren) : expr; } }
class ExpressionReplacer extends ExpressionVisitor<Expression, Map<Expression, Expression>> { private static final ExpressionReplacer INSTANCE = new ExpressionReplacer(); @Override }
class ExpressionReplacer extends ExpressionVisitor<Expression, Map<Expression, Expression>> { private static final ExpressionReplacer INSTANCE = new ExpressionReplacer(); @Override }
@cescoffier maybe it's not mandatory in this case but we usually restore previous contexts in a finally block. It's not needed here?
public void handle(Promise<Object> f) { final Context previous = Context.current(); grpcContext.attach(); consumer.accept(delegate); f.complete(); grpcContext.detach(previous); }
grpcContext.detach(previous);
public void handle(Promise<Object> f) { ServerCall.Listener<ReqT> listener = next.startCall(call, headers); replay.setDelegate(listener); f.complete(null); }
class BlockingServerInterceptor implements ServerInterceptor { private final Vertx vertx; private final List<String> blockingMethods; private final Map<String, Boolean> cache = new HashMap<>(); public BlockingServerInterceptor(Vertx vertx, List<String> blockingMethods) { this.vertx = vertx; this.blockingMethods = new ArrayList<>(); for (String method : blockingMethods) { this.blockingMethods.add(method.toLowerCase()); } } @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { String fullMethodName = call.getMethodDescriptor().getFullMethodName(); boolean isBlocking = cache.computeIfAbsent(fullMethodName, new Function<String, Boolean>() { @Override public Boolean apply(String name) { String methodName = name.substring(name.lastIndexOf("/") + 1); return blockingMethods.contains(methodName.toLowerCase()); } }); if (isBlocking) { ReplayListener<ReqT> replay = new ReplayListener<>(); vertx.executeBlocking(new Handler<Promise<Object>>() { @Override }, null); return replay; } else { return next.startCall(call, headers); } } /** * Stores the incoming events until the listener is injected. * When injected, replay the events. * * Note that event must be executed in order, explaining the `ordered:true`. 
*/ private class ReplayListener<ReqT> extends ServerCall.Listener<ReqT> { private ServerCall.Listener<ReqT> delegate; private final List<Consumer<ServerCall.Listener<ReqT>>> incomingEvents = new LinkedList<>(); synchronized void setDelegate(ServerCall.Listener<ReqT> delegate) { this.delegate = delegate; for (Consumer<ServerCall.Listener<ReqT>> event : incomingEvents) { event.accept(delegate); } incomingEvents.clear(); } private synchronized void executeOnContextOrEnqueue(Consumer<ServerCall.Listener<ReqT>> consumer) { if (this.delegate != null) { final Context grpcContext = Context.current(); vertx.executeBlocking(new Handler<Promise<Object>>() { @Override public void handle(Promise<Object> f) { final Context previous = Context.current(); grpcContext.attach(); consumer.accept(delegate); f.complete(); grpcContext.detach(previous); } }, true, null); } else { incomingEvents.add(consumer); } } @Override public void onMessage(ReqT message) { executeOnContextOrEnqueue(new Consumer<ServerCall.Listener<ReqT>>() { @Override public void accept(ServerCall.Listener<ReqT> t) { t.onMessage(message); } }); } @Override public void onHalfClose() { executeOnContextOrEnqueue(ServerCall.Listener::onHalfClose); } @Override public void onCancel() { executeOnContextOrEnqueue(ServerCall.Listener::onCancel); } @Override public void onComplete() { executeOnContextOrEnqueue(ServerCall.Listener::onComplete); } @Override public void onReady() { executeOnContextOrEnqueue(ServerCall.Listener::onReady); } } }
class BlockingServerInterceptor implements ServerInterceptor { private final Vertx vertx; private final List<String> blockingMethods; private final Map<String, Boolean> cache = new HashMap<>(); public BlockingServerInterceptor(Vertx vertx, List<String> blockingMethods) { this.vertx = vertx; this.blockingMethods = new ArrayList<>(); for (String method : blockingMethods) { this.blockingMethods.add(method.toLowerCase()); } } @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) { String fullMethodName = call.getMethodDescriptor().getFullMethodName(); boolean isBlocking = cache.computeIfAbsent(fullMethodName, new Function<String, Boolean>() { @Override public Boolean apply(String name) { String methodName = name.substring(name.lastIndexOf("/") + 1); return blockingMethods.contains(methodName.toLowerCase()); } }); if (isBlocking) { ReplayListener<ReqT> replay = new ReplayListener<>(); vertx.executeBlocking(new Handler<Promise<Object>>() { @Override }, null); return replay; } else { return next.startCall(call, headers); } } /** * Stores the incoming events until the listener is injected. * When injected, replay the events. * * Note that event must be executed in order, explaining the `ordered:true`. 
*/ private class ReplayListener<ReqT> extends ServerCall.Listener<ReqT> { private ServerCall.Listener<ReqT> delegate; private final List<Consumer<ServerCall.Listener<ReqT>>> incomingEvents = new LinkedList<>(); synchronized void setDelegate(ServerCall.Listener<ReqT> delegate) { this.delegate = delegate; for (Consumer<ServerCall.Listener<ReqT>> event : incomingEvents) { event.accept(delegate); } incomingEvents.clear(); } private synchronized void executeOnContextOrEnqueue(Consumer<ServerCall.Listener<ReqT>> consumer) { if (this.delegate != null) { final Context grpcContext = Context.current(); vertx.executeBlocking(new Handler<Promise<Object>>() { @Override public void handle(Promise<Object> f) { final Context previous = Context.current(); grpcContext.attach(); try { consumer.accept(delegate); f.complete(); } finally { grpcContext.detach(previous); } } }, true, null); } else { incomingEvents.add(consumer); } } @Override public void onMessage(ReqT message) { executeOnContextOrEnqueue(new Consumer<ServerCall.Listener<ReqT>>() { @Override public void accept(ServerCall.Listener<ReqT> t) { t.onMessage(message); } }); } @Override public void onHalfClose() { executeOnContextOrEnqueue(ServerCall.Listener::onHalfClose); } @Override public void onCancel() { executeOnContextOrEnqueue(ServerCall.Listener::onCancel); } @Override public void onComplete() { executeOnContextOrEnqueue(ServerCall.Listener::onComplete); } @Override public void onReady() { executeOnContextOrEnqueue(ServerCall.Listener::onReady); } } }
There are multiple strategies to solve this specific problem, but I don't think creating a mapping will be significantly better.
private void removeDuplicateBindingsFromAccessControlChain(Http http) { Set<FilterBinding> duplicateBindings = new HashSet<>(); for (FilterBinding binding : http.getBindings()) { if (binding.filterId().toId().equals(ACCESS_CONTROL_CHAIN_ID)) { for (FilterBinding otherBinding : http.getBindings()) { if (!binding.filterId().equals(otherBinding.filterId()) && binding.binding().equals(otherBinding.binding())) { duplicateBindings.add(binding); } } } } duplicateBindings.forEach(http.getBindings()::remove); }
for (FilterBinding binding : http.getBindings()) {
private void removeDuplicateBindingsFromAccessControlChain(Http http) { Set<FilterBinding> duplicateBindings = new HashSet<>(); for (FilterBinding binding : http.getBindings()) { if (binding.chainId().toId().equals(ACCESS_CONTROL_CHAIN_ID)) { for (FilterBinding otherBinding : http.getBindings()) { if (!binding.chainId().equals(otherBinding.chainId()) && binding.binding().equals(otherBinding.binding())) { duplicateBindings.add(binding); } } } } duplicateBindings.forEach(http.getBindings()::remove); }
class Builder { private final String domain; private boolean readEnabled = false; private boolean writeEnabled = true; private final Set<BindingPattern> excludeBindings = new LinkedHashSet<>(); private Collection<Handler<?>> handlers = Collections.emptyList(); public Builder(String domain) { this.domain = domain; } public Builder readEnabled(boolean readEnabled) { this.readEnabled = readEnabled; return this; } public Builder writeEnabled(boolean writeEnabled) { this.writeEnabled = writeEnabled; return this; } public Builder excludeBinding(BindingPattern binding) { this.excludeBindings.add(binding); return this; } public Builder setHandlers(ApplicationContainerCluster cluster) { this.handlers = cluster.getHandlers(); return this; } public AccessControl build() { return new AccessControl(domain, writeEnabled, readEnabled, excludeBindings, handlers); } }
class Builder { private final String domain; private boolean readEnabled = false; private boolean writeEnabled = true; private final Set<BindingPattern> excludeBindings = new LinkedHashSet<>(); private Collection<Handler<?>> handlers = Collections.emptyList(); public Builder(String domain) { this.domain = domain; } public Builder readEnabled(boolean readEnabled) { this.readEnabled = readEnabled; return this; } public Builder writeEnabled(boolean writeEnabled) { this.writeEnabled = writeEnabled; return this; } public Builder excludeBinding(BindingPattern binding) { this.excludeBindings.add(binding); return this; } public Builder setHandlers(ApplicationContainerCluster cluster) { this.handlers = cluster.getHandlers(); return this; } public AccessControl build() { return new AccessControl(domain, writeEnabled, readEnabled, excludeBindings, handlers); } }
Also can't we have this as an actual object defined in lang.object https://github.com/ballerina-platform/ballerina-spec/issues/442#issuecomment-619482206
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) { BVarSymbol iteratorSymbol = varDef.var.symbol; BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID, foreach.nillableResultType, this.env.scope.owner); BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos, foreach.nillableResultType, iteratorSymbol, resultSymbol); BLangType userDefineType = getUserDefineTypeNode(foreach.resultType); BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol); BLangTypeTestExpr typeTestExpr = ASTBuilderUtil .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType); BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode(); whileNode.pos = foreach.pos; whileNode.expr = typeTestExpr; whileNode.body = foreach.body; BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol); VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode; BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol); valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr, types.getSafeType(valueAccessExpr.expr.type, true, false)); variableDefinitionNode.getVariable() .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType)); whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode); whileNode.body.stmts.add(1, resultAssignment); BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos); blockNode.addStatement(varDef); blockNode.addStatement(resultVariableDefinition); blockNode.addStatement(whileNode); return blockNode; } private BLangType getUserDefineTypeNode(BType type) { BLangUserDefinedType recordType = new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""), ASTBuilderUtil.createIdentifier(null, "")); 
recordType.type = type; return recordType; } @Override public void visit(BLangWhile whileNode) { whileNode.expr = rewriteExpr(whileNode.expr); whileNode.body = rewrite(whileNode.body, env); result = whileNode; } @Override public void visit(BLangLock lockNode) { BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos); BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos); blockStmt.addStatement(lockStmt); enclLocks.push(lockStmt); BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE); BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType); BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral); statementExpression.type = symTable.nilType; BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode(); trapExpr.type = nillableError; trapExpr.expr = statementExpression; BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"), this.env.scope.owner.pkgID, nillableError, this.env.scope.owner); BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult", nillableError, trapExpr, nillableErrorVarSymbol); BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable); blockStmt.addStatement(simpleVariableDef); BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos); unLockStmt.relatedLock = lockStmt; blockStmt.addStatement(unLockStmt); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol); BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos); BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = lockNode.pos; panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType); ifBody.addStatement(panicNode); BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, 
getErrorTypeNode()); isErrorTest.type = symTable.booleanType; BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null); blockStmt.addStatement(ifelse); result = rewrite(blockStmt, env); enclLocks.pop(); } @Override public void visit(BLangLockStmt lockStmt) { result = lockStmt; } @Override public void visit(BLangUnLockStmt unLockStmt) { result = unLockStmt; } @Override public void visit(BLangTransaction transactionNode) { BLangStatementExpression transactionStmtExpr = transactionDesugar.desugar(transactionNode, env); BLangExpressionStmt transactionExprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode(); transactionExprStmt.pos = transactionNode.pos; transactionExprStmt.expr = transactionStmtExpr; result = rewrite(transactionExprStmt, env); } @Override public void visit(BLangRollback rollbackNode) { BLangStatementExpression rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, env); BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(rollbackNode.pos, rollbackStmtExpr, symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt rollbackExprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode(); rollbackExprStmt.pos = rollbackNode.pos; rollbackExprStmt.expr = checkedExpr; result = rewrite(rollbackExprStmt, env); } String getTransactionBlockId() { return env.enclPkg.packageID.orgName + "$" + env.enclPkg.packageID.name + "$" + transactionIndex++; } BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix, List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, BLangFunctionBody lambdaBody) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++); lambdaFunction.function = func; func.requiredParams.addAll(lambdaFunctionVariable); 
func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); lambdaFunctionVariable = func.requiredParams; func.body = lambdaBody; func.desugared = false; lambdaFunction.pos = pos; List<BType> paramTypes = new ArrayList<>(); lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type)); lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(), null); return lambdaFunction; } private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix, List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, List<BLangStatement> fnBodyStmts, SymbolEnv env, Scope trxScope) { BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode(); body.scope = trxScope; SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); body.stmts = rewriteStmt(fnBodyStmts, bodyEnv); return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body); } private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix, TypeNode returnType) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++); lambdaFunction.function = func; func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); func.desugared = false; lambdaFunction.pos = pos; return lambdaFunction; } private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) { final BPackageSymbol packageSymbol = targetPkg.symbol; final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol); symbolEnter.defineNode(funcNode, packageEnv); packageEnv.enclPkg.functions.add(funcNode); packageEnv.enclPkg.topLevelNodes.add(funcNode); } @Override public void visit(BLangForkJoin forkJoin) { result = forkJoin; } @Override public 
// Continuation of the "@Override public" on the previous source line.
// Literals pass through unchanged, except byte-array literals, which are expanded
// into an array literal of byte literals.
void visit(BLangLiteral literalExpr) { if (literalExpr.type.tag == TypeTags.ARRAY && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE) { result = rewriteBlobLiteral(literalExpr); return; } result = literalExpr; }

// Decodes a blob literal (base64 or hex text) into bytes and rebuilds it as an
// array literal whose members are byte literals.
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) { String[] result = getBlobTextValue((String) literalExpr.value); byte[] values; if (BASE_64.equals(result[0])) { values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8)); } else { values = hexStringToByteArray(result[1]); } BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteralNode.type = literalExpr.type; arrayLiteralNode.pos = literalExpr.pos; arrayLiteralNode.exprs = new ArrayList<>(); for (byte b : values) { arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b)); } return arrayLiteralNode; }

// Splits a blob literal's source text into [encoding-kind, payload] using the backtick
// delimiters, after stripping all spaces.
private String[] getBlobTextValue(String blobLiteralNodeText) { String nodeText = blobLiteralNodeText.replaceAll(" ", ""); String[] result = new String[2]; result[0] = nodeText.substring(0, nodeText.indexOf('`')); result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`')); return result; }

// Converts a hex string (two characters per byte) into the corresponding byte array.
private static byte[] hexStringToByteArray(String str) { int len = str.length(); byte[] data = new byte[len / 2]; for (int i = 0; i < len; i += 2) { data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16)); } return data; }

// Desugars a list constructor into the concrete literal node for its inferred type
// (tuple / JSON array / typedesc / array). (Method continues on the next source line.)
@Override public void visit(BLangListConstructorExpr listConstructor) { listConstructor.exprs = rewriteExprs(listConstructor.exprs); BLangExpression expr; if (listConstructor.type.tag == TypeTags.TUPLE) { expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } else if (listConstructor.type.tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type)); result = rewriteExpr(expr); } else if
// --- tail of visit(BLangListConstructorExpr): JSON-element, typedesc and plain array cases.
(getElementType(listConstructor.type).tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } else if (listConstructor.type.tag == TypeTags.TYPEDESC) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = listConstructor.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); } else { expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } }

// Table constructors only need their member record literals rewritten.
@Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { rewriteExprs(tableConstructorExpr.recordLiteralList); result = tableConstructorExpr; }

// Rewrites array-literal members; JSON-typed arrays become JSON array literals.
@Override public void visit(BLangArrayLiteral arrayLiteral) { arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs); if (arrayLiteral.type.tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.type)); return; } else if (getElementType(arrayLiteral.type).tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.type); return; } result = arrayLiteral; }

// Tuple literal: the typedesc case short-circuits; otherwise implicit casts are set on
// each member. (Method continues on the next source line.)
@Override public void visit(BLangTupleLiteral tupleLiteral) { if (tupleLiteral.isTypedescExpr) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = tupleLiteral.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); return; } tupleLiteral.exprs.forEach(expr -> { BType expType = expr.impConversionExpr == null ?
// --- tail of visit(BLangTupleLiteral): finish setting implicit casts and rewrite members.
expr.type : expr.impConversionExpr.type; types.setImplicitCastExpr(expr, expType, symTable.anyType); }); tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs); result = tupleLiteral; }

// Group expression: either a typedesc short-circuit or simply the rewritten inner expression.
@Override public void visit(BLangGroupExpr groupExpr) { if (groupExpr.isTypedescExpr) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = groupExpr.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); } else { result = rewriteExpr(groupExpr.expression); } }

// Record literal: computed keys are sorted to the end, then the mapping constructor
// is rewritten.
@Override public void visit(BLangRecordLiteral recordLiteral) { List<RecordLiteralNode.RecordField> fields = recordLiteral.fields; fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2))); result = rewriteExpr(rewriteMappingConstructor(recordLiteral)); }

// Simple variable references are resolved to the concrete ref node for the symbol's
// owner kind (xmlns / function / type / local / field / package-level variable, ...).
// (Method continues over the next two source lines.)
@Override public void visit(BLangSimpleVarRef varRefExpr) { BLangSimpleVarRef genVarRefExpr = varRefExpr; if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName); qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol; qnameExpr.localname = varRefExpr.variableName; qnameExpr.prefix = varRefExpr.pkgAlias; qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI; qnameExpr.isUsedInXML = false; qnameExpr.pos = varRefExpr.pos; qnameExpr.type = symTable.stringType; result = qnameExpr; return; } if (varRefExpr.symbol == null) { result = varRefExpr; return; } if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol; if (varSymbol.originalSymbol != null) { varRefExpr.symbol = varSymbol.originalSymbol; } } BSymbol ownerSymbol = varRefExpr.symbol.owner; if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION && varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) { genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol); } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE && !((varRefExpr.symbol.tag &
// --- continuation of visit(BLangSimpleVarRef): owner-kind dispatch, inlining of simple
// constants, lock-variable tracking for package-level refs, and the final lhs/rhs and
// implicit-cast handling.
SymTag.CONSTANT) == SymTag.CONSTANT)) { genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymbol.tag & SymTag.LET) == SymTag.LET) { genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) { genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) { if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol; if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) { BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType, constSymbol.value.value); result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type)); return; } } genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol); if (!enclLocks.isEmpty()) { enclLocks.peek().addLockVariable((BVarSymbol) varRefExpr.symbol); } } genVarRefExpr.type = varRefExpr.type; genVarRefExpr.pos = varRefExpr.pos; if ((varRefExpr.lhsVar) || genVarRefExpr.symbol.name.equals(IGNORE)) { genVarRefExpr.lhsVar = varRefExpr.lhsVar; genVarRefExpr.type = varRefExpr.symbol.type; result = genVarRefExpr; return; } genVarRefExpr.lhsVar = varRefExpr.lhsVar; BType targetType = genVarRefExpr.type; genVarRefExpr.type = genVarRefExpr.symbol.type; BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType); result = expression.impConversionExpr != null ?
// --- tail of visit(BLangSimpleVarRef): prefer the implicit-conversion wrapper when present.
expression.impConversionExpr : expression; }

// Field access: handles safe navigation first, then dispatches on the accessed value's
// type (object / record / lax (json, xml) / map / xml). (Method continues on the next
// source line.)
@Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { if (safeNavigate(fieldAccessExpr)) { result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr)); return; } BLangAccessExpression targetVarRef = fieldAccessExpr; BType varRefType = fieldAccessExpr.expr.type; fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr); if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) { fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType); } BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value); int varRefTypeTag = varRefType.tag; if (varRefTypeTag == TypeTags.OBJECT || (varRefTypeTag == TypeTags.UNION && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) { if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) { targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol); } else { boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation; if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) { BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) varRefType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc; if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) || (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) { isStoreOnCreation = true; } } targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, (BVarSymbol) fieldAccessExpr.symbol, false, isStoreOnCreation); } } else if (varRefTypeTag ==
// --- continuation of visit(BLangFieldBasedAccess): record / lax / map / xml cases and
// the final flag propagation onto the generated access node.
TypeTags.RECORD || (varRefTypeTag == TypeTags.UNION && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) { if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) { targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol); } else { targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation); } } else if (types.isLax(varRefType)) { if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) { if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) { result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr)); return; } fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType); targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit); } else { targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr); } } else if (varRefTypeTag == TypeTags.MAP) { targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, fieldAccessExpr.isStoreOnCreation); } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) { targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, fieldAccessExpr.fieldKind); } targetVarRef.lhsVar = fieldAccessExpr.lhsVar; targetVarRef.type = fieldAccessExpr.type; targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess; result = targetVarRef; }

// Desugars a lax map field access into a block that performs the map access and turns a
// missing key into an error value. (Method continues over the next two source lines.)
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangStatementExpression statementExpression = new BLangStatementExpression(); BLangBlockStmt block = new BLangBlockStmt(); statementExpression.stmt = block; BUnionType fieldAccessType =
// --- continuation of rewriteLaxMapAccess: declare the (value|error) result variable,
// perform the map access into a temp of type (value|nil), then branch on whether the
// lookup produced nil; the nil branch starts building an error() invocation.
BUnionType.create(null, fieldAccessExpr.type, symTable.errorType); DiagnosticPos pos = fieldAccessExpr.pos; BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos); block.addStatement(result); BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol); resultRef.type = fieldAccessType; statementExpression.type = fieldAccessType; BLangLiteral mapIndex = ASTBuilderUtil.createLiteral( fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value); BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex); BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType); mapAccessExpr.type = xmlOrNil; BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos); BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol); block.addStatement(mapResult); BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block); BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType); ifStmt.expr = isLikeNilExpr; BLangBlockStmt resultNilBody = new BLangBlockStmt(); ifStmt.body = resultNilBody; BLangBlockStmt resultHasValueBody = new BLangBlockStmt(); ifStmt.elseStmt = resultHasValueBody; BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue("error"); errorInvocation.name = name; errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); errorInvocation.symbol = symTable.errorConstructor; errorInvocation.type = symTable.errorType; ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>(); errorInvocation.requiredArgs = errorCtorArgs; errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey")); BLangNamedArgsExpression message = new
// --- tail of rewriteLaxMapAccess: finish the InvalidKey error for the nil branch and
// assign either the error or the looked-up value into the result variable.
BLangNamedArgsExpression(); message.name = ASTBuilderUtil.createIdentifier(pos, "key"); message.expr = createStringLiteral(pos, fieldAccessExpr.field.value); errorCtorArgs.add(message); BLangSimpleVariableDef errorDef = createVarDef("_$_invalid_key_error", symTable.errorType, errorInvocation, pos); resultNilBody.addStatement(errorDef); BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol); BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody); errorVarAssignment.varRef = resultRef; errorVarAssignment.expr = errorRef; BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt( pos, resultHasValueBody); mapResultAssignment.varRef = resultRef; mapResultAssignment.expr = mapResultRef; statementExpression.expr = resultRef; return statementExpression; }

// Rewrites an XML attribute / element-name field access into the matching lang.xml
// internal invocation (field "_" reads the element name; anything else is an
// attribute get; ns-prefixed fields are expanded to "{uri}local" form first).
private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) { ArrayList<BLangExpression> args = new ArrayList<>(); String fieldName = fieldAccessExpr.field.value; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess = (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr; fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName); } if (fieldName.equals("_")) { return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING, fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>()); } BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName); args.add(attributeNameLiteral); args.add(isOptionalAccessToLiteral(fieldAccessExpr)); return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args, new ArrayList<>()); }

// Wraps the access's optional-access flag as a rewritten boolean literal.
// (Method continues on the next source line.)
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) { return rewrite( createLiteral(fieldAccessExpr.pos,
// --- tail of isOptionalAccessToLiteral.
symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env); }

// Builds an expanded XML qualified name: "{<nsURI>}<localName>".
private String createExpandedQName(String nsURI, String localName) { return "{" + nsURI + "}" + localName; }

// Index access: handles safe navigation first, then dispatches on the container type
// (map / mapping / list / string / xml / table). (Method continues on the next
// source line.)
@Override public void visit(BLangIndexBasedAccess indexAccessExpr) { if (safeNavigate(indexAccessExpr)) { result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr)); return; } BLangIndexBasedAccess targetVarRef = indexAccessExpr; indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr); BType varRefType = indexAccessExpr.expr.type; indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr); if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) { indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType); } if (varRefType.tag == TypeTags.MAP) { targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation); } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) { targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false); } else if (types.isSubTypeOfList(varRefType)) { targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (types.isAssignable(varRefType, symTable.stringType)) { indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType); targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (varRefType.tag == TypeTags.TABLE) { if (targetVarRef.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY) { BLangTupleLiteral listConstructorExpr = new BLangTupleLiteral(); listConstructorExpr.exprs = ((BLangTableMultiKeyExpr)
// --- tail of visit(BLangIndexBasedAccess): collapse a table multi-key index into a
// tuple literal, then build the table access node.
indexAccessExpr.indexExpr).multiKeyIndexExprs; List<BType> memberTypes = new ArrayList<>(); ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs. forEach(expression -> memberTypes.add(expression.type)); listConstructorExpr.type = new BTupleType(memberTypes); indexAccessExpr.indexExpr = listConstructorExpr; } targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } targetVarRef.lhsVar = indexAccessExpr.lhsVar; targetVarRef.type = indexAccessExpr.type; result = targetVarRef; }

// Rewrites each key expression of a table multi-key index.
@Override public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) { rewriteExprs(tableMultiKeyExpr.multiKeyIndexExprs); result = tableMultiKeyExpr; }

// Invocations: error constructors get a dedicated rewrite; functional constructors are
// redirected to the matching lang.internal "<name>Ctor" method; then the common
// invocation rewriting runs.
@Override public void visit(BLangInvocation iExpr) { if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) { result = rewriteErrorConstructor(iExpr); } else if (iExpr.symbol.kind == SymbolKind.FUNCTIONAL_CONSTRUCTOR) { String name = ((BConstructorSymbol) iExpr.symbol).name.value; String internalMethodName = name.substring(0, 1).toLowerCase() + name.substring(1) + "Ctor"; BSymbol bSymbol = symResolver.lookupLangLibMethodInModule( symTable.langInternalModuleSymbol, names.fromString(internalMethodName)); iExpr.symbol = bSymbol; } rewriteInvocation(iExpr, false); }

// Action invocations reuse the common invocation rewriting, honouring the async flag.
@Override public void visit(BLangInvocation.BLangActionInvocation actionInvocation) { rewriteInvocation(actionInvocation, actionInvocation.async); }

// Common invocation rewriting: lock-variable bookkeeping, argument reordering and
// rewriting, statement-annotation definition, then attached-function handling and
// type-param cast fixes. (Method continues on the next source line.)
private void rewriteInvocation(BLangInvocation invocation, boolean async) { BLangInvocation invRef = invocation; if (!enclLocks.isEmpty()) { BLangLockStmt lock = enclLocks.peek(); lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars); } reorderArguments(invocation); invocation.requiredArgs = rewriteExprs(invocation.requiredArgs); fixNonRestArgTypeCastInTypeParamInvocation(invocation); invocation.restArgs = rewriteExprs(invocation.restArgs); annotationDesugar.defineStatementAnnotations(invocation.annAttachments,
// --- tail of rewriteInvocation: function-pointer path, receiver rewriting, and
// attached-function invocation construction for object/record receivers.
invocation.pos, invocation.symbol.pkgID, invocation.symbol.owner, env); if (invocation.functionPointerInvocation) { visitFunctionPointerInvocation(invocation); return; } invocation.expr = rewriteExpr(invocation.expr); result = invRef; if (invocation.expr == null) { fixTypeCastInTypeParamInvocation(invocation, invRef); if (invocation.exprSymbol == null) { return; } invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol); invocation.expr = rewriteExpr(invocation.expr); } switch (invocation.expr.type.tag) { case TypeTags.OBJECT: case TypeTags.RECORD: if (!invocation.langLibInvocation) { List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs); argExprs.add(0, invocation.expr); BLangAttachedFunctionInvocation attachedFunctionInvocation = new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs, invocation.symbol, invocation.type, invocation.expr, async); attachedFunctionInvocation.name = invocation.name; attachedFunctionInvocation.annAttachments = invocation.annAttachments; result = invRef = attachedFunctionInvocation; } break; } fixTypeCastInTypeParamInvocation(invocation, invRef); }

// For lang-lib invocations, casts each non-receiver required argument to the declared
// parameter type (index 0 is the receiver and is skipped).
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) { if (!iExpr.langLibInvocation) { return; } List<BLangExpression> requiredArgs = iExpr.requiredArgs; List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params; for (int i = 1; i < requiredArgs.size(); i++) { requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type)); } }

// For lang-lib invocations or return types containing a type parameter, re-applies the
// original expected type over the symbol's declared return type via a conversion
// expression. (Method continues on the next source line.)
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) { if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) { BType originalInvType = genIExpr.type; genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType; BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType); if (expr.getKind() ==
// --- tail of fixTypeCastInTypeParamInvocation: reuse an existing conversion node, or
// synthesize one targeting the original invocation type.
NodeKind.TYPE_CONVERSION_EXPR) { this.result = expr; return; } BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = genIExpr; conversionExpr.targetType = originalInvType; conversionExpr.type = originalInvType; conversionExpr.pos = genIExpr.pos; this.result = conversionExpr; }

// Error constructor: casts the reason argument to string, moves it to the end of the
// required args, gathers named args into a detail record literal and makes the detail
// immutable via visitCloneReadonly. (Method continues on the next source line.)
private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) { BLangExpression reasonExpr = iExpr.requiredArgs.get(0); if (reasonExpr.impConversionExpr != null && reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) { reasonExpr.impConversionExpr = null; } reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType); reasonExpr = rewriteExpr(reasonExpr); iExpr.requiredArgs.remove(0); iExpr.requiredArgs.add(reasonExpr); BLangExpression errorDetail; BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos, ((BErrorType) iExpr.symbol.type).detailType); List<BLangExpression> namedArgs = iExpr.requiredArgs.stream() .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR) .collect(Collectors.toList()); if (namedArgs.isEmpty()) { errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type); } else { for (BLangExpression arg : namedArgs) { BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg; BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField(); member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos, symTable.stringType, namedArg.name.value)); if (recordLiteral.type.tag == TypeTags.RECORD) { member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType); } else { member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type); } recordLiteral.fields.add(member); iExpr.requiredArgs.remove(arg); } errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), ((BErrorType) iExpr.symbol.type).detailType); }
// --- tail of rewriteErrorConstructor: append the detail argument and return.
iExpr.requiredArgs.add(errorDetail); return iExpr; }

// Type-init: streams take the lang.internal construct-stream path; everything else
// goes through the object-init desugaring.
public void visit(BLangTypeInit typeInitExpr) { if (typeInitExpr.type.tag == TypeTags.STREAM) { result = rewriteExpr(desugarStreamTypeInit(typeInitExpr)); } else { result = rewrite(desugarObjectTypeInit(typeInitExpr), env); } }

// Desugars `new T(...)` into a block: allocate the object, call the generated init
// function, and if init can return an error, branch on the init result.
// (Method continues on the next source line.)
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) { typeInitExpr.desugared = true; BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos); BType objType = getObjectType(typeInitExpr.type); BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos); BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol); blockStmt.addStatement(objVarDef); typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol; typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol; if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) { BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt); initInvExpr.expr = typeInitExpr.initInvocation; typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value; BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef); stmtExpr.type = objVarRef.symbol.type; return stmtExpr; } BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type, typeInitExpr.initInvocation, typeInitExpr.pos); blockStmt.addStatement(initInvRetValVarDef); BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos); blockStmt.addStatement(resultVarDef); BLangSimpleVarRef initRetValVarRefInCondition = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol); BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos); BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos,
// --- tail of desugarObjectTypeInit: if init returned an error assign it to the result
// variable, otherwise assign the constructed object; the block yields the result var.
initRetValVarRefInCondition, getErrorTypeNode()); isErrorTest.type = symTable.booleanType; BLangSimpleVarRef thenInitRetValVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol); BLangAssignment errAssignment = ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef); thenStmt.addStatement(errAssignment); BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol); BLangAssignment objAssignment = ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef); BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos); elseStmt.addStatement(objAssignment); BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt); blockStmt.addStatement(ifelse); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.type = resultVarRef.symbol.type; return stmtExpr; }

// Desugars a stream `new` into a lang.internal construct-stream call taking the
// constraint typedesc and the iterator object (first constructor argument).
// (Method continues on the next source line.)
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) { BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope .lookup(Names.CONSTRUCT_STREAM).symbol; BType targetType = ((BStreamType) typeInitExpr.type).constraint; BType errorType = ((BStreamType) typeInitExpr.type).error; BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol); BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = targetType; typedescExpr.type = typedescType; BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0); BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod( typeInitExpr.pos, symbol, new ArrayList<>(Lists.of(typedescExpr, iteratorObj)),
// --- tail of desugarStreamTypeInit: type the construct-stream invocation as the stream type.
symResolver); streamConstructInvocation.type = new BStreamType(TypeTags.STREAM, targetType, errorType, null); return streamConstructInvocation; }

// Declares a synthetic local variable named "$<name>$", reusing an existing symbol of
// the same name from the current scope when one exists.
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) { BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name)); if (objSym == null || objSym == symTable.notFoundSymbol) { objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner); } BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr, (BVarSymbol) objSym); BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos); objVarDef.var = objVar; objVarDef.type = objVar.type; return objVarDef; }

// Extracts the object type from an object type or an object-containing union; anything
// else is an illegal state in an object-init context.
private BType getObjectType(BType type) { if (type.tag == TypeTags.OBJECT) { return type; } else if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream() .filter(t -> t.tag == TypeTags.OBJECT) .findFirst() .orElse(symTable.noType); } throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context"); }

// Creates a BLangErrorType node typed as the error type.
BLangErrorType getErrorTypeNode() { BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode(); errorTypeNode.type = symTable.errorType; return errorTypeNode; }

// Ternary: desugared to `T $result$; if (cond) { $result$ = then; } else { $result$ = else; }`
// wrapped in a statement expression. (Method continues on the next source line.)
@Override public void visit(BLangTernaryExpr ternaryExpr) { /* * First desugar to if-else: * * T $result$; * if () { * $result$ = thenExpr; * } else { * $result$ = elseExpr; * } * */ BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos); BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos); BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol); BLangAssignment thenAssignment = ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef,
// --- tail of visit(BLangTernaryExpr): else-branch assignment, if-else construction and
// the wrapping statement expression.
ternaryExpr.thenExpr); thenBody.addStatement(thenAssignment); BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol); BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr); elseBody.addStatement(elseAssignment); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol); BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse)); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.type = ternaryExpr.type; result = rewriteExpr(stmtExpr); }

// wait expression: a binary alternative-wait is flattened into the list of its leaf
// expressions; a single expression becomes a one-element list.
@Override public void visit(BLangWaitExpr waitExpr) { if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) { waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>()); } else { waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression())); } result = waitExpr; }

// Recursively collects (and rewrites) the leaves of a binary wait-expression tree.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) { visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs); visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs); return exprs; }

// Helper for collectAllBinaryExprs: recurse into nested binaries, otherwise rewrite
// the expression and add it to the list.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) { if (expr.getKind() == NodeKind.BINARY_EXPR) { collectAllBinaryExprs((BLangBinaryExpr) expr, exprs); } else { expr = rewriteExpr(expr); exprs.add(expr); } }

// multiple-wait: rewrite each key/value and re-wrap the pairs as a wait literal.
// (Method continues on the next source line.)
@Override public void visit(BLangWaitForAllExpr waitExpr) { waitExpr.keyValuePairs.forEach(keyValue -> { if (keyValue.valueExpr != null) { keyValue.valueExpr = rewriteExpr(keyValue.valueExpr); } else { keyValue.keyExpr = rewriteExpr(keyValue.keyExpr); } }); BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
// --- tail of visit(BLangWaitForAllExpr).
result = rewriteExpr(expr); }

// trap: rewrite the inner expression; non-nil results are cast to the trap's type.
@Override public void visit(BLangTrapExpr trapExpr) { trapExpr.expr = rewriteExpr(trapExpr.expr); if (trapExpr.expr.type.tag != TypeTags.NIL) { trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type); } result = trapExpr; }

// Binary expressions: range operators become int-range invocations, logical AND/OR get
// their own desugaring, byte operands of arithmetic/bitwise ops may need widening, and
// equality with mixed byte/int operands gets casts. (Method continues on the next
// source line.)
@Override public void visit(BLangBinaryExpr binaryExpr) { if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) { if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) { binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr); } result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr)); return; } if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) { visitBinaryLogicalExpr(binaryExpr); return; } OperatorKind binaryOpKind = binaryExpr.opKind; if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB || binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV || binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND || binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) { checkByteTypeIncompatibleOperations(binaryExpr); } binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr); binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr); result = binaryExpr; int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag; int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag; if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL || binaryExpr.opKind == OperatorKind.REF_EQUAL || binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) { if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) { binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType); return; } if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) { binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType); return;
} } if (lhsExprTypeTag == rhsExprTypeTag) { return; } if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) { if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) { binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr, binaryExpr.lhsExpr.pos, symTable.xmlType); return; } binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type); return; } if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) { if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) { binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr, binaryExpr.rhsExpr.pos, symTable.xmlType); return; } binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type); return; } if (lhsExprTypeTag == TypeTags.DECIMAL) { binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type); return; } if (rhsExprTypeTag == TypeTags.DECIMAL) { binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type); return; } if (lhsExprTypeTag == TypeTags.FLOAT) { binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type); return; } if (rhsExprTypeTag == TypeTags.FLOAT) { binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type); } } private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) { BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope .lookup(Names.CREATE_INT_RANGE).symbol; BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, symbol, new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver); createIntRangeInvocation.type = symTable.intRangeType; return createIntRangeInvocation; } private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) { if (binaryExpr.expectedType == null) { return; } int rhsExprTypeTag = 
binaryExpr.rhsExpr.type.tag; int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag; if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) { return; } int resultTypeTag = binaryExpr.expectedType.tag; if (resultTypeTag == TypeTags.INT) { if (rhsExprTypeTag == TypeTags.BYTE) { binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType); } if (lhsExprTypeTag == TypeTags.BYTE) { binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType); } } } /** * This method checks whether given binary expression is related to shift operation. * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type. * <p> * byte a = 12; * byte b = 34; * int i = 234; * int j = -4; * <p> * true: where binary expression's expected type is 'int' * int i1 = a >> b; * int i2 = a << b; * int i3 = a >> i; * int i4 = a << i; * int i5 = i >> j; * int i6 = i << j; */ private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) { return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT || binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT || binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT; } public void visit(BLangElvisExpr elvisExpr) { BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr); matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos, rewriteExpr(elvisExpr.rhsExpr))); matchExpr.type = elvisExpr.type; matchExpr.pos = elvisExpr.pos; result = rewriteExpr(matchExpr); } @Override public void visit(BLangUnaryExpr unaryExpr) { if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) { rewriteBitwiseComplementOperator(unaryExpr); return; } unaryExpr.expr = rewriteExpr(unaryExpr.expr); result = unaryExpr; } /** * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below. 
* Example : ~a -> a ^ -1; * ~ 11110011 -> 00001100 * 11110011 ^ 11111111 -> 00001100 * * @param unaryExpr the bitwise complement expression */ private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) { final DiagnosticPos pos = unaryExpr.pos; final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode(); binaryExpr.pos = pos; binaryExpr.opKind = OperatorKind.BITWISE_XOR; binaryExpr.lhsExpr = unaryExpr.expr; if (TypeTags.BYTE == unaryExpr.type.tag) { binaryExpr.type = symTable.byteType; binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL); binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR, symTable.byteType, symTable.byteType); } else { binaryExpr.type = symTable.intType; binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L); binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR, symTable.intType, symTable.intType); } result = rewriteExpr(binaryExpr); } @Override public void visit(BLangTypeConversionExpr conversionExpr) { if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) { result = rewriteExpr(conversionExpr.expr); return; } conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env); if (conversionExpr.type.tag == TypeTags.STRING && conversionExpr.expr.type.tag == TypeTags.XML_TEXT) { result = convertXMLTextToString(conversionExpr); return; } conversionExpr.expr = rewriteExpr(conversionExpr.expr); result = conversionExpr; } private BLangExpression convertXMLTextToString(BLangTypeConversionExpr conversionExpr) { BLangInvocation invocationNode = createLanglibXMLInvocation(conversionExpr.pos, XML_GET_CONTENT_OF_TEXT, conversionExpr.expr, new ArrayList<>(), new ArrayList<>()); BLangSimpleVariableDef tempVarDef = createVarDef("$$__xml_string__$$", conversionExpr.targetType, invocationNode, conversionExpr.pos); BLangSimpleVarRef tempVarRef = 
ASTBuilderUtil.createVariableRef(conversionExpr.pos, tempVarDef.var.symbol); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(conversionExpr.pos); blockStmt.addStatement(tempVarDef); BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef); stmtExpr.type = conversionExpr.type; return rewrite(stmtExpr, env); } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); result = bLangLambdaFunction; } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode(); bLangFunction.setName(bLangArrowFunction.functionName); BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); lambdaFunction.pos = bLangArrowFunction.pos; bLangFunction.addFlag(Flag.LAMBDA); lambdaFunction.function = bLangFunction; BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode(); returnType.type = bLangArrowFunction.body.expr.type; bLangFunction.setReturnTypeNode(returnType); bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction)); bLangArrowFunction.params.forEach(bLangFunction::addParameter); lambdaFunction.parent = bLangArrowFunction.parent; lambdaFunction.type = bLangArrowFunction.funcType; BLangFunction funcNode = lambdaFunction.function; BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet), new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType, env.enclEnv.enclVarSym, true); SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env); defineInvokableSymbol(funcNode, funcSymbol, invokableEnv); List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> { Scope enclScope = invokableEnv.scope; varNode.symbol.kind = SymbolKind.FUNCTION; varNode.symbol.owner = invokableEnv.scope.owner; 
enclScope.define(varNode.symbol.name, varNode.symbol); }).map(varNode -> varNode.symbol).collect(Collectors.toList()); funcSymbol.params = paramSymbols; funcSymbol.restParam = getRestSymbol(funcNode); funcSymbol.retType = funcNode.returnTypeNode.type; List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList()); funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.type, null); lambdaFunction.function.pos = bLangArrowFunction.pos; lambdaFunction.function.body.pos = bLangArrowFunction.pos; lambdaFunction.capturedClosureEnv = env; rewrite(lambdaFunction.function, env); env.enclPkg.addFunction(lambdaFunction.function); bLangArrowFunction.function = lambdaFunction.function; result = rewriteExpr(lambdaFunction); } private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol, SymbolEnv invokableEnv) { invokableNode.symbol = funcSymbol; funcSymbol.scope = new Scope(funcSymbol); invokableEnv.scope = funcSymbol.scope; } @Override public void visit(BLangXMLQName xmlQName) { result = xmlQName; } @Override public void visit(BLangXMLAttribute xmlAttribute) { xmlAttribute.name = rewriteExpr(xmlAttribute.name); xmlAttribute.value = rewriteExpr(xmlAttribute.value); result = xmlAttribute; } @Override public void visit(BLangXMLElementLiteral xmlElementLiteral) { xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName); xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName); xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren); xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes); Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator(); while (attributesItr.hasNext()) { BLangXMLAttribute attribute = attributesItr.next(); if (!attribute.isNamespaceDeclr) { continue; } BLangXMLNS xmlns; if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == 
SymTag.PACKAGE) { xmlns = new BLangPackageXMLNS(); } else { xmlns = new BLangLocalXMLNS(); } xmlns.namespaceURI = attribute.value.concatExpr; xmlns.prefix = ((BLangXMLQName) attribute.name).localname; xmlns.symbol = attribute.symbol; xmlElementLiteral.inlineNamespaces.add(xmlns); } result = xmlElementLiteral; } @Override public void visit(BLangXMLTextLiteral xmlTextLiteral) { xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments)); result = xmlTextLiteral; } @Override public void visit(BLangXMLCommentLiteral xmlCommentLiteral) { xmlCommentLiteral.concatExpr = rewriteExpr( constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments)); result = xmlCommentLiteral; } @Override public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) { xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target); xmlProcInsLiteral.dataConcatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments)); result = xmlProcInsLiteral; } @Override public void visit(BLangXMLQuotedString xmlQuotedString) { xmlQuotedString.concatExpr = rewriteExpr( constructStringTemplateConcatExpression(xmlQuotedString.textFragments)); result = xmlQuotedString; } @Override public void visit(BLangStringTemplateLiteral stringTemplateLiteral) { result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs)); } /** * The raw template literal gets desugared to a type init expression. For each literal, a new object class type * def is generated from the object type. The type init expression creates an instance of this generated object * type. 
For example, consider the following statements: * string name = "Pubudu"; * 'object:RawTemplate rt = `Hello ${name}!`; * * The raw template literal above is desugared to: * type RawTemplate$Impl$0 object { * public string[] strings = ["Hello ", "!"]; * public (any|error)[] insertions; * * function init((any|error)[] insertions) { * self.insertions = insertions; * } * }; * * * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]); * * @param rawTemplateLiteral The raw template literal to be desugared. */ @Override public void visit(BLangRawTemplateLiteral rawTemplateLiteral) { DiagnosticPos pos = rawTemplateLiteral.pos; BObjectType objType = (BObjectType) rawTemplateLiteral.type; BLangTypeDefinition objClassDef = desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos); BObjectType classObjType = (BObjectType) objClassDef.type; BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol; BLangListConstructorExpr insertionsList = ASTBuilderUtil.createEmptyArrayLiteral(pos, insertionsSym.type); insertionsList.exprs.addAll(rawTemplateLiteral.insertions); insertionsList.expectedType = insertionsSym.type; BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType); typeNewExpr.argsExpr.add(insertionsList); typeNewExpr.initInvocation.argExprs.add(insertionsList); typeNewExpr.initInvocation.requiredArgs.add(insertionsList); result = rewriteExpr(typeNewExpr); } /** * This method desugars a raw template literal object class for the provided raw template object type as follows: * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`; * is desugared to, * type $anonType$0 object { * public string[] strings = ["Hello ", "!"]; * public (any|error)[] insertions; * * function init((any|error)[] insertions) { * self.insertions = insertions; * } * }; * @param strings The string portions of the literal * @param objectType The abstract object type for which an object class needs to be generated * @param pos The 
diagnostic position info for the type node * @return Returns the generated concrete object class def */ private BLangTypeDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType, DiagnosticPos pos) { BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol; Name objectClassName = names.fromString(anonModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID)); final int updatedFlags = Flags.unset(tSymbol.flags, Flags.ABSTRACT); BObjectTypeSymbol classTSymbol = (BObjectTypeSymbol) Symbols .createObjectSymbol(updatedFlags, objectClassName, env.enclPkg.packageID, null, env.enclPkg.symbol); BObjectType objectClassType = new BObjectType(classTSymbol, updatedFlags); objectClassType.fields = objectType.fields; classTSymbol.type = objectClassType; BLangObjectTypeNode objectClassNode = TypeDefBuilderHelper.createObjectTypeNode(objectClassType, pos); BLangTypeDefinition typeDef = TypeDefBuilderHelper.addTypeDefinition(objectClassType, objectClassType.tsymbol, objectClassNode, env); typeDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value); typeDef.pos = pos; BType stringsType = objectClassType.fields.get("strings").symbol.type; BLangListConstructorExpr stringsList = ASTBuilderUtil.createEmptyArrayLiteral(pos, stringsType); stringsList.exprs.addAll(strings); stringsList.expectedType = stringsType; objectClassNode.fields.get(0).expr = stringsList; BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(objectClassNode, env); objectClassNode.initFunction = userDefinedInitFunction; env.enclPkg.functions.add(userDefinedInitFunction); env.enclPkg.topLevelNodes.add(userDefinedInitFunction); BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectClassNode, env); tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction, tempGeneratedInitFunction.symbol.scope, env); this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, 
env); objectClassNode.generatedInitFunction = tempGeneratedInitFunction; env.enclPkg.functions.add(objectClassNode.generatedInitFunction); env.enclPkg.topLevelNodes.add(objectClassNode.generatedInitFunction); return rewrite(typeDef, env); } /** * Creates a user-defined init() method for the provided object type node. If there are fields without default * values specified in the type node, this will add parameters for those fields in the init() method and assign the * param values to the respective fields in the method body. * * @param objectTypeNode The object type node for which the init() method is generated * @param env The symbol env for the object type node * @return The generated init() method */ private BLangFunction createUserDefinedObjectInitFn(BLangObjectTypeNode objectTypeNode, SymbolEnv env) { BLangFunction initFunction = TypeDefBuilderHelper.createInitFunctionForStructureType(objectTypeNode, env, Names.USER_DEFINED_INIT_SUFFIX, names, symTable); BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) objectTypeNode.type.tsymbol); typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol, (BInvokableType) initFunction.type); objectTypeNode.initFunction = initFunction; initFunction.returnTypeNode.type = symTable.nilType; BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body; BInvokableType initFnType = (BInvokableType) initFunction.type; for (BLangSimpleVariable field : objectTypeNode.fields) { if (field.expr != null) { continue; } BVarSymbol fieldSym = field.symbol; BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type, initFunction.symbol); BLangSimpleVariable param = ASTBuilderUtil.createVariable(objectTypeNode.pos, fieldSym.name.value, fieldSym.type, null, paramSym); param.flagSet.add(Flag.FINAL); initFunction.symbol.scope.define(paramSym.name, paramSym); initFunction.symbol.params.add(paramSym); 
// Continuation of createUserDefinedObjectInitFn: register the generated init() parameter
// on the function type and emit the `self.<field> = <param>` assignment into the init body.
initFnType.paramTypes.add(param.type);
initFunction.requiredParams.add(param);
BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.type,
        initFunction.receiver.symbol, field.name);
initFuncBody.addStatement(fieldInit);
}
return initFunction;
}

// Desugars a worker send: the sent value is wrapped in a clone invocation so the
// receiving worker gets its own copy of the value. An optional key expression
// (channel key) is rewritten as well.
@Override
public void visit(BLangWorkerSend workerSendNode) {
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type);
    if (workerSendNode.keyExpr != null) {
        workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
    }
    result = workerSendNode;
}

// Synchronous send (`->> w`): same cloning treatment as the async send above.
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
    result = syncSendExpr;
}

// Worker receive: only the optional key expression needs rewriting here.
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    if (workerReceiveNode.keyExpr != null) {
        workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
    }
    result = workerReceiveNode;
}

// Flush expression: derive the distinct list of target worker identifiers from the
// send statements cached during earlier analysis.
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}

// `transactional` expression: rewritten into an invocation of the transaction
// package's isTransactional function (invocation completed on the next source line).
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) symResolver.
// Continuation of visit(BLangTransactionalExpr): resolve IS_TRANSACTIONAL in the
// transaction module's environment and replace the expression with that invocation.
lookupSymbolInMainSpace(symTable.pkgEnvMap.get(getTransactionSymbol(env)), IS_TRANSACTIONAL);
result = ASTBuilderUtil
        .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol,
                Collections.emptyList(), Collections.emptyList(), symResolver);
}

// `commit` expression: delegated entirely to the transaction desugar.
@Override
public void visit(BLangCommitExpr commitExpr) {
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result = rewriteExpr(stmtExpr);
}

// XML attribute access (`x@[...]`): the `desugared` flag is set before the possible
// re-entrant rewrite below, presumably to stop infinite recursion through this
// visitor — NOTE(review): confirm against the rewrite dispatch logic.
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    xmlAttributeAccessExpr.desugared = true;
    // LHS access or indexed access is left for later stages; plain access is rewritten now.
    if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}

// The variable/function reference nodes below are already in their final desugared
// form — each visitor just passes the node through.
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}

@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}

@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}

@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}

@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}

@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
// Closes visit(BLangTableAccessExpr), begun on the previous source line: pass-through.
result = tableKeyAccessExpr;
}

// The literal nodes below need no further desugaring at this stage; pass them through.
@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}

@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}

@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}

// XML element access (`x.<elem>`): rewritten into a lang.xml getElements invocation
// whose rest arguments carry the expanded (namespace-qualified) element-name filters.
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}

// Expands each element filter into a string-literal argument. Prefixed names are
// resolved against the namespaces in scope; unprefixed names pick up the default
// namespace (except the `*` wildcard, which stays unqualified).
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ?
defaultNSSymbol.namespaceURI : null; ArrayList<BLangExpression> args = new ArrayList<>(); for (BLangXMLElementFilter filter : filters) { BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace)); if (nsSymbol == symTable.notFoundSymbol) { if (defaultNS != null && !filter.name.equals("*")) { String expandedName = createExpandedQName(defaultNS, filter.name); args.add(createStringLiteral(filter.elemNamePos, expandedName)); } else { args.add(createStringLiteral(filter.elemNamePos, filter.name)); } } else { BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol; String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name); BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName); args.add(stringLiteral); } } return args; } private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName, BLangExpression invokeOnExpr, ArrayList<BLangExpression> args, ArrayList<BLangExpression> restArgs) { invokeOnExpr = rewriteExpr(invokeOnExpr); BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); name.pos = pos; invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.expr = invokeOnExpr; invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName)); ArrayList<BLangExpression> requiredArgs = new ArrayList<>(); requiredArgs.add(invokeOnExpr); requiredArgs.addAll(args); invocationNode.requiredArgs = requiredArgs; invocationNode.restArgs = rewriteExprs(restArgs); invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType(); invocationNode.langLibInvocation = true; return invocationNode; } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { 
// Body of visit(BLangXMLNavigationAccess), begun on the previous source line.
// Each navigation kind maps to a dedicated lang.xml helper invocation:
//   DESCENDANTS (`x/**/<e>`) -> selectDescendants, CHILDREN (`x/*`) -> children,
//   otherwise a filtered-children access, with -1 signalling "no index given".
xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
            XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
} else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
            XML_INTERNAL_CHILDREN, xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
    result = rewriteExpr(invocationNode);
} else {
    BLangExpression childIndexExpr;
    if (xmlNavigation.childIndex == null) {
        // -1 is the sentinel for "no child index supplied".
        childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
    } else {
        childIndexExpr = xmlNavigation.childIndex;
    }
    ArrayList<BLangExpression> args = new ArrayList<>();
    args.add(rewriteExpr(childIndexExpr));
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
            XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
    result = rewriteExpr(invocationNode);
}
}

// Only the LHS of an is-assignable expression needs rewriting.
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}

// Function-pointer invocations are already fully lowered; pass through.
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    result = fpInvocation;
}

@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}

// Exclusive bounds are normalised (start+1 / end-1) before the range is rewritten
// (rewrite continues on the next source line).
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr); intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr); result = intRangeExpression; } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { result = rewriteExpr(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr); result = bLangNamedArgsExpression.expr; } @Override public void visit(BLangMatchExpression bLangMatchExpression) { addMatchExprDefaultCase(bLangMatchExpression); String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result"; BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName, bLangMatchExpression.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID, bLangMatchExpression.type, this.env.scope.owner)); BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar); tempResultVarDef.desugared = true; BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef)); List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>(); for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) { BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i); pattern.expr = rewriteExpr(pattern.expr); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol); pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt)); 
patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody)); } stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr, patternClauses)); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol); BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef); statementExpr.type = bLangMatchExpression.type; result = rewriteExpr(statementExpr); } @Override public void visit(BLangCheckedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr, false); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr, true); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) { String checkedExprVarName = GEN_VAR_PREFIX.value; BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0, names.fromString(checkedExprVarName), this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner)); BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar); checkedExprVarDef.desugared = true; BLangMatchTypedBindingPatternClause patternSuccessCase = getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true, checkedExprVar.symbol, null); BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos, this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic); BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr, new ArrayList<BLangMatchTypedBindingPatternClause>() {{ add(patternSuccessCase); add(patternErrorCase); }}); BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos, new ArrayList<BLangStatement>() {{ add(checkedExprVarDef); 
add(matchStmt); }}); BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef( checkedExpr.pos, checkedExprVar.symbol); BLangStatementExpression statementExpr = createStatementExpression( generatedStmtBlock, tempCheckedExprVarRef); statementExpr.type = checkedExpr.type; result = rewriteExpr(statementExpr); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos, serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type); serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = rewriteExpr(typeInit); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { BLangExpression expr = typeTestExpr.expr; if (types.isValueType(expr.type)) { addConversionExprIfRequired(expr, symTable.anyType); } typeTestExpr.expr = rewriteExpr(expr); typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env); result = typeTestExpr; } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode(); binaryExpr.pos = annotAccessExpr.pos; binaryExpr.opKind = OperatorKind.ANNOT_ACCESS; binaryExpr.lhsExpr = annotAccessExpr.expr; binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType, annotAccessExpr.annotationSymbol.bvmAlias()); binaryExpr.type = annotAccessExpr.type; binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null, new BInvokableType(Lists.of(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type), annotAccessExpr.type, null), null); result = rewriteExpr(binaryExpr); } @Override public void visit(BLangIsLikeExpr isLikeExpr) { isLikeExpr.expr = rewriteExpr(isLikeExpr.expr); result = isLikeExpr; } @Override public void visit(BLangStatementExpression bLangStatementExpression) { bLangStatementExpression.expr = 
// Continuation of visit(BLangStatementExpression): rewrite expression then statement.
rewriteExpr(bLangStatementExpression.expr);
bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
result = bLangStatementExpression;
}

// Query expressions/actions are lowered by the dedicated query desugar, then the
// resulting statement expression is itself rewritten.
@Override
public void visit(BLangQueryExpr queryExpr) {
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangQueryAction queryAction) {
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}

// Constants with simple literal types are re-materialised from the constant symbol's
// resolved value; other constant expressions are rewritten normally.
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // A non-nil simple-type constant must have a resolved value by this phase.
        if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}

@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    result = ignoreExpr;
}

// A constant reference is replaced by a literal carrying the constant's value.
@Override
public void visit(BLangConstRef constantRef) {
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value);
}

// Builds `$iterator$ = <collection>.iterator()` as a variable definition for use by
// generated foreach/query code (invocation wiring continues on the next source line).
BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol =
iteratorInvokableSymbol;
// Continuation of getIteratorVariableDefinition from the previous source line.
iteratorInvocation.type = iteratorInvokableSymbol.retType;
iteratorInvocation.argExprs = Lists.of(dataReference);
iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
// The "$iterator$" name uses $-characters so it cannot clash with user identifiers.
BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
        iteratorInvokableSymbol.retType, this.env.scope.owner);
BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
        iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}

/**
 * Builds the {@code $result$} variable definition, initialized with an
 * invocation of the iterator's {@code next} function.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}

/**
 * Builds the assignment of the iterator's {@code next()} result to the
 * {@code $result$} variable; the receiver's type is narrowed via
 * {@code types.getSafeType(..., liftNil=true, liftError=false)}.
 */
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos, BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}

/** Builds the {@code $iterator$.next()} invocation node. */
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // NOTE(review): getNextFunc may return null; a "next"-less object type would NPE here.
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.type = nextFuncSymbol.retType;
    return nextInvocation;
}

/**
 * Looks up the attached function named "next" on the given object type.
 *
 * @return the matching attached function, or {@code null} if none exists
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
        if (bAttachedFunction.funcName.value.equals("next")) {
            return bAttachedFunction;
        }
    }
    return null;
}

/** Convenience overload of {@link #getFieldAccessExpression} for the "value" field. */
BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) {
    return getFieldAccessExpression(pos, "value", varType, resultSymbol);
}

/** Builds a field access on the given result symbol with the given static type. */
BLangFieldBasedAccess getFieldAccessExpression(DiagnosticPos pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.type = varType;
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.type;
    return fieldBasedAccessExpression;
}

/**
 * Converts an expression-bodied arrow function into a block body containing a
 * single return of that expression.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}

private BLangInvocation
createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    // Continuation of the signature opened on the previous source line.
    // Builds an invocation of a root-scope function resolved by name.
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // The symbol is looked up in the root scope, so this only resolves built-ins.
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.type = retType;
    invocationNode.requiredArgs = args;
    return invocationNode;
}

/**
 * Builds a lang-lib method invocation on {@code onExpr}. The receiver is
 * prepended to the required args; when {@code retType} is null the resolved
 * symbol's declared return type is used instead.
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                   BLangExpression onExpr,
                                                   List<BLangExpression> args,
                                                   BType retType,
                                                   DiagnosticPos pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName));
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.type = retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType;
    invocationNode.langLibInvocation = true;
    return invocationNode;
}

/** Creates an empty array literal node typed {@code any[]}. */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.type = new BArrayType(symTable.anyType);
    return expr;
}

/**
 * Desugars an invocation through a function pointer: rebuilds the callee as a
 * simple (or field-based) variable reference carrying the invocation's symbol
 * and wraps the call in a {@code BFunctionPointerInvocation}.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangVariableReference expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.type = iExpr.symbol.type;
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}

/**
 * Wraps the expression in a {@code clone()} lang-lib call; value types and
 * error-typed expressions are returned unchanged.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type)) {
        return expr;
    }
    if (expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type,
            expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}

/** Same as {@link #visitCloneInvocation} but invokes {@code cloneReadOnly()}. */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type)) {
        return expr;
    }
    if (expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            expr.type, expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}

/**
 * Central rewrite dispatch: visits the node under the given environment,
 * marks the produced node as desugared (so it is never rewritten twice), and
 * restores the previous environment before returning.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}

@SuppressWarnings("unchecked") <E
extends BLangExpression> E rewriteExpr(E node) {
    // Continuation of the generic signature opened on the previous source line.
    // Expression counterpart of rewrite(node, env): if an implicit conversion
    // is attached, the conversion expression is visited in place of the node.
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}

/**
 * Statement rewrite: additionally maintains the {@code BLangStatementLink}
 * chain ({@code currentLink}) around the generic node rewrite.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}

// NOTE(review): rewriteStmt and the List overload of rewrite below are
// textually identical apart from the element bound; candidates for merging.
/** Rewrites each statement of the list in place and returns the same list. */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

/** Rewrites each node of the list in place and returns the same list. */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

/** Rewrites each expression of the list in place and returns the same list. */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewriteExpr(nodeList.get(i)));
    }
    return nodeList;
}

/** Creates a string literal node at the given position. */
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}

/** Creates an int literal node (no source position is set). */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.type = symTable.intType;
    return literal;
}

/** Creates a byte literal node; the byte is widened with Byte.toUnsignedInt. */
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}

/** Wraps the expression in a type-conversion node targeting {@code targetType}. */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.type = targetType;
    conversionExpr.targetType = targetType;
    return conversionExpr;
}

/** Recursively unwraps array types down to the ultimate element type. */
private BType getElementType(BType type) {
    if (type.tag != TypeTags.ARRAY) {
        return type;
    }
    return getElementType(((BArrayType) type).getElementType());
}

/**
 * Appends an implicit nil return to a block-bodied, worker-free function
 * whose return type is never or nilable, when its last statement is not
 * already a return. Native functions and non-block bodies are left untouched.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol)
            || (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    boolean isNeverOrNilableReturn = invokableNode.symbol.type.getReturnType().tag == TypeTags.NEVER
            || invokableNode.symbol.type.getReturnType().isNullable();
    if (invokableNode.workers.size() == 0 && isNeverOrNilableReturn
            && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        // Position the synthesized return at the closing line of the function.
        DiagnosticPos invPos = invokableNode.pos;
        DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine,
                invPos.sCol, invPos.sCol);
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}

/**
 * Reorder the invocation arguments to match the original function signature.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // When a trailing spread (rest-args) expression must also fill required
    // params, it is first captured in a generated temp variable so it can be
    // indexed and sliced below.
    BLangExpression varargRef = null;
    BLangBlockStmt blockStmt = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 && restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR
            && originalRequiredArgCount < invokableSymbol.params.size()) {
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        DiagnosticPos varargExpPos = expr.pos;
        BType varargVarType = expr.type;
        String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName),
                this.env.scope.owner.pkgID, varargVarType, this.env.scope.owner);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr,
                varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.type = varargVarType;
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Put positional/named args into declared-parameter order.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no trailing spread expression — pack the individual rest args
    // into one array literal for the rest param.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.type = arrayType;
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: the only rest arg is a spread expression.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // The spread feeds the rest param only; nothing to redistribute.
            return;
        }
        // Attach the temp-var block to the first arg so the captured vararg
        // variable is defined before any of its member accesses evaluate.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.type = firstNonRestArg.type;
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            return;
        }
        // The tail of the vararg beyond the required params is passed to the
        // rest param via a slice(startIndex) lang-lib call.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation sliceInvocation = createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef,
                new ArrayList<BLangExpression>() {{ add(startIndex); }}, varargRef.type, varargRef.pos);
        restArgs.remove(0);
        restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type));
        return;
    }
    // Case 3: individual rest args followed by a spread — build an array
    // literal from the leading args, then push() the spread onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.type = restParamType;
    BType elemType = restParamType.eType;
    DiagnosticPos pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
            restParamType, this.env.scope.owner);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = restParamType;
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{ add(pushRestArgsExpr); }}, restParamType, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.type = restParamType;
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}

/**
 * Rebuilds iExpr.requiredArgs in declared-parameter order, resolving
 * positional args, named args, and (when a captured vararg exists) values
 * taken from the vararg; params with no provided value receive a
 * BLangIgnoreExpr placeholder typed as the param.
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol,
                              BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new HashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = varargRef.type;
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // A positional arg was provided for this param.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            args.add(namedArgs.get(param.name.value));
        } else if (varargRef == null) {
            // No value and no vararg: placeholder expression typed as the param.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.type = param.type;
            args.add(expr);
        } else {
BLangIndexBasedAccess memberAccessExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode(); memberAccessExpr.pos = varargRef.pos; memberAccessExpr.expr = varargRef; memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex)); memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType; varargIndex++; args.add(addConversionExprIfRequired(memberAccessExpr, param.type)); } } iExpr.requiredArgs = args; } private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern( DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) { BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType; Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ? ((BUnionType) enclosingFuncReturnType).getMemberTypes() : new LinkedHashSet<BType>() {{ add(enclosingFuncReturnType); }}; boolean returnOnError = equivalentErrorTypes.stream() .allMatch(errorType -> returnTypeSet.stream() .anyMatch(retType -> types.isAssignable(errorType, retType))); String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure"; BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos, patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(patternFailureCaseVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos, patternFailureCaseVar.symbol); BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode(); patternBlockFailureCase.pos = pos; if (!isCheckPanicExpr && returnOnError) { BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode(); returnStmt.pos = pos; returnStmt.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(returnStmt); } else { BLangPanic panicNode = (BLangPanic) 
TreeBuilder.createPanicNode(); panicNode.pos = pos; panicNode.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(panicNode); } return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase); } private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType, boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) { String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match"; BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos, patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0, names.fromString(patternSuccessCaseVarName), this.env.scope.owner.pkgID, lhsType, this.env.scope.owner)); BLangExpression varRefExpr; if (isVarDef) { varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol); } else { varRefExpr = lhsExpr; } BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos, patternSuccessCaseVar.symbol); BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos, varRefExpr, patternSuccessCaseVarRef, false); BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<BLangStatement>() {{ add(assignmentStmtSuccessCase); }}); return ASTBuilderUtil.createMatchStatementPattern(pos, patternSuccessCaseVar, patternBlockSuccessCase); } private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) { List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses; BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar); BLangIf currentIfNode = parentIfNode; for (int i = 1; i < patterns.size(); i++) { BLangMatchBindingPatternClause patternClause = patterns.get(i); if (i == patterns.size() - 1 && patternClause.isLastPattern) { currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar); } else { currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar); 
currentIfNode = (BLangIf) currentIfNode.elseStmt; } } return parentIfNode; } /** * Generate an if-else statement from the given match statement. * * @param pattern match pattern statement node * @param matchExprVar variable node of the match expression * @return if else statement node */ private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol); if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) { BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar); return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null); } BType expectedType = matchExprVar.type; if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) { BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern; expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable); } if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) { BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType); BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol); structuredPattern.bindingPatternVariable.expr = matchExprVarRef; BLangStatement varDefStmt; if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos, (BLangTupleVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos, (BLangRecordVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.ERROR_VARIABLE == 
structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos, (BLangErrorVariable) structuredPattern.bindingPatternVariable); } else { varDefStmt = ASTBuilderUtil .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable); } if (structuredPattern.typeGuardExpr != null) { BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos); blockStmt.addStatement(varDef); blockStmt.addStatement(varDefStmt); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, structuredPattern.typeGuardExpr); stmtExpr.type = symTable.booleanType; ifCondition = ASTBuilderUtil .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND, (BOperatorSymbol) symResolver .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType, symTable.booleanType)); } else { structuredPattern.body.stmts.add(0, varDef); structuredPattern.body.stmts.add(1, varDefStmt); } } return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null); } private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangBlockStmt body; BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern; if (patternClause.variable.name.value.equals(Names.IGNORE.value)) { return patternClause.body; } BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos, matchExprVar.symbol); BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef, patternClause.variable.type); BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "", patternClause.variable.type, patternVarExpr, patternClause.variable.symbol); BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar); patternClause.body.stmts.add(0, patternVarDef); body = patternClause.body; return 
body; } private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) { BLangBlockStmt body = pattern.body; if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) { BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol); BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) pattern; structuredPattern.bindingPatternVariable.expr = matchExprVarRef; BLangStatement varDefStmt; if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos, (BLangTupleVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos, (BLangRecordVariable) structuredPattern.bindingPatternVariable); } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) { varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos, (BLangErrorVariable) structuredPattern.bindingPatternVariable); } else { varDefStmt = ASTBuilderUtil .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable); } structuredPattern.body.stmts.add(0, varDefStmt); body = structuredPattern.body; } return body; } BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) { if (lhsType.tag == TypeTags.NONE) { return expr; } BType rhsType = expr.type; if (types.isSameType(rhsType, lhsType)) { return expr; } types.setImplicitCastExpr(expr, rhsType, lhsType); if (expr.impConversionExpr != null) { return expr; } if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) { return expr; } if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) { return expr; } if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) { return expr; } 
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); conversionExpr.expr = expr; conversionExpr.targetType = lhsType; conversionExpr.type = lhsType; conversionExpr.pos = expr.pos; conversionExpr.checkTypes = false; return conversionExpr; } private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause, BVarSymbol varSymbol) { BType patternType; switch (patternClause.getKind()) { case MATCH_STATIC_PATTERN_CLAUSE: BLangMatchStaticBindingPatternClause staticPattern = (BLangMatchStaticBindingPatternClause) patternClause; patternType = staticPattern.literal.type; break; case MATCH_STRUCTURED_PATTERN_CLAUSE: BLangMatchStructuredBindingPatternClause structuredPattern = (BLangMatchStructuredBindingPatternClause) patternClause; patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable); break; default: BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause; patternType = simplePattern.variable.type; break; } BLangExpression binaryExpr; BType[] memberTypes; if (patternType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) patternType; memberTypes = unionType.getMemberTypes().toArray(new BType[0]); } else { memberTypes = new BType[1]; memberTypes[0] = patternType; } if (memberTypes.length == 1) { binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); } else { BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]); BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]); binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR, lhsExpr.type, rhsExpr.type)); for (int i = 2; i < memberTypes.length; i++) { lhsExpr = createPatternMatchBinaryExpr(patternClause, 
varSymbol, memberTypes[i]); rhsExpr = binaryExpr; binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR, lhsExpr.type, rhsExpr.type)); } } return binaryExpr; } private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) { if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) { BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable; List<BType> memberTypes = new ArrayList<>(); for (int i = 0; i < tupleVariable.memberVariables.size(); i++) { memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i))); } BTupleType tupleType = new BTupleType(memberTypes); if (tupleVariable.restVariable != null) { BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable); tupleType.restType = restArrayType.eType; } return tupleType; } if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) { BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable; BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++), env.enclPkg.symbol.pkgID, null, env.scope.owner); recordSymbol.initializerFunc = createRecordInitFunc(); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "." 
+ recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); List<BLangSimpleVariable> typeDefFields = new ArrayList<>(); for (int i = 0; i < recordVariable.variableList.size(); i++) { String fieldNameStr = recordVariable.variableList.get(i).key.value; Name fieldName = names.fromString(fieldNameStr); BType fieldType = getStructuredBindingPatternType( recordVariable.variableList.get(i).valueBindingPattern); BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType, recordSymbol); fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol)); typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordVarType = new BRecordType(recordSymbol); recordVarType.fields = fields; recordVarType.restFieldType = recordVariable.restParam != null ? 
((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint : symTable.anydataType; recordSymbol.type = recordVarType; recordVarType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields, recordVarType, bindingPatternVariable.pos); recordTypeNode.initFunction = rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable), env); TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env); return recordVarType; } if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) { BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable; BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol( SymTag.ERROR, Flags.PUBLIC, names.fromString("$anonErrorType$" + errorCount++), env.enclPkg.symbol.pkgID, null, null); BType detailType; if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) { detailType = symTable.detailType; } else { detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++); BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType); recordTypeNode.initFunction = TypeDefBuilderHelper .createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env); } BErrorType errorType = new BErrorType(errorTypeSymbol, ((BErrorType) errorVariable.type).reasonType, detailType); errorTypeSymbol.type = errorType; TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env); return errorType; } return bindingPatternVariable.type; } private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) { List<BLangSimpleVariable> fieldList = new ArrayList<>(); for (BLangErrorVariable.BLangErrorDetailEntry field : 
// End of createRecordTypeNode's loop and return.
// createDetailType begins: synthesizes an anonymous public record type symbol named
// "$anonErrorType$N$detailType" for an error binding pattern's detail, wires up a
// generated record init function, and defines that init function in the new scope.
errorVariable.detail) { BVarSymbol symbol = field.valueBindingPattern.symbol; if (symbol == null) { symbol = new BVarSymbol( Flags.PUBLIC, names.fromString(field.key.value + "$"), this.env.enclPkg.packageID, symTable.pureType, null); } BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable( field.valueBindingPattern.pos, symbol.name.value, field.valueBindingPattern.type, field.valueBindingPattern.expr, symbol); fieldList.add(fieldVar); } return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos); } private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail, BLangSimpleVariable restDetail, int errorNo) { BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol( SymTag.RECORD, Flags.PUBLIC, names.fromString("$anonErrorType$" + errorNo + "$detailType"), env.enclPkg.symbol.pkgID, null, null); detailRecordTypeSymbol.initializerFunc = createRecordInitFunc(); detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol); detailRecordTypeSymbol.scope.define( names.fromString(detailRecordTypeSymbol.name.value + "."
// createDetailType continues: the detail record's rest-field type is anydata and the
// record is sealed only when there is no rest-detail binding. One public field is added
// per detail entry, its type resolved recursively via getStructuredBindingPatternType.
// createRecordInitFunc: builds a zero-parameter () -> nil attached init function symbol.
// createErrorTypeNode: wraps a BErrorType in a fresh BLangErrorType AST node.
// createPatternMatchBinaryExpr begins: for a static match pattern, delegates to
// createBinaryExpression against the pattern literal; the structured-pattern case
// continues on the next line.
+ detailRecordTypeSymbol.initializerFunc.funcName.value), detailRecordTypeSymbol.initializerFunc.symbol); BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol); detailRecordType.restFieldType = symTable.anydataType; if (restDetail == null) { detailRecordType.sealed = true; } for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) { Name fieldName = names.fromIdNode(detailEntry.key); BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern); BVarSymbol fieldSym = new BVarSymbol( Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol); detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym)); detailRecordTypeSymbol.scope.define(fieldName, fieldSym); } return detailRecordType; } private BAttachedFunction createRecordInitFunc() { BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false); initFuncSymbol.retType = symTable.nilType; return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType); } BLangErrorType createErrorTypeNode(BErrorType errorType) { BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode(); errorTypeNode.type = errorType; return errorTypeNode; } private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause, BVarSymbol varSymbol, BType patternType) { DiagnosticPos pos = patternClause.pos; BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) { BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause; return createBinaryExpression(pos, varRef, pattern.literal); } if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE ==
// createPatternMatchBinaryExpr continues: structured patterns become an is-like test;
// a nil pattern type becomes `varRef == ()` equality; anything else becomes an
// is-assignable test.
// createBinaryExpression: recursively unwraps GROUP_EXPR; for BINARY_EXPR, OR-combines
// the recursive results of both sides; a simple var ref named "_" becomes an `is any`
// type-test (matches anything); otherwise an equality comparison is built, resolving
// the EQUAL operator for the operand types — continued on the next line.
patternClause.getKind()) { return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType); } if (patternType == symTable.nilType) { BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null); return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType, OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL, symTable.anyType, symTable.nilType)); } else { return createIsAssignableExpression(pos, varSymbol, patternType); } } private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef, BLangExpression expression) { BLangBinaryExpr binaryExpr; if (NodeKind.GROUP_EXPR == expression.getKind()) { return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression); } if (NodeKind.BINARY_EXPR == expression.getKind()) { binaryExpr = (BLangBinaryExpr) expression; BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr); BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr); binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR, (BOperatorSymbol) symResolver .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType)); } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) { BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode(); anyType.type = symTable.anyType; anyType.typeKind = TypeKind.ANY; return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType); } else { binaryExpr = ASTBuilderUtil .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null); BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver
// Fallback: when no direct EQUAL operator exists for the operand types, use the
// anydata type-set equality resolver.
// createIsAssignableExpression / createIsLikeExpression: thin AST-builder wrappers.
// createAssignmentStmt: turns a variable declaration into `varRef = initializer`.
// createStructFieldUpdate (two overloads): builds a `self.field = expr` assignment for
// an object init function — continued on the next line.
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type, binaryExpr); } binaryExpr.opSymbol = (BOperatorSymbol) opSymbol; } return binaryExpr; } private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos, BVarSymbol varSymbol, BType patternType) { BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol); return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names); } private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) { return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType); } private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode(); varRef.pos = variable.pos; varRef.variableName = variable.name; varRef.symbol = variable.symbol; varRef.type = variable.type; BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode(); assignmentStmt.expr = variable.expr; assignmentStmt.pos = variable.pos; assignmentStmt.setVariable(varRef); return assignmentStmt; } private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable, BVarSymbol selfSymbol) { return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.type, selfSymbol, variable.name); } private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr, BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol, BLangIdentifier fieldName) { BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol); BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName); fieldAccess.symbol = fieldSymbol; fieldAccess.type = fieldType; fieldAccess.isStoreOnCreation = true; BLangAssignment assignmentStmt = (BLangAssignment)
// createStructFieldUpdate end: the assignment is rewritten within the init function's
// own SymbolEnv before being returned.
// addMatchExprDefaultCase: computes the expression's member types not covered by any
// existing pattern clause and, when some remain, appends a synthetic default clause
// whose variable type is the (possibly union) of the uncovered types.
// safeNavigate begins on the next line.
TreeBuilder.createAssignmentNode(); assignmentStmt.expr = expr; assignmentStmt.pos = function.pos; assignmentStmt.setVariable(fieldAccess); SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env); return rewrite(assignmentStmt, initFuncEnv); } private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) { List<BType> exprTypes; List<BType> unmatchedTypes = new ArrayList<>(); if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type; exprTypes = new ArrayList<>(unionType.getMemberTypes()); } else { exprTypes = Lists.of(bLangMatchExpression.type); } for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { if (this.types.isAssignable(type, pattern.variable.type)) { assignable = true; break; } } if (!assignable) { unmatchedTypes.add(type); } } if (unmatchedTypes.isEmpty()) { return; } BType defaultPatternType; if (unmatchedTypes.size() == 1) { defaultPatternType = unmatchedTypes.get(0); } else { defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes)); } String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default"; BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos, patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName), this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner)); BLangMatchExprPatternClause defaultPattern = (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern(); defaultPattern.variable = patternMatchCaseVar; defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol); defaultPattern.pos = bLangMatchExpression.pos; bLangMatchExpression.patternClauses.add(defaultPattern); } private boolean safeNavigate(BLangAccessExpression accessExpr) { if
// safeNavigate: an access expression needs safe-navigation desugaring when it (or any
// access expression in its receiver chain) carries the error/nil safe-navigation flags;
// LHS positions and expressions without a receiver are excluded.
// rewriteSafeNavigationExpr: desugars `a?.b?.c`-style chains into a temp result
// variable plus nested match statements (built by handleSafeNavigation), wrapped into a
// statement expression of the original type; the per-expression stacks and pending
// state are reset afterwards.
// handleSafeNavigation begins: recurses into the receiver chain first so matches nest
// outermost-receiver-first — continued on the next line.
(accessExpr.lhsVar || accessExpr.expr == null) { return false; } if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) { return true; } NodeKind kind = accessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) { return safeNavigate((BLangAccessExpression) accessExpr.expr); } return false; } private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) { BType originalExprType = accessExpr.type; String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result"; BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner)); BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol); handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar); BLangMatch matcEXpr = this.matchStmtStack.firstElement(); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr)); BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef); stmtExpression.type = originalExprType; this.matchStmtStack = new Stack<>(); this.accessExprStack = new Stack<>(); this.successPattern = null; this.safeNavigationAssignment = null; return stmtExpression; } private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) { if (accessExpr.expr == null) { return; } NodeKind kind = accessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar); } if
// Non-safe link in the chain: restore the access's original type (XML originals are
// widened with error into a union) and, if a pending safe-navigation assignment
// exists, patch its RHS with a conversion to the temp variable's type.
// Safe link: build a match statement (the block comment in the code below shows the
// shape); nil/error patterns are added per the nilSafe/errorSafe flags and removed from
// the member-type set. When all remaining union members are record types, one success
// pattern is added per member that actually declares the accessed field — continued
// on the next line.
(!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) { BType originalType = accessExpr.originalType; if (TypeTags.isXMLTypeTag(originalType.tag)) { accessExpr.type = BUnionType.create(null, originalType, symTable.errorType); } else { accessExpr.type = originalType; } if (this.safeNavigationAssignment != null) { this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type); } return; } /* * If the field access is a safe navigation, create a match expression. * Then chain the current expression as the success-pattern of the parent * match expr, if available. * eg: * x but { <--- parent match expr * error e => e, * T t => t.y but { <--- current expr * error e => e, * R r => r.z * } * } */ BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>()); boolean isAllTypesRecords = false; LinkedHashSet<BType> memTypes = new LinkedHashSet<>(); if (accessExpr.expr.type.tag == TypeTags.UNION) { memTypes = new LinkedHashSet<>(((BUnionType) accessExpr.expr.type).getMemberTypes()); isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes); } if (accessExpr.nilSafeNavigation) { matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar)); matchStmt.type = type; memTypes.remove(symTable.nilType); } if (accessExpr.errorSafeNavigation) { matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar)); matchStmt.type = type; matchStmt.pos = accessExpr.pos; memTypes.remove(symTable.errorType); } BLangMatchTypedBindingPatternClause successPattern = null; Name field = getFieldName(accessExpr); if (field == Names.EMPTY) { successPattern = getSuccessPattern(accessExpr.expr.type, accessExpr, tempResultVar, accessExpr.errorSafeNavigation); matchStmt.patternClauses.add(successPattern); pushToMatchStatementStack(matchStmt, accessExpr, successPattern); return; } if (isAllTypesRecords) { for (BType memberType : memTypes) { if (((BRecordType)
// All-records case finishes with a catch-all pattern that assigns nil; the general
// case uses a single success pattern over the receiver's full type.
// pushToMatchStatementStack: chains the new match statement into the previous success
// pattern's body (building the nested-match structure) and records the new success
// pattern as current.
// getFieldName: field name for field access, or the literal index value for index
// access; Names.EMPTY otherwise.
// isAllTypesAreRecordsInUnion: true when every member is RECORD, ERROR, or NIL.
// getMatchErrorPattern begins: builds `error e => tempResult = e` — continued on the
// next line.
memberType).fields.containsKey(field.value)) { successPattern = getSuccessPattern(memberType, accessExpr, tempResultVar, accessExpr.errorSafeNavigation); matchStmt.patternClauses.add(successPattern); } } matchStmt.patternClauses.add(getMatchAllAndNilReturnPattern(accessExpr, tempResultVar)); pushToMatchStatementStack(matchStmt, accessExpr, successPattern); return; } successPattern = getSuccessPattern(accessExpr.expr.type, accessExpr, tempResultVar, accessExpr.errorSafeNavigation); matchStmt.patternClauses.add(successPattern); pushToMatchStatementStack(matchStmt, accessExpr, successPattern); } private void pushToMatchStatementStack(BLangMatch matchStmt, BLangAccessExpression accessExpr, BLangMatchTypedBindingPatternClause successPattern) { this.matchStmtStack.push(matchStmt); if (this.successPattern != null) { this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt)); } this.successPattern = successPattern; } private Name getFieldName(BLangAccessExpression accessExpr) { Name field = Names.EMPTY; if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { field = new Name(((BLangFieldBasedAccess) accessExpr).field.value); } else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr; if (indexBasedExpression.getKind() == NodeKind.LITERAL) { field = new Name(((BLangLiteral) indexBasedExpression).value.toString()); } } return field; } private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) { for (BType memType : memTypes) { int typeTag = memType.tag; if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) { return false; } } return true; } private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error"; BLangSimpleVariable errorPatternVar =
// getMatchErrorPattern end: the error-typed pattern variable is assigned into the temp
// result variable inside the pattern body.
// getMatchNullPatternGivenExpression: builds a `_ => expr` match-expression pattern of
// nil type (used where a whole expression is returned for the nil case).
// getMatchNullPattern begins: `() => tempResult = <nil pattern var>` — continued on the
// next line.
ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(errorPatternVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause errorPattern = ASTBuilderUtil .createMatchStatementPattern(expr.pos, errorPatternVar, patternBody); return errorPattern; } private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos, BLangExpression expr) { String nullPatternVarName = IGNORE.toString(); BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(pos, nullPatternVarName, symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName), this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner)); BLangMatchExprPatternClause nullPattern = (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern(); nullPattern.variable = errorPatternVar; nullPattern.expr = expr; nullPattern.pos = pos; return nullPattern; } private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null"; BLangSimpleVariable nullPatternVar = ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName, symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName), this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner)); BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol);
// getMatchNullPattern end.
// getMatchAllAndNilReturnPattern: static `_ => tempResult = ()` catch-all pattern.
// getSuccessPattern begins: lifts nil (and optionally error) off the receiver type via
// types.getSafeType; invokable-typed receivers need a BInvokableSymbol for the pattern
// variable — continued on the next line.
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause nullPattern = ASTBuilderUtil .createMatchStatementPattern(expr.pos, nullPatternVar, patternBody); return nullPattern; } private BLangMatchStaticBindingPatternClause getMatchAllAndNilReturnPattern(BLangExpression expr, BLangSimpleVariable tempResultVar) { BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, createLiteral(expr.pos, symTable.nilType, Names.NIL_VALUE), false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt)); BLangMatchStaticBindingPatternClause matchAllPattern = (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern(); String matchAllVarName = "_"; matchAllPattern.literal = ASTBuilderUtil.createVariableRef(expr.pos, new BVarSymbol(0, names.fromString(matchAllVarName), this.env.scope.owner.pkgID, symTable.anyType, this.env.scope.owner)); matchAllPattern.body = patternBody; return matchAllPattern; } private BLangMatchTypedBindingPatternClause getSuccessPattern(BType type, BLangAccessExpression accessExpr, BLangSimpleVariable tempResultVar, boolean liftError) { type = types.getSafeType(type, true, liftError); String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success"; BVarSymbol successPatternSymbol; if (type.tag == TypeTags.INVOKABLE) { successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName), this.env.scope.owner.pkgID, type, this.env.scope.owner); } else { successPatternSymbol = new BVarSymbol(0,
// getSuccessPattern continues: clones the access expression, preserving the index
// expression and XML namespace symbol where applicable, rebinds its receiver to the
// success-pattern variable, clears the safe-navigation flags, restores the original
// type (XML originals are widened with error and nil), and assigns the (possibly
// converted) access result to the temp result variable; the assignment is remembered
// in safeNavigationAssignment so an outer non-safe link can patch it.
// safeNavigateLHS begins on the next line.
names.fromString(successPatternVarName), this.env.scope.owner.pkgID, type, this.env.scope.owner); } BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName, type, null, successPatternSymbol); BLangAccessExpression tempAccessExpr = nodeCloner.clone(accessExpr); if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr; } if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) { ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol = ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol; } tempAccessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol); tempAccessExpr.errorSafeNavigation = false; tempAccessExpr.nilSafeNavigation = false; accessExpr.cloneRef = null; if (TypeTags.isXMLTypeTag(tempAccessExpr.expr.type.tag)) { tempAccessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType, symTable.nilType); } else { tempAccessExpr.type = accessExpr.originalType; } tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess; BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol); BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.type); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt)); BLangMatchTypedBindingPatternClause successPattern = ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody); this.safeNavigationAssignment = assignmentStmt; return successPattern; } private boolean safeNavigateLHS(BLangExpression expr) { if (expr.getKind() !=
// safeNavigateLHS: true when some receiver in an LHS field/index access chain has a
// nullable type (such assignments need nil guards).
// rewriteSafeNavigationAssignment: emits the guard statements for the receiver chain
// (createLHSSafeNavigation), then the assignment itself on a cloned access expression,
// all wrapped in a block statement. NOTE(review): the safeAssignment parameter is
// currently unused by the body.
// createLHSSafeNavigation begins: walks to the chain root, hoisting each INVOCATION
// receiver into an intermediate temp variable, and for every nullable receiver emits
// an `if (recv is ()) { ... }` guard — continued on the next line.
NodeKind.FIELD_BASED_ACCESS_EXPR && expr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) { return false; } BLangExpression varRef = ((BLangAccessExpression) expr).expr; if (varRef.type.isNullable()) { return true; } return safeNavigateLHS(varRef); } private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr, boolean safeAssignment) { this.accessExprStack = new Stack<>(); List<BLangStatement> stmts = new ArrayList<>(); createLHSSafeNavigation(stmts, accessExpr.expr); BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, cloneExpression(accessExpr), rhsExpr); stmts.add(assignment); return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts); } private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) { NodeKind kind = expr.getKind(); boolean root = false; if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { BLangAccessExpression accessExpr = (BLangAccessExpression) expr; createLHSSafeNavigation(stmts, accessExpr.expr); accessExpr.expr = accessExprStack.pop(); } else { root = true; } if (expr.getKind() == NodeKind.INVOCATION) { BLangInvocation invocation = (BLangInvocation) expr; BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value + "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner); BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos, interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol); BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos, intermediateVariable); stmts.add(intermediateVariableDefinition); expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol); } if (expr.type.isNullable()) { BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
// Guard body: for a non-root receiver of a defaultable mapping type (json/map/record),
// assign a fresh empty mapping literal; otherwise construct and panic with a
// NullReferenceError. The guarded (non-nil-typed) expression is pushed for the caller.
// getNillTypeNode: fresh `()` type node.
// cloneExpression: shallow re-creation dispatch for var refs and access expressions.
// cloneAccessExpr begins on the next line.
isNillTest.type = symTable.booleanType; BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos); expr = cloneExpression(expr); expr.type = types.getSafeType(expr.type, true, false); if (isDefaultableMappingType(expr.type) && !root) { BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode(); jsonLiteral.type = expr.type; jsonLiteral.pos = expr.pos; BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos, expr, jsonLiteral); thenStmt.addStatement(assignment); } else { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = ERROR_REASON_NULL_REFERENCE_ERROR; literal.type = symTable.stringType; BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); errorCtorInvocation.pos = expr.pos; errorCtorInvocation.argExprs.add(literal); errorCtorInvocation.requiredArgs.add(literal); errorCtorInvocation.type = symTable.errorType; errorCtorInvocation.symbol = symTable.errorConstructor; BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.expr = errorCtorInvocation; panicNode.pos = expr.pos; thenStmt.addStatement(panicNode); } BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null); stmts.add(ifelse); } accessExprStack.push(expr); } BLangValueType getNillTypeNode() { BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); nillTypeNode.typeKind = TypeKind.NIL; nillTypeNode.type = symTable.nilType; return nillTypeNode; } private BLangVariableReference cloneExpression(BLangExpression expr) { switch (expr.getKind()) { case SIMPLE_VARIABLE_REF: return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol); case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: case INVOCATION: return cloneAccessExpr((BLangAccessExpression) expr); default: throw new IllegalStateException(); } } private BLangAccessExpression cloneAccessExpr(BLangAccessExpression
// cloneAccessExpr: recursively clones an access chain, giving each receiver its
// nil-lifted safe type and clearing the safe-navigation flags on the clone.
// NOTE(review): the INVOCATION case sets accessExpr to null and then dereferences it
// below — this path would NPE if ever taken; confirm it is unreachable from callers.
// getModifiedIntRangeStartExpr / getModifiedIntRangeEndExpr: build `expr + 1` /
// `expr - 1` for half-open integer range adjustment — continued on the next line.
originalAccessExpr) { if (originalAccessExpr.expr == null) { return originalAccessExpr; } BLangVariableReference varRef; NodeKind kind = originalAccessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr); } else { varRef = cloneExpression(originalAccessExpr.expr); } varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false); BLangAccessExpression accessExpr; switch (originalAccessExpr.getKind()) { case FIELD_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef, ((BLangFieldBasedAccess) originalAccessExpr).field); break; case INDEX_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef, ((BLangIndexBasedAccess) originalAccessExpr).indexExpr); break; case INVOCATION: accessExpr = null; break; default: throw new IllegalStateException(); } accessExpr.originalType = originalAccessExpr.originalType; accessExpr.pos = originalAccessExpr.pos; accessExpr.lhsVar = originalAccessExpr.lhsVar; accessExpr.symbol = originalAccessExpr.symbol; accessExpr.errorSafeNavigation = false; accessExpr.nilSafeNavigation = false; accessExpr.type = originalAccessExpr.originalType; return accessExpr; } private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.intType, symTable.intType)); } private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB, (BOperatorSymbol)
// End of getModifiedIntRangeEndExpr.
// getBooleanLiteral: boolean-typed literal node.
// isDefaultableMappingType: json/map/record (after lifting nil) can be defaulted to an
// empty mapping literal.
// createInitFunctionForObjectType: attaches a generated `$init$`-style function
// (returning nil) to an object type and rewrites it.
// visitBinaryLogicalExpr begins: desugars logical && / || into a temp result variable
// plus an if-else, short-circuiting via the branch structure shown in the code's own
// block comment — continued on the next line.
symResolver.resolveBinaryOperator(OperatorKind.SUB, symTable.intType, symTable.intType)); } private BLangLiteral getBooleanLiteral(boolean value) { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = value; literal.type = symTable.booleanType; return literal; } private boolean isDefaultableMappingType(BType type) { switch (types.getSafeType(type, true, false).tag) { case TypeTags.JSON: case TypeTags.MAP: case TypeTags.RECORD: return true; default: return false; } } private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) { BLangFunction initFunction = TypeDefBuilderHelper.createInitFunctionForStructureType(structureTypeNode, env, Names.GENERATED_INIT_SUFFIX, names, symTable); BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) structureTypeNode.type.tsymbol); typeSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX, initFunction.symbol, (BInvokableType) initFunction.type); structureTypeNode.generatedInitFunction = initFunction; initFunction.returnTypeNode.type = symTable.nilType; return rewrite(initFunction, env); } private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) { /* * Desugar (lhsExpr && rhsExpr) to following if-else: * * logical AND: * ------------- * T $result$; * if (lhsExpr) { * $result$ = rhsExpr; * } else { * $result$ = false; * } * * logical OR: * ------------- * T $result$; * if (lhsExpr) { * $result$ = true; * } else { * $result$ = rhsExpr; * } * */ BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos); BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangExpression thenResult; if (binaryExpr.opKind == OperatorKind.AND) { thenResult = binaryExpr.rhsExpr; } else {
thenResult = getBooleanLiteral(true); } BLangAssignment thenAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult); thenBody.addStatement(thenAssignment); BLangExpression elseResult; BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); if (binaryExpr.opKind == OperatorKind.AND) { elseResult = getBooleanLiteral(false); } else { elseResult = binaryExpr.rhsExpr; } BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult); elseBody.addStatement(elseAssignment); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse)); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.type = binaryExpr.type; result = rewriteExpr(stmtExpr); } /** * Split packahe init function into several smaller functions. 
* * @param packageNode package node * @param env symbol environment * @return initial init function but trimmed in size */ private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) { int methodSize = INIT_METHOD_SPLIT_SIZE; BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body; if (funcBody.stmts.size() < methodSize || !isJvmTarget) { return packageNode.initFunction; } BLangFunction initFunction = packageNode.initFunction; List<BLangFunction> generatedFunctions = new ArrayList<>(); List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts); funcBody.stmts.clear(); BLangFunction newFunc = initFunction; BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body; int varDefIndex = 0; for (int i = 0; i < stmts.size(); i++) { if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) { break; } varDefIndex++; if (i > 0 && i % methodSize == 0) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } List<BLangStatement> chunkStmts = new ArrayList<>(); for (int i = varDefIndex; i < stmts.size(); i++) { BLangStatement stmt = stmts.get(i); chunkStmts.add(stmt); varDefIndex++; if ((stmt.getKind() == NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) && (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) { if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.addAll(chunkStmts); chunkStmts.clear(); } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).varRef instanceof 
BLangPackageVarRef) && Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags, Flags.LISTENER) ) { break; } } newFuncBody.stmts.addAll(chunkStmts); for (int i = varDefIndex; i < stmts.size(); i++) { if (i > 0 && i % methodSize == 0) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } generatedFunctions.add(newFunc); for (int j = 0; j < generatedFunctions.size() - 1; j++) { BLangFunction thisFunction = generatedFunctions.get(j); BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(initFunction.pos, createInvocationNode(generatedFunctions.get(j + 1).name.value, new ArrayList<>(), symTable.errorOrNilType), symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt expressionStmt = ASTBuilderUtil .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body); expressionStmt.expr = checkedExpr; expressionStmt.expr.pos = initFunction.pos; if (j > 0) { thisFunction = rewrite(thisFunction, env); packageNode.functions.add(thisFunction); packageNode.topLevelNodes.add(thisFunction); } } if (generatedFunctions.size() > 1) { BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1); lastFunc = rewrite(lastFunc, env); packageNode.functions.add(lastFunc); packageNode.topLevelNodes.add(lastFunc); } return generatedFunctions.get(0); } /** * Create an intermediate package init function. 
 *
 * @param pkgNode package node the new init function belongs to
 * @param env     symbol environment
 */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();
    // Build a fresh error-or-nil-returning init function with a unique
    // "<init suffix>N" name (initFuncIndex is the running counter).
    BLangFunction initFunction = ASTBuilderUtil
            .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
                    new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++), symTable);
    createInvokableSymbol(initFunction, env);
    return initFunction;
}

// Rest-parameter type of the given invokable symbol, or null if it has none.
private BType getRestType(BInvokableSymbol invokableSymbol) {
    if (invokableSymbol != null && invokableSymbol.restParam != null) {
        return invokableSymbol.restParam.type;
    }
    return null;
}

// Rest-parameter type of the given function, or null if it has none.
private BType getRestType(BLangFunction function) {
    if (function != null && function.restParam != null) {
        return function.restParam.type;
    }
    return null;
}

// Rest-parameter symbol of the given function, or null if it has none.
private BVarSymbol getRestSymbol(BLangFunction function) {
    if (function != null && function.restParam != null) {
        return function.restParam.symbol;
    }
    return null;
}

// True only for key-value fields whose key is a computed ([expr]) key.
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    if (!field.isKeyValueField()) {
        return false;
    }
    return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}

/**
 * Rewrites a mapping constructor into a struct literal (for record types) or a
 * map literal (otherwise), desugaring each field's key and value expressions.
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.type;
    DiagnosticPos pos = mappingConstructorExpr.pos;
    List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression origKey = key.expr;
            // Identifier keys are turned into string literals; computed keys and
            // literal keys pass through unchanged.
            BLangExpression keyExpr = key.computedKey ? origKey :
                    origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
                            createStringLiteral(pos, ((BLangSimpleVarRef) origKey).variableName.value) :
                            ((BLangLiteral) origKey);
            rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
                    rewriteExpr(keyValueField.valueExpr)));
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field `{x}` expands to `{"x": x}`.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
                    rewriteExpr(createStringLiteral(pos, varRefField.variableName.value)),
                    rewriteExpr(varRefField)));
        } else {
            // Spread-operator field `...expr`.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            spreadOpField.expr = rewriteExpr(spreadOpField.expr);
            rewrittenFields.add(spreadOpField);
        }
    }
    fields.clear();
    return type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type, rewrittenFields) :
            new BLangMapLiteral(pos, type, rewrittenFields);
}

/**
 * Looks up the symbol of the imported transaction module in the enclosing package.
 * NOTE(review): {@code findAny().get()} throws NoSuchElementException when the
 * transaction module is not among the imports — callers must guarantee the import
 * exists before calling this; confirm against call sites.
 */
public BSymbol getTransactionSymbol(SymbolEnv env) {
    return env.enclPkg.imports
            .stream()
            .filter(importPackage -> importPackage.symbol.pkgID.orgName.value.equals(Names.TRANSACTION_ORG.value)
                    && importPackage.symbol.pkgID.name.value.equals(Names.TRANSACTION_PACKAGE.value))
            .findAny().get().symbol;
}
}
// NOTE(review): out-of-context fragment — builds an object type from classTSymbol
// with updatedFlags; both names are declared outside this excerpt. Verify against
// the surrounding class-definition desugaring code before relying on this.
BObjectType objectClassType = new BObjectType(classTSymbol, updatedFlags);
/**
 * Desugars a foreach statement into an equivalent while loop driven by the
 * iterator in {@code varDef}: each iteration re-assigns the iterator's next()
 * result, and the loop runs while that result still matches the expected
 * (non-nil) result type.
 */
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    // $result$ holds the (possibly nil) value produced by each iterator next() call.
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner);
    BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
            foreach.nillableResultType, iteratorSymbol, resultSymbol);
    BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
    BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    // Loop condition: `$result$ is <resultType>` — ends the loop once the result
    // no longer matches (e.g. when the iterator is exhausted).
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
            .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = foreach.pos;
    whileNode.expr = typeTestExpr;
    whileNode.body = foreach.body;
    BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
    VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
    // Bind the user's loop variable to the `value` field of $result$, converted
    // to the declared variable type.
    BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
    valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr,
            types.getSafeType(valueAccessExpr.expr.type, true, false));
    variableDefinitionNode.getVariable()
            .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
    // Loop body order: [0] bind the loop variable, [1] fetch the next result,
    // then the original foreach body.
    whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
    whileNode.body.stmts.add(1, resultAssignment);
    BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
    blockNode.addStatement(varDef);
    blockNode.addStatement(resultVariableDefinition);
    blockNode.addStatement(whileNode);
    return blockNode;
}

// Builds a BLangUserDefinedType node for the given type (body completes below).
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType recordType = new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
recordType.type = type; return recordType; } @Override public void visit(BLangWhile whileNode) { whileNode.expr = rewriteExpr(whileNode.expr); whileNode.body = rewrite(whileNode.body, env); result = whileNode; } @Override public void visit(BLangLock lockNode) { BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos); BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos); blockStmt.addStatement(lockStmt); enclLocks.push(lockStmt); BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE); BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType); BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral); statementExpression.type = symTable.nilType; BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode(); trapExpr.type = nillableError; trapExpr.expr = statementExpression; BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"), this.env.scope.owner.pkgID, nillableError, this.env.scope.owner); BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult", nillableError, trapExpr, nillableErrorVarSymbol); BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable); blockStmt.addStatement(simpleVariableDef); BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos); unLockStmt.relatedLock = lockStmt; blockStmt.addStatement(unLockStmt); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol); BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos); BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode(); panicNode.pos = lockNode.pos; panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType); ifBody.addStatement(panicNode); BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, 
getErrorTypeNode()); isErrorTest.type = symTable.booleanType; BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null); blockStmt.addStatement(ifelse); result = rewrite(blockStmt, env); enclLocks.pop(); } @Override public void visit(BLangLockStmt lockStmt) { result = lockStmt; } @Override public void visit(BLangUnLockStmt unLockStmt) { result = unLockStmt; } @Override public void visit(BLangTransaction transactionNode) { BLangStatementExpression transactionStmtExpr = transactionDesugar.desugar(transactionNode, env); BLangExpressionStmt transactionExprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode(); transactionExprStmt.pos = transactionNode.pos; transactionExprStmt.expr = transactionStmtExpr; result = rewrite(transactionExprStmt, env); } @Override public void visit(BLangRollback rollbackNode) { BLangStatementExpression rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, env); BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(rollbackNode.pos, rollbackStmtExpr, symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt rollbackExprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode(); rollbackExprStmt.pos = rollbackNode.pos; rollbackExprStmt.expr = checkedExpr; result = rewrite(rollbackExprStmt, env); } BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix, List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, BLangFunctionBody lambdaBody) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++); lambdaFunction.function = func; func.requiredParams.addAll(lambdaFunctionVariable); func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); lambdaFunctionVariable = func.requiredParams; func.body = 
lambdaBody; func.desugared = false; lambdaFunction.pos = pos; List<BType> paramTypes = new ArrayList<>(); lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type)); lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(), null); return lambdaFunction; } private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix, List<BLangSimpleVariable> lambdaFunctionVariable, TypeNode returnType, List<BLangStatement> fnBodyStmts, SymbolEnv env, Scope trxScope) { BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode(); body.scope = trxScope; SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); body.stmts = rewriteStmt(fnBodyStmts, bodyEnv); return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body); } private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix, TypeNode returnType) { BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode(); BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++); lambdaFunction.function = func; func.setReturnTypeNode(returnType); func.desugaredReturnType = true; defineFunction(func, env.enclPkg); func.desugared = false; lambdaFunction.pos = pos; return lambdaFunction; } private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) { final BPackageSymbol packageSymbol = targetPkg.symbol; final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol); symbolEnter.defineNode(funcNode, packageEnv); packageEnv.enclPkg.functions.add(funcNode); packageEnv.enclPkg.topLevelNodes.add(funcNode); } @Override public void visit(BLangForkJoin forkJoin) { result = forkJoin; } @Override public void visit(BLangLiteral literalExpr) { if (literalExpr.type.tag == TypeTags.ARRAY && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE) { result = 
rewriteBlobLiteral(literalExpr); return; } result = literalExpr; } private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) { String[] result = getBlobTextValue((String) literalExpr.value); byte[] values; if (BASE_64.equals(result[0])) { values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8)); } else { values = hexStringToByteArray(result[1]); } BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteralNode.type = literalExpr.type; arrayLiteralNode.pos = literalExpr.pos; arrayLiteralNode.exprs = new ArrayList<>(); for (byte b : values) { arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b)); } return arrayLiteralNode; } private String[] getBlobTextValue(String blobLiteralNodeText) { String nodeText = blobLiteralNodeText.replaceAll(" ", ""); String[] result = new String[2]; result[0] = nodeText.substring(0, nodeText.indexOf('`')); result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`')); return result; } private static byte[] hexStringToByteArray(String str) { int len = str.length(); byte[] data = new byte[len / 2]; for (int i = 0; i < len; i += 2) { data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16)); } return data; } @Override public void visit(BLangListConstructorExpr listConstructor) { listConstructor.exprs = rewriteExprs(listConstructor.exprs); BLangExpression expr; if (listConstructor.type.tag == TypeTags.TUPLE) { expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } else if (listConstructor.type.tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type)); result = rewriteExpr(expr); } else if (getElementType(listConstructor.type).tag == TypeTags.JSON) { expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type); result = 
rewriteExpr(expr); } else if (listConstructor.type.tag == TypeTags.TYPEDESC) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = listConstructor.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); } else { expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type); result = rewriteExpr(expr); } } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { rewriteExprs(tableConstructorExpr.recordLiteralList); result = tableConstructorExpr; } @Override public void visit(BLangArrayLiteral arrayLiteral) { arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs); if (arrayLiteral.type.tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.type)); return; } else if (getElementType(arrayLiteral.type).tag == TypeTags.JSON) { result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.type); return; } result = arrayLiteral; } @Override public void visit(BLangTupleLiteral tupleLiteral) { if (tupleLiteral.isTypedescExpr) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = tupleLiteral.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); return; } tupleLiteral.exprs.forEach(expr -> { BType expType = expr.impConversionExpr == null ? 
expr.type : expr.impConversionExpr.type; types.setImplicitCastExpr(expr, expType, symTable.anyType); }); tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs); result = tupleLiteral; } @Override public void visit(BLangGroupExpr groupExpr) { if (groupExpr.isTypedescExpr) { final BLangTypedescExpr typedescExpr = new BLangTypedescExpr(); typedescExpr.resolvedType = groupExpr.typedescType; typedescExpr.type = symTable.typeDesc; result = rewriteExpr(typedescExpr); } else { result = rewriteExpr(groupExpr.expression); } } @Override public void visit(BLangRecordLiteral recordLiteral) { List<RecordLiteralNode.RecordField> fields = recordLiteral.fields; fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2))); result = rewriteExpr(rewriteMappingConstructor(recordLiteral)); } @Override public void visit(BLangSimpleVarRef varRefExpr) { BLangSimpleVarRef genVarRefExpr = varRefExpr; if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName); qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol; qnameExpr.localname = varRefExpr.variableName; qnameExpr.prefix = varRefExpr.pkgAlias; qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI; qnameExpr.isUsedInXML = false; qnameExpr.pos = varRefExpr.pos; qnameExpr.type = symTable.stringType; result = qnameExpr; return; } if (varRefExpr.symbol == null) { result = varRefExpr; return; } if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol; if (varSymbol.originalSymbol != null) { varRefExpr.symbol = varSymbol.originalSymbol; } } BSymbol ownerSymbol = varRefExpr.symbol.owner; if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION && varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) { genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol); } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE && !((varRefExpr.symbol.tag & 
SymTag.CONSTANT) == SymTag.CONSTANT)) { genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymbol.tag & SymTag.LET) == SymTag.LET) { genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) { genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol); } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) { if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol; if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) { BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType, constSymbol.value.value); result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type)); return; } } genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol); if (!enclLocks.isEmpty()) { enclLocks.peek().addLockVariable((BVarSymbol) varRefExpr.symbol); } } genVarRefExpr.type = varRefExpr.type; genVarRefExpr.pos = varRefExpr.pos; if ((varRefExpr.lhsVar) || genVarRefExpr.symbol.name.equals(IGNORE)) { genVarRefExpr.lhsVar = varRefExpr.lhsVar; genVarRefExpr.type = varRefExpr.symbol.type; result = genVarRefExpr; return; } genVarRefExpr.lhsVar = varRefExpr.lhsVar; BType targetType = genVarRefExpr.type; genVarRefExpr.type = genVarRefExpr.symbol.type; BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType); result = expression.impConversionExpr != null ? 
expression.impConversionExpr : expression; } @Override public void visit(BLangFieldBasedAccess fieldAccessExpr) { if (safeNavigate(fieldAccessExpr)) { result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr)); return; } BLangAccessExpression targetVarRef = fieldAccessExpr; BType varRefType = fieldAccessExpr.expr.type; fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr); if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) { fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType); } BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value); int varRefTypeTag = varRefType.tag; if (varRefTypeTag == TypeTags.OBJECT || (varRefTypeTag == TypeTags.UNION && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) { if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) { targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol); } else { boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation; if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) { BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) varRefType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc; if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) || (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) { isStoreOnCreation = true; } } targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, (BVarSymbol) fieldAccessExpr.symbol, false, isStoreOnCreation); } } else if (varRefTypeTag == 
TypeTags.RECORD || (varRefTypeTag == TypeTags.UNION && ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) { if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) { targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol); } else { targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation); } } else if (types.isLax(varRefType)) { if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) { if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) { result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr)); return; } fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType); targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit); } else { targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr); } } else if (varRefTypeTag == TypeTags.MAP) { targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, fieldAccessExpr.isStoreOnCreation); } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) { targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit, fieldAccessExpr.fieldKind); } targetVarRef.lhsVar = fieldAccessExpr.lhsVar; targetVarRef.type = fieldAccessExpr.type; targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess; result = targetVarRef; } private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangStatementExpression statementExpression = new BLangStatementExpression(); BLangBlockStmt block = new BLangBlockStmt(); statementExpression.stmt = block; BUnionType fieldAccessType = 
BUnionType.create(null, fieldAccessExpr.type, symTable.errorType); DiagnosticPos pos = fieldAccessExpr.pos; BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos); block.addStatement(result); BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol); resultRef.type = fieldAccessType; statementExpression.type = fieldAccessType; BLangLiteral mapIndex = ASTBuilderUtil.createLiteral( fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value); BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex); BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType); mapAccessExpr.type = xmlOrNil; BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos); BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol); block.addStatement(mapResult); BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block); BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType); ifStmt.expr = isLikeNilExpr; BLangBlockStmt resultNilBody = new BLangBlockStmt(); ifStmt.body = resultNilBody; BLangBlockStmt resultHasValueBody = new BLangBlockStmt(); ifStmt.elseStmt = resultHasValueBody; BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue("error"); errorInvocation.name = name; errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); errorInvocation.symbol = symTable.errorConstructor; errorInvocation.type = symTable.errorType; ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>(); errorInvocation.requiredArgs = errorCtorArgs; errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey")); BLangNamedArgsExpression message = new 
BLangNamedArgsExpression(); message.name = ASTBuilderUtil.createIdentifier(pos, "key"); message.expr = createStringLiteral(pos, fieldAccessExpr.field.value); errorCtorArgs.add(message); BLangSimpleVariableDef errorDef = createVarDef("_$_invalid_key_error", symTable.errorType, errorInvocation, pos); resultNilBody.addStatement(errorDef); BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol); BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody); errorVarAssignment.varRef = resultRef; errorVarAssignment.expr = errorRef; BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt( pos, resultHasValueBody); mapResultAssignment.varRef = resultRef; mapResultAssignment.expr = mapResultRef; statementExpression.expr = resultRef; return statementExpression; } private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) { ArrayList<BLangExpression> args = new ArrayList<>(); String fieldName = fieldAccessExpr.field.value; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess = (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr; fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName); } if (fieldName.equals("_")) { return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING, fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>()); } BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName); args.add(attributeNameLiteral); args.add(isOptionalAccessToLiteral(fieldAccessExpr)); return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args, new ArrayList<>()); } private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) { return rewrite( createLiteral(fieldAccessExpr.pos, 
symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env); } private String createExpandedQName(String nsURI, String localName) { return "{" + nsURI + "}" + localName; } @Override public void visit(BLangIndexBasedAccess indexAccessExpr) { if (safeNavigate(indexAccessExpr)) { result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr)); return; } BLangIndexBasedAccess targetVarRef = indexAccessExpr; indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr); BType varRefType = indexAccessExpr.expr.type; indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr); if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) { indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType); } if (varRefType.tag == TypeTags.MAP) { targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation); } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) { targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false); } else if (types.isSubTypeOfList(varRefType)) { targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (types.isAssignable(varRefType, symTable.stringType)) { indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType); targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr); } else if (varRefType.tag == TypeTags.TABLE) { if (targetVarRef.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY) { BLangTupleLiteral listConstructorExpr = new BLangTupleLiteral(); listConstructorExpr.exprs = ((BLangTableMultiKeyExpr) 
indexAccessExpr.indexExpr).multiKeyIndexExprs;
                List<BType> memberTypes = new ArrayList<>();
                ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs
                        .forEach(expression -> memberTypes.add(expression.type));
                listConstructorExpr.type = new BTupleType(memberTypes);
                indexAccessExpr.indexExpr = listConstructorExpr;
            }
            targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                    indexAccessExpr.indexExpr);
        }
        // Propagate lvalue-ness and the static type of the original access expression.
        targetVarRef.lhsVar = indexAccessExpr.lhsVar;
        targetVarRef.type = indexAccessExpr.type;
        result = targetVarRef;
    }

    // Desugars a table multi-key index expression by rewriting each key expression in place.
    @Override
    public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
        rewriteExprs(tableMultiKeyExpr.multiKeyIndexExprs);
        result = tableMultiKeyExpr;
    }

    // Desugars an invocation expression. Error constructors and functional constructors are
    // redirected to their lang.__internal implementations before the common invocation rewrite.
    @Override
    public void visit(BLangInvocation iExpr) {
        if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
            result = rewriteErrorConstructor(iExpr);
        } else if (iExpr.symbol.kind == SymbolKind.FUNCTIONAL_CONSTRUCTOR) {
            // NOTE(review): iExpr.symbol is null-checked only in the branch above; if the symbol
            // can be null here this dereference would NPE -- confirm upstream guarantees non-null.
            // Maps a constructor name such as "Foo" to the internal method name "fooCtor".
            String name = ((BConstructorSymbol) iExpr.symbol).name.value;
            String internalMethodName = name.substring(0, 1).toLowerCase() + name.substring(1) + "Ctor";
            BSymbol bSymbol = symResolver.lookupLangLibMethodInModule(
                    symTable.langInternalModuleSymbol, names.fromString(internalMethodName));
            iExpr.symbol = bSymbol;
        }
        rewriteInvocation(iExpr, false);
    }

    @Override
    public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
        rewriteInvocation(actionInvocation, actionInvocation.async);
    }

    // Common invocation desugaring: reorders named/defaultable arguments, rewrites argument
    // expressions, materializes parameterized return types, and converts calls with an
    // object/record receiver into attached-function invocations.
    private void rewriteInvocation(BLangInvocation invocation, boolean async) {
        BLangInvocation invRef = invocation;
        if (!enclLocks.isEmpty()) {
            // Inside a lock statement: the callee's global-variable dependencies must be
            // locked together with the explicitly locked variables.
            BLangLockStmt lock = enclLocks.peek();
            lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
        }
        reorderArguments(invocation);
        invocation.requiredArgs = rewriteExprs(invocation.requiredArgs);
        fixNonRestArgTypeCastInTypeParamInvocation(invocation);
        invocation.restArgs = rewriteExprs(invocation.restArgs);
        annotationDesugar.defineStatementAnnotations(invocation.annAttachments,
                invocation.pos, invocation.symbol.pkgID, invocation.symbol.owner, env);
        if (invocation.functionPointerInvocation) {
            visitFunctionPointerInvocation(invocation);
            return;
        }
        invocation.expr = rewriteExpr(invocation.expr);
        result = invRef;
        BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
        if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
            // Resolve a parameterized (dependently-typed) return type to a concrete type.
            BType retType = typeBuilder.build(invSym.retType);
            invocation.type = retType;
        }
        if (invocation.expr == null) {
            fixTypeCastInTypeParamInvocation(invocation, invRef);
            if (invocation.exprSymbol == null) {
                return;
            }
            invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
            invocation.expr = rewriteExpr(invocation.expr);
        }
        switch (invocation.expr.type.tag) {
            case TypeTags.OBJECT:
            case TypeTags.RECORD:
                if (!invocation.langLibInvocation) {
                    // Attached function call: the receiver becomes the first argument.
                    List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                    argExprs.add(0, invocation.expr);
                    BLangAttachedFunctionInvocation attachedFunctionInvocation =
                            new BLangAttachedFunctionInvocation(invocation.pos, argExprs,
                                    invocation.restArgs, invocation.symbol, invocation.type,
                                    invocation.expr, async);
                    attachedFunctionInvocation.name = invocation.name;
                    attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                    result = invRef = attachedFunctionInvocation;
                }
                break;
        }
        fixTypeCastInTypeParamInvocation(invocation, invRef);
    }

    // For lang-lib invocations, casts every non-receiver required argument to the declared
    // parameter type (index 0 is skipped -- presumably the receiver; confirm at call sites).
    private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
        if (!iExpr.langLibInvocation) {
            return;
        }
        List<BLangExpression> requiredArgs = iExpr.requiredArgs;
        List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
        for (int i = 1; i < requiredArgs.size(); i++) {
            requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type));
        }
    }

    // For lang-lib invocations or callees whose declared return type contains type
    // parameters, casts the invocation back to its originally inferred type.
    private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
        if (iExpr.langLibInvocation
                || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) {
            BType originalInvType = genIExpr.type;
            genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType;
            BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType);
            // addConversionExprIfRequired() may itself produce a cast; reuse it when it does.
            if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
                this.result = expr;
                return;
            }
            BLangTypeConversionExpr conversionExpr =
                    (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
            conversionExpr.expr = genIExpr;
            conversionExpr.targetType = originalInvType;
            conversionExpr.type = originalInvType;
            conversionExpr.pos = genIExpr.pos;
            this.result = conversionExpr;
        }
    }

    // Desugars error(...) constructor calls: coerces the reason argument to string and
    // collects the named arguments into the error's detail record.
    private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) {
        BLangExpression reasonExpr = iExpr.requiredArgs.get(0);
        if (reasonExpr.impConversionExpr != null
                && reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) {
            // Drop a non-string implicit conversion so the reason is converted to string below.
            reasonExpr.impConversionExpr = null;
        }
        reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType);
        reasonExpr = rewriteExpr(reasonExpr);
        iExpr.requiredArgs.remove(0);
        iExpr.requiredArgs.add(reasonExpr);
        BLangExpression errorDetail;
        BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos,
                ((BErrorType) iExpr.symbol.type).detailType);
        List<BLangExpression> namedArgs = iExpr.requiredArgs.stream()
                .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR)
                .collect(Collectors.toList());
        if (namedArgs.isEmpty()) {
            errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type);
        } else {
            for (BLangExpression arg : namedArgs) {
                BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
                BLangRecordLiteral.BLangRecordKeyValueField member =
                        new BLangRecordLiteral.BLangRecordKeyValueField();
                member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(
                        namedArg.name.pos, symTable.stringType, namedArg.name.value));
                if (recordLiteral.type.tag == TypeTags.RECORD) {
                    member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
                } else {
                    member.valueExpr = addConversionExprIfRequired(namedArg.expr,
namedArg.expr.type);
                }
                recordLiteral.fields.add(member);
                iExpr.requiredArgs.remove(arg);
            }
            errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                    ((BErrorType) iExpr.symbol.type).detailType);
        }
        iExpr.requiredArgs.add(errorDetail);
        return iExpr;
    }

    // Desugars `new` expressions: stream inits get a dedicated construct-stream call,
    // everything else (object inits) goes through desugarObjectTypeInit().
    public void visit(BLangTypeInit typeInitExpr) {
        if (typeInitExpr.type.tag == TypeTags.STREAM) {
            result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
        } else {
            result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
        }
    }

    // Desugars an object type-init into a block statement expression roughly of the form:
    //   $obj$ = <new object>; $temp$ = $obj$.init(...);
    //   if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
    // When the generated init function returns nil, the error branch is skipped entirely.
    private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
        typeInitExpr.desugared = true;
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
        BType objType = getObjectType(typeInitExpr.type);

        // var $obj$ = new;
        BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
        BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
        blockStmt.addStatement(objVarDef);
        typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
        typeInitExpr.initInvocation.symbol =
                ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;

        if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
            // init() cannot fail: evaluate it for its side effects and yield $obj$.
            BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
            initInvExpr.expr = typeInitExpr.initInvocation;
            typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value;
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
            stmtExpr.type = objVarRef.symbol.type;
            return stmtExpr;
        }

        // var $temp$ = $obj$.init(...);
        BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$",
                typeInitExpr.initInvocation.type, typeInitExpr.initInvocation, typeInitExpr.pos);
        blockStmt.addStatement(initInvRetValVarDef);

        // T $result$;
        BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
        blockStmt.addStatement(resultVarDef);

        // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
        BLangSimpleVarRef initRetValVarRefInCondition = ASTBuilderUtil.createVariableRef(typeInitExpr.pos,
                initInvRetValVarDef.var.symbol);
        BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
        BLangTypeTestExpr isErrorTest = ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos,
                initRetValVarRefInCondition, getErrorTypeNode());
        isErrorTest.type = symTable.booleanType;
        BLangSimpleVarRef thenInitRetValVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos,
                initInvRetValVarDef.var.symbol);
        BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos,
                resultVarDef.var.symbol);
        BLangAssignment errAssignment = ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos,
                thenResultVarRef, thenInitRetValVarRef);
        thenStmt.addStatement(errAssignment);
        BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos,
                resultVarDef.var.symbol);
        BLangAssignment objAssignment = ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos,
                elseResultVarRef, objVarRef);
        BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
        elseStmt.addStatement(objAssignment);
        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
        blockStmt.addStatement(ifelse);

        BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
        stmtExpr.type = resultVarRef.symbol.type;
        return stmtExpr;
    }

    // Desugars `new stream<T, E>(iterator)` into a lang.__internal construct-stream call
    // carrying the constraint typedesc and the iterator object.
    private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
        BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
                .lookup(Names.CONSTRUCT_STREAM).symbol;
        BType targetType = ((BStreamType) typeInitExpr.type).constraint;
        BType errorType = ((BStreamType) typeInitExpr.type).error;
        BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = targetType;
        typedescExpr.type = typedescType;
        BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0);
        BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
                typeInitExpr.pos, symbol, new ArrayList<>(Lists.of(typedescExpr, iteratorObj)), symResolver);
        streamConstructInvocation.type = new BStreamType(TypeTags.STREAM, targetType, errorType, null);
        return streamConstructInvocation;
    }

    // Creates a `$name$` variable definition, reusing an existing symbol from the current
    // scope when one is already defined for the given name.
    private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
        BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
        if (objSym == null || objSym == symTable.notFoundSymbol) {
            objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                    this.env.scope.owner);
        }
        BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr,
                (BVarSymbol) objSym);
        BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos);
        objVarDef.var = objVar;
        objVarDef.type = objVar.type;
        return objVarDef;
    }

    // Extracts the object type from a plain object type or from the object member of a
    // union (e.g. `T|error` produced by a failable init); throws for any other type.
    private BType getObjectType(BType type) {
        if (type.tag == TypeTags.OBJECT) {
            return type;
        } else if (type.tag == TypeTags.UNION) {
            return ((BUnionType) type).getMemberTypes().stream()
                    .filter(t -> t.tag == TypeTags.OBJECT)
                    .findFirst()
                    .orElse(symTable.noType);
        }
        throw new IllegalStateException("None object type '" + type.toString() +
                "' found in object init context");
    }

    // Returns a fresh AST node for the built-in `error` type.
    BLangErrorType getErrorTypeNode() {
        BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
        errorTypeNode.type = symTable.errorType;
        return errorTypeNode;
    }

    @Override
    public void visit(BLangTernaryExpr ternaryExpr) {
        /*
         * First desugar to if-else:
         *
         * T $result$;
         * if () {
         *    $result$ = thenExpr;
         * } else {
         *    $result$ = elseExpr;
         * }
         *
         */
        BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null,
                ternaryExpr.pos);
        BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
        BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
        BLangSimpleVarRef
thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
        BLangAssignment thenAssignment = ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos,
                thenResultVarRef, ternaryExpr.thenExpr);
        thenBody.addStatement(thenAssignment);
        BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos,
                resultVarDef.var.symbol);
        BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos,
                elseResultVarRef, ternaryExpr.elseExpr);
        elseBody.addStatement(elseAssignment);
        BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos,
                resultVarDef.var.symbol);
        BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos,
                Lists.of(resultVarDef, ifElse));
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
        stmtExpr.type = ternaryExpr.type;
        result = rewriteExpr(stmtExpr);
    }

    // Desugars a wait expression; alternative waits (`wait a | b`) arrive as binary
    // expressions and are flattened into a list of waited-on expressions.
    @Override
    public void visit(BLangWaitExpr waitExpr) {
        if (waitExpr.getExpression().getKind() == NodeKind.BINARY_EXPR) {
            waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(),
                    new ArrayList<>());
        } else {
            waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
        }
        result = waitExpr;
    }

    // Recursively flattens a binary wait expression tree into `exprs` (left-to-right).
    private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
        visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
        visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
        return exprs;
    }

    // Helper of collectAllBinaryExprs(): recurses on nested binaries, rewrites leaf exprs.
    private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
        if (expr.getKind() == NodeKind.BINARY_EXPR) {
            collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
        } else {
            expr = rewriteExpr(expr);
            exprs.add(expr);
        }
    }

    // Desugars `wait {a: f1, b: f2}` into a wait-literal after rewriting each key/value.
    @Override
    public void visit(BLangWaitForAllExpr waitExpr) {
        waitExpr.keyValuePairs.forEach(keyValue -> {
            if (keyValue.valueExpr != null) {
                keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
            } else {
                keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
            }
        });
        BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
        result = rewriteExpr(expr);
    }

    // Desugars `trap expr`; non-nil trapped expressions are cast to the trap's result type.
    @Override
    public void visit(BLangTrapExpr trapExpr) {
        trapExpr.expr = rewriteExpr(trapExpr.expr);
        if (trapExpr.expr.type.tag != TypeTags.NIL) {
            trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type);
        }
        result = trapExpr;
    }

    // Desugars binary expressions: range operators become int-range constructions, logical
    // and/or get short-circuit handling, and operands of differing numeric/string/xml
    // types are reconciled with explicit casts.
    @Override
    public void visit(BLangBinaryExpr binaryExpr) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
            if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
                // Half-open range: adjust the end bound (presumably to make it inclusive --
                // see getModifiedIntRangeEndExpr) and reuse the closed-range lowering.
                binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
            }
            result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
            return;
        }

        if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
            visitBinaryLogicalExpr(binaryExpr);
            return;
        }

        OperatorKind binaryOpKind = binaryExpr.opKind;
        if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
                binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
                binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
                binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
            checkByteTypeIncompatibleOperations(binaryExpr);
        }

        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
        binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
        result = binaryExpr;

        int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
        int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;

        // (In)equality between int and byte: normalize the byte side to int.
        if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
                binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
                binaryExpr.opKind == OperatorKind.REF_EQUAL ||
                binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
            if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
                binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
                return;
            }
            if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
                binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
                return;
            }
        }

        if (lhsExprTypeTag == rhsExprTypeTag) {
            return;
        }

        // string + xml concatenation: wrap the string side in an xml text literal;
        // otherwise cast the other operand to the string side's type.
        if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
            if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
                binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                        binaryExpr.lhsExpr.pos, symTable.xmlType);
                return;
            }
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
            return;
        }

        if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
            if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
                binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                        binaryExpr.rhsExpr.pos, symTable.xmlType);
                return;
            }
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
            return;
        }

        // Mixed numeric operands: widen toward decimal first, then float.
        if (lhsExprTypeTag == TypeTags.DECIMAL) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
            return;
        }

        if (rhsExprTypeTag == TypeTags.DECIMAL) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
            return;
        }

        if (lhsExprTypeTag == TypeTags.FLOAT) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
            return;
        }

        if (rhsExprTypeTag == TypeTags.FLOAT) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        }
    }

    // Builds a lang.__internal createIntRange(lhs, rhs) invocation.
    private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr,
                                                BLangExpression rhsExpr) {
        BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
                .lookup(Names.CREATE_INT_RANGE).symbol;
        BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, symbol,
                new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
createIntRangeInvocation.type = symTable.intRangeType;
        return createIntRangeInvocation;
    }

    // For arithmetic/bitwise operators whose expected result type is int, promotes byte
    // operands to int with explicit conversions.
    private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
        if (binaryExpr.expectedType == null) {
            return;
        }

        int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
        int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
        if (rhsExprTypeTag != TypeTags.BYTE && lhsExprTypeTag != TypeTags.BYTE) {
            return;
        }

        int resultTypeTag = binaryExpr.expectedType.tag;
        if (resultTypeTag == TypeTags.INT) {
            if (rhsExprTypeTag == TypeTags.BYTE) {
                binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
            }
            if (lhsExprTypeTag == TypeTags.BYTE) {
                binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
            }
        }
    }

    /**
     * This method checks whether given binary expression is related to shift operation.
     * If its true, then both lhs and rhs of the binary expression will be converted to 'int' type.
     * <p>
     * byte a = 12;
     * byte b = 34;
     * int i = 234;
     * int j = -4;
     * <p>
     * true: where binary expression's expected type is 'int'
     * int i1 = a &gt;&gt; b;
     * int i2 = a &lt;&lt; b;
     * int i3 = a &gt;&gt; i;
     * int i4 = a &lt;&lt; i;
     * int i5 = i &gt;&gt; j;
     * int i6 = i &lt;&lt; j;
     */
    // NOTE(review): no caller of this helper is visible in this region -- confirm it is
    // still referenced elsewhere in the file.
    private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
        return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||
                binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||
                binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;
    }

    // Desugars `lhs ?: rhs` into a match expression with a nil pattern yielding rhs.
    public void visit(BLangElvisExpr elvisExpr) {
        BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
        matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
                rewriteExpr(elvisExpr.rhsExpr)));
        matchExpr.type = elvisExpr.type;
        matchExpr.pos = elvisExpr.pos;
        result = rewriteExpr(matchExpr);
    }

    @Override
    public void visit(BLangUnaryExpr unaryExpr) {
        if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
            // Desugar `~a` into `a ^ -1` (or `a ^ 0xff` for byte).
            rewriteBitwiseComplementOperator(unaryExpr);
            return;
        }
        unaryExpr.expr = rewriteExpr(unaryExpr.expr);
        result = unaryExpr;
    }

    /**
     * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.
     * Example : ~a -&gt; a ^ -1;
     * ~ 11110011 -&gt; 00001100
     * 11110011 ^ 11111111 -&gt; 00001100
     *
     * @param unaryExpr the bitwise complement expression
     */
    private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
        final DiagnosticPos pos = unaryExpr.pos;
        final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
        binaryExpr.pos = pos;
        binaryExpr.opKind = OperatorKind.BITWISE_XOR;
        binaryExpr.lhsExpr = unaryExpr.expr;
        if (TypeTags.BYTE == unaryExpr.type.tag) {
            // byte complement: xor with 0xff keeps the result within 8 bits.
            binaryExpr.type = symTable.byteType;
            binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
            binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                    symTable.byteType, symTable.byteType);
        } else {
            // int complement: xor with -1 (all bits set).
            binaryExpr.type = symTable.intType;
            binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
            binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                    symTable.intType, symTable.intType);
        }
        result = rewriteExpr(binaryExpr);
    }

    // Desugars a type conversion expression; annotated casts with no type node reduce to
    // the inner expression, and xml:Text -> string goes through a langlib call.
    @Override
    public void visit(BLangTypeConversionExpr conversionExpr) {
        if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
            result = rewriteExpr(conversionExpr.expr);
            return;
        }
        conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
        if (conversionExpr.type.tag == TypeTags.STRING && conversionExpr.expr.type.tag == TypeTags.XML_TEXT) {
            result = convertXMLTextToString(conversionExpr);
            return;
        }
        conversionExpr.expr = rewriteExpr(conversionExpr.expr);
        result = conversionExpr;
    }

    // Converts an xml:Text value to string via the XML_GET_CONTENT_OF_TEXT langlib
    // function, wrapped in a block statement expression with a temp variable.
    private BLangExpression convertXMLTextToString(BLangTypeConversionExpr conversionExpr) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(conversionExpr.pos, XML_GET_CONTENT_OF_TEXT,
                conversionExpr.expr, new ArrayList<>(), new ArrayList<>());
        BLangSimpleVariableDef tempVarDef = createVarDef("$$__xml_string__$$", conversionExpr.targetType,
                invocationNode, conversionExpr.pos);
        BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(conversionExpr.pos, tempVarDef.var.symbol);
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(conversionExpr.pos);
        blockStmt.addStatement(tempVarDef);
        BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
        stmtExpr.type = conversionExpr.type;
        return rewrite(stmtExpr, env);
    }

    @Override
    public void visit(BLangLambdaFunction bLangLambdaFunction) {
        // Register the lambda with the enclosing package; the function itself is rewritten
        // where it is defined.
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
        result = bLangLambdaFunction;
    }

    // Desugars an arrow function into a full lambda function with its own symbol, scope
    // and invokable type, then registers it with the enclosing package.
    @Override
    public void visit(BLangArrowFunction bLangArrowFunction) {
        BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
        bLangFunction.setName(bLangArrowFunction.functionName);

        BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
        lambdaFunction.pos = bLangArrowFunction.pos;
        bLangFunction.addFlag(Flag.LAMBDA);
        lambdaFunction.function = bLangFunction;

        // The return type is taken from the arrow expression body's inferred type.
        BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
        returnType.type = bLangArrowFunction.body.expr.type;
        bLangFunction.setReturnTypeNode(returnType);
        bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));

        bLangArrowFunction.params.forEach(bLangFunction::addParameter);
        lambdaFunction.parent = bLangArrowFunction.parent;
        lambdaFunction.type = bLangArrowFunction.funcType;

        // Create the function symbol and define it in a fresh function env.
        BLangFunction funcNode = lambdaFunction.function;
        BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
                env.enclEnv.enclVarSym, true);
        SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
        defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);

        List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
            Scope enclScope =
invokableEnv.scope;
            varNode.symbol.kind = SymbolKind.FUNCTION;
            varNode.symbol.owner = invokableEnv.scope.owner;
            enclScope.define(varNode.symbol.name, varNode.symbol);
        }).map(varNode -> varNode.symbol).collect(Collectors.toList());

        funcSymbol.params = paramSymbols;
        funcSymbol.restParam = getRestSymbol(funcNode);
        funcSymbol.retType = funcNode.returnTypeNode.type;

        // Build the invokable type of the generated function from its parameter symbols.
        List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
        funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol),
                funcNode.returnTypeNode.type, null);

        lambdaFunction.function.pos = bLangArrowFunction.pos;
        lambdaFunction.function.body.pos = bLangArrowFunction.pos;
        // Capture the enclosing environment for closure handling.
        lambdaFunction.capturedClosureEnv = env;
        rewrite(lambdaFunction.function, env);
        env.enclPkg.addFunction(lambdaFunction.function);
        bLangArrowFunction.function = lambdaFunction.function;
        result = rewriteExpr(lambdaFunction);
    }

    // Wires an invokable node to its symbol and gives the symbol a fresh scope.
    private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                       SymbolEnv invokableEnv) {
        invokableNode.symbol = funcSymbol;
        funcSymbol.scope = new Scope(funcSymbol);
        invokableEnv.scope = funcSymbol.scope;
    }

    @Override
    public void visit(BLangXMLQName xmlQName) {
        result = xmlQName;
    }

    @Override
    public void visit(BLangXMLAttribute xmlAttribute) {
        xmlAttribute.name = rewriteExpr(xmlAttribute.name);
        xmlAttribute.value = rewriteExpr(xmlAttribute.value);
        result = xmlAttribute;
    }

    // Desugars an XML element literal; namespace-declaring attributes are additionally
    // collected as inline namespace declarations on the element.
    @Override
    public void visit(BLangXMLElementLiteral xmlElementLiteral) {
        xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
        xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
        xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
        xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);

        Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
        while (attributesItr.hasNext()) {
            BLangXMLAttribute attribute = attributesItr.next();
            if (!attribute.isNamespaceDeclr) {
                continue;
            }
            // Package-level elements get package-scoped namespace nodes; locals get local ones.
            BLangXMLNS xmlns;
            if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
                xmlns = new BLangPackageXMLNS();
            } else {
                xmlns = new BLangLocalXMLNS();
            }
            xmlns.namespaceURI = attribute.value.concatExpr;
            xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
            xmlns.symbol = attribute.symbol;
            xmlElementLiteral.inlineNamespaces.add(xmlns);
        }
        result = xmlElementLiteral;
    }

    @Override
    public void visit(BLangXMLTextLiteral xmlTextLiteral) {
        xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
        result = xmlTextLiteral;
    }

    @Override
    public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
        xmlCommentLiteral.concatExpr = rewriteExpr(
                constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
        result = xmlCommentLiteral;
    }

    @Override
    public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
        xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
        xmlProcInsLiteral.dataConcatExpr =
                rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
        result = xmlProcInsLiteral;
    }

    @Override
    public void visit(BLangXMLQuotedString xmlQuotedString) {
        xmlQuotedString.concatExpr = rewriteExpr(
                constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
        result = xmlQuotedString;
    }

    @Override
    public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
        result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
    }

    /**
     * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
     * def is generated from the object type. The type init expression creates an instance of this generated object
     * type. For example, consider the following statements:
     *      string name = "Pubudu";
     *      'object:RawTemplate rt = `Hello ${name}!`;
     *
     * The raw template literal above is desugared to:
     *      type RawTemplate$Impl$0 object {
     *          public string[] strings = ["Hello ", "!"];
     *          public (any|error)[] insertions;
     *
     *          function init((any|error)[] insertions) {
     *              self.insertions = insertions;
     *          }
     *      };
     *
     *      'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
     *
     * @param rawTemplateLiteral The raw template literal to be desugared.
     */
    @Override
    public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
        DiagnosticPos pos = rawTemplateLiteral.pos;
        BObjectType objType = (BObjectType) rawTemplateLiteral.type;
        BLangTypeDefinition objClassDef =
                desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
        BObjectType classObjType = (BObjectType) objClassDef.type;

        // Build the insertions list passed to the generated class' init().
        BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
        BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
        insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
        insertionsList.expectedType = insertionsSym.type;

        // Instantiate the generated object class with the insertions.
        BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
        typeNewExpr.argsExpr.add(insertionsList);
        typeNewExpr.initInvocation.argExprs.add(insertionsList);
        typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
        result = rewriteExpr(typeNewExpr);
    }

    /**
     * This method desugars a raw template literal object class for the provided raw template object type as follows.
* A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
     * is desugared to,
     *      type $anonType$0 object {
     *          public string[] strings = ["Hello ", "!"];
     *          public (any|error)[] insertions;
     *
     *          function init((any|error)[] insertions) {
     *              self.insertions = insertions;
     *          }
     *      };
     *
     * @param strings    The string portions of the literal
     * @param objectType The abstract object type for which an object class needs to be generated
     * @param pos        The diagnostic position info for the type node
     * @return Returns the generated concrete object class def
     */
    private BLangTypeDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings,
                                                                    BObjectType objectType, DiagnosticPos pos) {
        BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
        Name objectClassName = names.fromString(anonModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
        // The generated class is concrete: clear the ABSTRACT flag of the source object type.
        final int updatedFlags = Flags.unset(tSymbol.flags, Flags.ABSTRACT);
        BObjectTypeSymbol classTSymbol = (BObjectTypeSymbol) Symbols
                .createObjectSymbol(updatedFlags, objectClassName, env.enclPkg.packageID, null, env.enclPkg.symbol);
        BObjectType objectClassType = new BObjectType(classTSymbol, updatedFlags);
        objectClassType.fields = objectType.fields;
        classTSymbol.type = objectClassType;

        // Create the object type node and a type definition for the concrete class.
        BLangObjectTypeNode objectClassNode = TypeDefBuilderHelper.createObjectTypeNode(objectClassType, pos);
        BLangTypeDefinition typeDef = TypeDefBuilderHelper.addTypeDefinition(objectClassType,
                objectClassType.tsymbol, objectClassNode, env);
        typeDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
        typeDef.pos = pos;

        // Initialize the `strings` field with the literal's string parts.
        BType stringsType = objectClassType.fields.get("strings").symbol.type;
        BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
        stringsList.exprs.addAll(strings);
        stringsList.expectedType = stringsType;
        objectClassNode.fields.get(0).expr = stringsList;

        // Generate the user-defined init() and register it with the package.
        BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(objectClassNode, env);
        objectClassNode.initFunction = userDefinedInitFunction;
        env.enclPkg.functions.add(userDefinedInitFunction);
        env.enclPkg.topLevelNodes.add(userDefinedInitFunction);

        // Generate the compiler-generated init(), analyzed in a fresh function env.
        BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectClassNode, env);
        tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                tempGeneratedInitFunction.symbol.scope, env);
        this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
        objectClassNode.generatedInitFunction = tempGeneratedInitFunction;
        env.enclPkg.functions.add(objectClassNode.generatedInitFunction);
        env.enclPkg.topLevelNodes.add(objectClassNode.generatedInitFunction);

        return rewrite(typeDef, env);
    }

    /**
     * Creates a user-defined init() method for the provided object type node. If there are fields without default
     * values specified in the type node, this will add parameters for those fields in the init() method and assign the
     * param values to the respective fields in the method body.
     *
     * @param objectTypeNode The object type node for which the init() method is generated
     * @param env            The symbol env for the object type node
     * @return The generated init() method
     */
    private BLangFunction createUserDefinedObjectInitFn(BLangObjectTypeNode objectTypeNode, SymbolEnv env) {
        BLangFunction initFunction = TypeDefBuilderHelper.createInitFunctionForStructureType(objectTypeNode, env,
                Names.USER_DEFINED_INIT_SUFFIX, names, symTable);
        BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) objectTypeNode.type.tsymbol);
        typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
                (BInvokableType) initFunction.type);
        objectTypeNode.initFunction = initFunction;
        initFunction.returnTypeNode.type = symTable.nilType;

        BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
        BInvokableType initFnType = (BInvokableType) initFunction.type;
        for (BLangSimpleVariable field : objectTypeNode.fields) {
            if (field.expr != null) {
                // Fields with default values need no init() parameter.
                continue;
            }
            BVarSymbol fieldSym = field.symbol;
            // NOTE(review): uses this.env (the field), not the `env` parameter, for the
            // parameter symbol's owner pkgID -- confirm the shadowing is intentional.
            BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID,
                    fieldSym.type, initFunction.symbol);
            BLangSimpleVariable param = ASTBuilderUtil.createVariable(objectTypeNode.pos, fieldSym.name.value,
                    fieldSym.type, null, paramSym);
            param.flagSet.add(Flag.FINAL);
            initFunction.symbol.scope.define(paramSym.name, paramSym);
            initFunction.symbol.params.add(paramSym);
            initFnType.paramTypes.add(param.type);
            initFunction.requiredParams.add(param);

            // self.<field> = <param>;
            BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
            BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.type,
                    initFunction.receiver.symbol, field.name);
            initFuncBody.addStatement(fieldInit);
        }

        return initFunction;
    }

    // Desugars a worker send: the sent value is cloned so the receiver gets its own copy.
    @Override
    public void visit(BLangWorkerSend workerSendNode) {
        workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type);
        if (workerSendNode.keyExpr != null) {
            workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
        }
        result = workerSendNode;
    }

    @Override
    public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
        // Clone the sent value, same as the async send above.
        syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
        result = syncSendExpr;
    }

    @Override
    public void visit(BLangWorkerReceive workerReceiveNode) {
        if (workerReceiveNode.keyExpr != null) {
            workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
        }
        result = workerReceiveNode;
    }

    // Collects the distinct worker names of the cached send statements to be flushed.
    @Override
    public void visit(BLangWorkerFlushExpr workerFlushExpr) {
        workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
                .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
        result = workerFlushExpr;
    }

    // Desugars `transactional` into a call to the transaction module's isTransactional().
    @Override
    public void visit(BLangTransactionalExpr transactionalExpr) {
        BInvokableSymbol isTransactionalSymbol = (BInvokableSymbol) symResolver.
lookupSymbolInMainSpace(symTable.pkgEnvMap.get(getTransactionSymbol(env)), IS_TRANSACTIONAL);
        result = ASTBuilderUtil
                .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol,
                        Collections.emptyList(), Collections.emptyList(), symResolver);
    }

    // Desugars `commit` via the transaction desugar helper.
    @Override
    public void visit(BLangCommitExpr commitExpr) {
        BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
        result = rewriteExpr(stmtExpr);
    }

    // Desugars xml attribute access; lvalue and indexed accesses are returned as-is for
    // later handling, plain attribute accesses are rewritten once more.
    @Override
    public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
        xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
        xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);

        if (xmlAttributeAccessExpr.indexExpr != null
                && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
            ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
        }

        xmlAttributeAccessExpr.desugared = true;
        if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
            result = xmlAttributeAccessExpr;
        } else {
            result = rewriteExpr(xmlAttributeAccessExpr);
        }
    }

    // The var-ref, access and literal nodes below are already in their final desugared
    // form; their visitors simply return the node unchanged.

    @Override
    public void visit(BLangLocalVarRef localVarRef) {
        result = localVarRef;
    }

    @Override
    public void visit(BLangFieldVarRef fieldVarRef) {
        result = fieldVarRef;
    }

    @Override
    public void visit(BLangPackageVarRef packageVarRef) {
        result = packageVarRef;
    }

    @Override
    public void visit(BLangFunctionVarRef functionVarRef) {
        result = functionVarRef;
    }

    @Override
    public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
        result = fieldAccessExpr;
    }

    @Override
    public void visit(BLangStructFunctionVarRef functionVarRef) {
        result = functionVarRef;
    }

    @Override
    public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
        result = mapKeyAccessExpr;
    }

    @Override
    public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
        result = arrayIndexAccessExpr;
    }

    @Override
    public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
        result = arrayIndexAccessExpr;
    }

    @Override
    public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
        result = tableKeyAccessExpr;
    }

    @Override
    public void visit(BLangMapLiteral mapLiteral) {
        result = mapLiteral;
    }

    @Override
    public void visit(BLangStructLiteral structLiteral) {
        result = structLiteral;
    }

    @Override
    public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
        result = waitLiteral;
    }

    // Desugars `x.<elem>` element access into a langlib getElements() call, passing the
    // expanded filter names as rest args.
    @Override
    public void visit(BLangXMLElementAccess xmlElementAccess) {
        xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
        ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
                xmlElementAccess.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    }

    // Expands element-name filters into fully-expanded QName string literals, applying the
    // default namespace when a filter has no resolvable prefix (wildcards are kept as-is).
    private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
        Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
        BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
        String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;

        ArrayList<BLangExpression> args = new ArrayList<>();
        for (BLangXMLElementFilter filter : filters) {
            BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
            if (nsSymbol == symTable.notFoundSymbol) {
                if (defaultNS != null && !filter.name.equals("*")) {
                    // Unprefixed non-wildcard names pick up the default namespace.
                    String expandedName = createExpandedQName(defaultNS, filter.name);
                    args.add(createStringLiteral(filter.elemNamePos, expandedName));
                } else {
                    args.add(createStringLiteral(filter.elemNamePos, filter.name));
                }
            } else {
                BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
                String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
                BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
                args.add(stringLiteral);
            }
        }
        return args;
    }

    // Builds an invocation of an xml langlib function on `invokeOnExpr`, with the receiver
    // passed as the first required argument.
    private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName,
                                                       BLangExpression invokeOnExpr,
                                                       ArrayList<BLangExpression> args,
                                                       ArrayList<BLangExpression> restArgs) {
        invokeOnExpr = rewriteExpr(invokeOnExpr);

        BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
        invocationNode.pos = pos;
        BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        name.setLiteral(false);
        name.setValue(functionName);
        name.pos = pos;
        invocationNode.name = name;
        invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

        invocationNode.expr = invokeOnExpr;
        invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));

        ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
        requiredArgs.add(invokeOnExpr);
        requiredArgs.addAll(args);
        invocationNode.requiredArgs = requiredArgs;
        invocationNode.restArgs = rewriteExprs(restArgs);
        invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType();
        invocationNode.langLibInvocation = true;
        return invocationNode;
    }

    @Override
    public void visit(BLangXMLNavigationAccess xmlNavigation) {
xmlNavigation.expr = rewriteExpr(xmlNavigation.expr); xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex); ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) { BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters); result = rewriteExpr(invocationNode); } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN, xmlNavigation.expr, new ArrayList<>(), new ArrayList<>()); result = rewriteExpr(invocationNode); } else { BLangExpression childIndexExpr; if (xmlNavigation.childIndex == null) { childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType); } else { childIndexExpr = xmlNavigation.childIndex; } ArrayList<BLangExpression> args = new ArrayList<>(); args.add(rewriteExpr(childIndexExpr)); BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters); result = rewriteExpr(invocationNode); } } @Override public void visit(BLangIsAssignableExpr assignableExpr) { assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr); result = assignableExpr; } @Override public void visit(BFunctionPointerInvocation fpInvocation) { result = fpInvocation; } @Override public void visit(BLangTypedescExpr typedescExpr) { typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env); result = typedescExpr; } @Override public void visit(BLangIntRangeExpression intRangeExpression) { if (!intRangeExpression.includeStart) { intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr); } if (!intRangeExpression.includeEnd) { intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr); } 
intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr); intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr); result = intRangeExpression; } @Override public void visit(BLangRestArgsExpression bLangVarArgsExpression) { result = rewriteExpr(bLangVarArgsExpression.expr); } @Override public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) { bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr); result = bLangNamedArgsExpression.expr; } @Override public void visit(BLangMatchExpression bLangMatchExpression) { addMatchExprDefaultCase(bLangMatchExpression); String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result"; BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName, bLangMatchExpression.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID, bLangMatchExpression.type, this.env.scope.owner)); BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar); tempResultVarDef.desugared = true; BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef)); List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>(); for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) { BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i); pattern.expr = rewriteExpr(pattern.expr); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol); pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type); BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr); BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt)); 
patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody)); } stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr, patternClauses)); BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol); BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef); statementExpr.type = bLangMatchExpression.type; result = rewriteExpr(statementExpr); } @Override public void visit(BLangCheckedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr, false); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr, true); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) { String checkedExprVarName = GEN_VAR_PREFIX.value; BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0, names.fromString(checkedExprVarName), this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner)); BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar); checkedExprVarDef.desugared = true; BLangMatchTypedBindingPatternClause patternSuccessCase = getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true, checkedExprVar.symbol, null); BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos, this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic); BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr, new ArrayList<BLangMatchTypedBindingPatternClause>() {{ add(patternSuccessCase); add(patternErrorCase); }}); BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos, new ArrayList<BLangStatement>() {{ add(checkedExprVarDef); 
add(matchStmt); }}); BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef( checkedExpr.pos, checkedExprVar.symbol); BLangStatementExpression statementExpr = createStatementExpression( generatedStmtBlock, tempCheckedExprVarRef); statementExpr.type = checkedExpr.type; result = rewriteExpr(statementExpr); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos, serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type); serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = rewriteExpr(typeInit); } @Override public void visit(BLangTypeTestExpr typeTestExpr) { BLangExpression expr = typeTestExpr.expr; if (types.isValueType(expr.type)) { addConversionExprIfRequired(expr, symTable.anyType); } typeTestExpr.expr = rewriteExpr(expr); typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env); result = typeTestExpr; } @Override public void visit(BLangAnnotAccessExpr annotAccessExpr) { BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode(); binaryExpr.pos = annotAccessExpr.pos; binaryExpr.opKind = OperatorKind.ANNOT_ACCESS; binaryExpr.lhsExpr = annotAccessExpr.expr; binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType, annotAccessExpr.annotationSymbol.bvmAlias()); binaryExpr.type = annotAccessExpr.type; binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null, new BInvokableType(Lists.of(binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type), annotAccessExpr.type, null), null); result = rewriteExpr(binaryExpr); } @Override public void visit(BLangIsLikeExpr isLikeExpr) { isLikeExpr.expr = rewriteExpr(isLikeExpr.expr); result = isLikeExpr; } @Override public void visit(BLangStatementExpression bLangStatementExpression) { bLangStatementExpression.expr = 
rewriteExpr(bLangStatementExpression.expr); bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env); result = bLangStatementExpression; } @Override public void visit(BLangQueryExpr queryExpr) { BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env); result = rewrite(stmtExpr, env); } @Override public void visit(BLangQueryAction queryAction) { BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env); result = rewrite(stmtExpr, env); } @Override public void visit(BLangJSONArrayLiteral jsonArrayLiteral) { jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs); result = jsonArrayLiteral; } @Override public void visit(BLangConstant constant) { BConstantSymbol constSymbol = constant.symbol; if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) { if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) { throw new IllegalStateException(); } BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType, constSymbol.value.value); constant.expr = rewriteExpr(literal); } else { constant.expr = rewriteExpr(constant.expr); } constant.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = constant; } @Override public void visit(BLangIgnoreExpr ignoreExpr) { result = ignoreExpr; } @Override public void visit(BLangConstRef constantRef) { result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value); } BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol, BInvokableSymbol iteratorInvokableSymbol, boolean isIteratorFuncFromLangLib) { BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol); BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); iteratorInvocation.pos = pos; iteratorInvocation.expr = dataReference; iteratorInvocation.symbol = 
iteratorInvokableSymbol;
iteratorInvocation.type = iteratorInvokableSymbol.retType;
iteratorInvocation.argExprs = Lists.of(dataReference);
iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
// Bind the result of `<collection>.iterator()` to a synthetic `$iterator$`
// variable so subsequent desugared statements can reference the iterator.
BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
        iteratorInvokableSymbol.retType, this.env.scope.owner);
BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
        iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}

/**
 * Creates `$result$ = <iterator>.next()` as a variable definition. The result
 * type is nilable because `next()` yields nil once the iteration is exhausted.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
        BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$", nillableResultType,
            nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}

/**
 * Creates the assignment `<result> = <iterator>.next()` for reuse inside the
 * desugared loop body.
 */
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos, BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Narrow nil away from the receiver type before invoking `next()`; the
    // iterator reference is known to be non-nil at this point in the loop.
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}

/**
 * Builds the `<iterator>.next()` invocation node used by the helpers above.
 */
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // NOTE(review): assumes the iterator object always declares a `next` method;
    // getNextFunc returns null otherwise, which would NPE here. Presumably
    // guaranteed by earlier type checking — TODO confirm.
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation)
            TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.type = nextFuncSymbol.retType;
    return nextInvocation;
}

/**
 * Looks up the attached function named "next" on the given iterator object
 * type, or returns null when absent.
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
        if (bAttachedFunction.funcName.value.equals("next")) {
            return bAttachedFunction;
        }
    }
    return null;
}

// Convenience wrapper: access the `value` field of an iterator result record.
BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) {
    return getFieldAccessExpression(pos, "value", varType, resultSymbol);
}

/**
 * Builds `<result>.<fieldName>` with the given static type; `originalType` is
 * kept in sync with `type`.
 */
BLangFieldBasedAccess getFieldAccessExpression(DiagnosticPos pos, String fieldName, BType varType,
        BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.type = varType;
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.type;
    return fieldBasedAccessExpression;
}

/**
 * Converts an expression-bodied arrow function into a block body containing a
 * single `return <expr>;` statement.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}

private BLangInvocation
createInvocationNode(String functionName, List<BLangExpression> args, BType retType) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol; invocationNode.type = retType; invocationNode.requiredArgs = args; return invocationNode; } private BLangInvocation createLangLibInvocationNode(String functionName, BLangExpression onExpr, List<BLangExpression> args, BType retType, DiagnosticPos pos) { BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode(); invocationNode.pos = pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(functionName); name.pos = pos; invocationNode.name = name; invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); invocationNode.expr = onExpr; invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName)); ArrayList<BLangExpression> requiredArgs = new ArrayList<>(); requiredArgs.add(onExpr); requiredArgs.addAll(args); invocationNode.requiredArgs = requiredArgs; invocationNode.type = retType != null ? 
retType : ((BInvokableSymbol) invocationNode.symbol).retType; invocationNode.langLibInvocation = true; return invocationNode; } private BLangArrayLiteral createArrayLiteralExprNode() { BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); expr.exprs = new ArrayList<>(); expr.type = new BArrayType(symTable.anyType); return expr; } private void visitFunctionPointerInvocation(BLangInvocation iExpr) { BLangVariableReference expr; if (iExpr.expr == null) { expr = new BLangSimpleVarRef(); } else { BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess(); fieldBasedAccess.expr = iExpr.expr; fieldBasedAccess.field = iExpr.name; expr = fieldBasedAccess; } expr.symbol = iExpr.symbol; expr.type = iExpr.symbol.type; BLangExpression rewritten = rewriteExpr(expr); result = new BFunctionPointerInvocation(iExpr, rewritten); } private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) { if (types.isValueType(expr.type)) { return expr; } if (expr.type.tag == TypeTags.ERROR) { return expr; } BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type, expr.pos); return addConversionExprIfRequired(cloneInvok, lhsType); } private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) { if (types.isValueType(expr.type)) { return expr; } if (expr.type.tag == TypeTags.ERROR) { return expr; } BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), expr.type, expr.pos); return addConversionExprIfRequired(cloneInvok, lhsType); } @SuppressWarnings("unchecked") <E extends BLangNode> E rewrite(E node, SymbolEnv env) { if (node == null) { return null; } if (node.desugared) { return node; } SymbolEnv previousEnv = this.env; this.env = env; node.accept(this); BLangNode resultNode = this.result; this.result = null; resultNode.desugared = true; this.env = previousEnv; return (E) resultNode; } @SuppressWarnings("unchecked") <E 
extends BLangExpression> E rewriteExpr(E node) {
    // Rewrites an expression node. When an implicit conversion is attached,
    // the conversion expression is desugared in place of the raw node (and the
    // attachment is cleared so it is not applied twice).
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}

@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    // Rewrites a statement while threading a doubly-linked statement chain
    // (`currentLink`) through the desugared output; the link push/pop order
    // around the recursive rewrite is load-bearing.
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}

// Rewrites each statement of the list in place and returns the same list.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// Rewrites each node of the list in place and returns the same list.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewrite(nodeList.get(i), env));
    }
    return nodeList;
}

// Rewrites each expression of the list in place and returns the same list.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.set(i, rewriteExpr(nodeList.get(i)));
    }
    return nodeList;
}

// Creates a string literal node with the given position and value.
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}

// Creates an int literal node (no position is set by this helper).
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.type = symTable.intType;
    return literal;
}

// Creates a byte literal node; the byte is widened to its unsigned int value.
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}

private BLangExpression
createTypeCastExpr(BLangExpression expr, BType targetType) {
    // Wraps the expression in a type-conversion node targeting `targetType`.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.type = targetType;
    conversionExpr.targetType = targetType;
    return conversionExpr;
}

// Recursively unwraps array types to their innermost element type; any
// non-array type is returned as-is.
private BType getElementType(BType type) {
    if (type.tag != TypeTags.ARRAY) {
        return type;
    }
    return getElementType(((BArrayType) type).getElementType());
}

/**
 * Appends an implicit `return;` to a block-bodied, non-native invokable whose
 * return type is `never` or nilable, when it has no workers and its last
 * statement is not already a return.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol)
            || (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    boolean isNeverOrNilableReturn = invokableNode.symbol.type.getReturnType().tag == TypeTags.NEVER
            || invokableNode.symbol.type.getReturnType().isNullable();
    if (invokableNode.workers.size() == 0 && isNeverOrNilableReturn && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        DiagnosticPos invPos = invokableNode.pos;
        // NOTE(review): builds a zero-width position (eLine for both start/end
        // line, sCol for both columns) for the synthesized return — presumably
        // intentional for a generated node; confirm against DiagnosticPos
        // (src, sLine, eLine, sCol, eCol) parameter order.
        DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine,
                invPos.sCol, invPos.sCol);
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}

/**
 * Reorder the invocation arguments to match the original function signature.
* * @param iExpr Function invocation expressions to reorder the arguments */ private void reorderArguments(BLangInvocation iExpr) { BSymbol symbol = iExpr.symbol; if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) { return; } BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol; List<BLangExpression> restArgs = iExpr.restArgs; int originalRequiredArgCount = iExpr.requiredArgs.size(); BLangExpression varargRef = null; BLangBlockStmt blockStmt = null; int restArgCount = restArgs.size(); if (restArgCount > 0 && restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR && originalRequiredArgCount < invokableSymbol.params.size()) { BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr; DiagnosticPos varargExpPos = expr.pos; BType varargVarType = expr.type; String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++; BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID, varargVarType, this.env.scope.owner); varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol); BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos); varDef.var = var; varDef.type = varargVarType; blockStmt = createBlockStmt(varargExpPos); blockStmt.stmts.add(varDef); } if (!invokableSymbol.params.isEmpty()) { reorderNamedArgs(iExpr, invokableSymbol, varargRef); } if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) { if (invokableSymbol.restParam == null) { return; } BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); List<BLangExpression> exprs = new ArrayList<>(); BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type; BType elemType = arrayType.eType; for (BLangExpression restArg : restArgs) { 
exprs.add(addConversionExprIfRequired(restArg, elemType)); } arrayLiteral.exprs = exprs; arrayLiteral.type = arrayType; if (restArgCount != 0) { iExpr.restArgs = new ArrayList<>(); } iExpr.restArgs.add(arrayLiteral); return; } if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) { if (iExpr.requiredArgs.size() == originalRequiredArgCount) { return; } BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0); BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg); stmtExpression.type = firstNonRestArg.type; iExpr.requiredArgs.add(0, stmtExpression); if (invokableSymbol.restParam == null) { return; } BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount); BLangInvocation sliceInvocation = createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef, new ArrayList<BLangExpression>() {{ add(startIndex); }}, varargRef.type, varargRef.pos); restArgs.remove(0); restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type)); return; } BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type; BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode(); arrayLiteral.type = restParamType; BType elemType = restParamType.eType; DiagnosticPos pos = restArgs.get(0).pos; List<BLangExpression> exprs = new ArrayList<>(); for (int i = 0; i < restArgCount - 1; i++) { exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType)); } arrayLiteral.exprs = exprs; BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode(); pushRestArgsExpr.pos = pos; pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1); String name = DESUGARED_VARARG_KEY + this.varargCount++; BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType, this.env.scope.owner); BLangSimpleVarRef arrayVarRef = 
ASTBuilderUtil.createVariableRef(pos, varSymbol); BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol); BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos); varDef.var = var; varDef.type = restParamType; BLangBlockStmt pushBlockStmt = createBlockStmt(pos); pushBlockStmt.stmts.add(varDef); BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt); BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef, new ArrayList<BLangExpression>() {{ add(pushRestArgsExpr); }}, restParamType, pos); pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1)); expressionStmt.expr = pushInvocation; BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef); stmtExpression.type = restParamType; iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }}; } private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) { List<BLangExpression> args = new ArrayList<>(); Map<String, BLangExpression> namedArgs = new HashMap<>(); iExpr.requiredArgs.stream() .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR) .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr)); List<BVarSymbol> params = invokableSymbol.params; int varargIndex = 0; BType varargType = null; boolean tupleTypedVararg = false; if (varargRef != null) { varargType = varargRef.type; tupleTypedVararg = varargType.tag == TypeTags.TUPLE; } for (int i = 0; i < params.size(); i++) { BVarSymbol param = params.get(i); if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) { args.add(iExpr.requiredArgs.get(i)); } else if (namedArgs.containsKey(param.name.value)) { args.add(namedArgs.get(param.name.value)); } else if (varargRef == null) { BLangExpression expr = new BLangIgnoreExpr(); expr.type = param.type; args.add(expr); } else { 
BLangIndexBasedAccess memberAccessExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode(); memberAccessExpr.pos = varargRef.pos; memberAccessExpr.expr = varargRef; memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex)); memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType; varargIndex++; args.add(addConversionExprIfRequired(memberAccessExpr, param.type)); } } iExpr.requiredArgs = args; } private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern( DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) { BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType; Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ? ((BUnionType) enclosingFuncReturnType).getMemberTypes() : new LinkedHashSet<BType>() {{ add(enclosingFuncReturnType); }}; boolean returnOnError = equivalentErrorTypes.stream() .allMatch(errorType -> returnTypeSet.stream() .anyMatch(retType -> types.isAssignable(errorType, retType))); String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure"; BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos, patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0, names.fromString(patternFailureCaseVarName), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos, patternFailureCaseVar.symbol); BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode(); patternBlockFailureCase.pos = pos; if (!isCheckPanicExpr && returnOnError) { BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode(); returnStmt.pos = pos; returnStmt.expr = patternFailureCaseVarRef; patternBlockFailureCase.stmts.add(returnStmt); } else { BLangPanic panicNode = (BLangPanic) 
TreeBuilder.createPanicNode();
        panicNode.pos = pos;
        panicNode.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(panicNode);
        }
        return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase);
    }

    /**
     * Builds the success branch of a safe-assignment match: binds the matched value to a
     * generated temp variable and assigns that temp to the target (either the new variable
     * symbol when this is a var-def, or the original LHS expression otherwise).
     *
     * @param pos       position used for all generated nodes
     * @param lhsType   static type of the assignment target
     * @param isVarDef  true when the safe assignment defines a new variable
     * @param varSymbol symbol of the newly defined variable (used only when isVarDef)
     * @param lhsExpr   existing LHS expression (used only when !isVarDef)
     * @return the generated typed binding-pattern clause for the success case
     */
    private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType,
                                                                            boolean isVarDef, BVarSymbol varSymbol,
                                                                            BLangExpression lhsExpr) {
        String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
        BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos,
                patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0,
                        names.fromString(patternSuccessCaseVarName),
                        this.env.scope.owner.pkgID, lhsType, this.env.scope.owner));
        BLangExpression varRefExpr;
        if (isVarDef) {
            varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        } else {
            varRefExpr = lhsExpr;
        }
        BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
                patternSuccessCaseVar.symbol);
        BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos,
                varRefExpr, patternSuccessCaseVarRef, false);
        // Double-brace initialization (anonymous subclass with instance initializer) to build
        // the single-statement body; kept as-is to stay byte-identical with the original.
        BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos,
                new ArrayList<BLangStatement>() {{
                    add(assignmentStmtSuccessCase);
                }});
        return ASTBuilderUtil.createMatchStatementPattern(pos, patternSuccessCaseVar, patternBlockSuccessCase);
    }

    /**
     * Desugars a whole match statement into a chain of if-else statements: the first pattern
     * becomes the root if, each following pattern is attached as the else branch, and a final
     * "last pattern" (if present) becomes the terminal else body.
     */
    private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
        List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;

        BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
        BLangIf currentIfNode = parentIfNode;
        for (int i = 1; i < patterns.size(); i++) {
            BLangMatchBindingPatternClause patternClause = patterns.get(i);
            if (i == patterns.size() - 1 && patternClause.isLastPattern) {
                // Terminal wildcard-like pattern: no condition needed, attach its body directly.
                currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar);
            } else {
                currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
                currentIfNode = (BLangIf) currentIfNode.elseStmt;
            }
        }

        return parentIfNode;
    }

    /**
     * Generate an if-else statement from the given match statement.
     *
     * @param pattern match pattern statement node
     * @param matchExprVar  variable node of the match expression
     * @return if else statement node
     */
    private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
        BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
        if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
            BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
            return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
        }

        // For structured patterns the expected type is derived from the binding pattern itself.
        BType expectedType = matchExprVar.type;
        if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
            BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
            expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
        }

        if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) pattern;
            BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);

            // Wire the (possibly cast) matched value into the binding pattern variable.
            BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
            structuredPattern.bindingPatternVariable.expr = matchExprVarRef;

            BLangStatement varDefStmt;
            if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                        (BLangTupleVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                        (BLangRecordVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                        (BLangErrorVariable) structuredPattern.bindingPatternVariable);
            } else {
                varDefStmt = ASTBuilderUtil
                        .createVariableDef(pattern.pos,
                                (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
            }

            if (structuredPattern.typeGuardExpr != null) {
                // With a type guard, the var defs must be evaluated as part of the condition
                // (via a statement-expression ANDed onto the pattern check), not in the body.
                BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
                blockStmt.addStatement(varDef);
                blockStmt.addStatement(varDefStmt);
                BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                        structuredPattern.typeGuardExpr);
                stmtExpr.type = symTable.booleanType;

                ifCondition = ASTBuilderUtil
                        .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                                (BOperatorSymbol) symResolver
                                        .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                                symTable.booleanType));
            } else {
                structuredPattern.body.stmts.add(0, varDef);
                structuredPattern.body.stmts.add(1, varDefStmt);
            }
        }

        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
    }

    /**
     * Builds the body block for a typed binding pattern: prepends a var-def that binds the
     * (converted) matched value to the pattern variable. `_` patterns get no binding.
     */
    private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {

        BLangBlockStmt body;

        BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern;
        // Ignore-variable (`_`): nothing to bind, use body unchanged.
        if (patternClause.variable.name.value.equals(Names.IGNORE.value)) {
            return patternClause.body;
        }

        // create TypeName i = <TypeName> _$$_
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos,
                matchExprVar.symbol);
        BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef, patternClause.variable.type);
        BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(patternClause.pos, "",
                patternClause.variable.type, patternVarExpr, patternClause.variable.symbol);
        BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar);
        patternClause.body.stmts.add(0, patternVarDef);
        body = patternClause.body;

        return body;
    }

    /**
     * Builds the terminal else body for the last match pattern. For structured patterns the
     * binding-pattern var-def is prepended (no type check is needed for a last pattern).
     */
    private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                                   BLangSimpleVariable matchExprVar) {

        BLangBlockStmt body = pattern.body;

        if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {

            // Create a variable reference for _$$_
            BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);

            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) pattern;
            structuredPattern.bindingPatternVariable.expr = matchExprVarRef;

            BLangStatement varDefStmt;
            if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                        (BLangTupleVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                        (BLangRecordVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                        (BLangErrorVariable) structuredPattern.bindingPatternVariable);
            } else {
                varDefStmt = ASTBuilderUtil
                        .createVariableDef(pattern.pos,
                                (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
            }

            structuredPattern.body.stmts.add(0, varDefStmt);
            body = structuredPattern.body;
        }

        return body;
    }

    /**
     * Wraps {@code expr} in an explicit type-conversion node targeting {@code lhsType} when a
     * conversion is required. Returns the expression unchanged for same types, NONE targets,
     * nil-compatible cases, implicit casts already set, and array<-tuple (no conversion node).
     */
    BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
        if (lhsType.tag == TypeTags.NONE) {
            return expr;
        }

        BType rhsType = expr.type;
        if (types.isSameType(rhsType, lhsType)) {
            return expr;
        }

        // May set expr.impConversionExpr as a side effect; if it did, no explicit node is added.
        types.setImplicitCastExpr(expr, rhsType, lhsType);
        if (expr.impConversionExpr != null) {
            return expr;
        }

        if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
            return expr;
        }

        if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
            return expr;
        }

        if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
            return expr;
        }

        // Note: checkTypes is disabled — the conversion is assumed valid by this point.
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
                TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = expr;
        conversionExpr.targetType = lhsType;
        conversionExpr.type = lhsType;
        conversionExpr.pos = expr.pos;
        conversionExpr.checkTypes = false;
        return conversionExpr;
    }

    /**
     * Builds the boolean condition that tests whether the matched value fits the given
     * pattern clause. Union pattern types expand into an OR-chain of per-member checks.
     */
    private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol) {
        BType patternType;

        switch (patternClause.getKind()) {
            case MATCH_STATIC_PATTERN_CLAUSE:
                BLangMatchStaticBindingPatternClause staticPattern =
                        (BLangMatchStaticBindingPatternClause) patternClause;
                patternType = staticPattern.literal.type;
                break;
            case MATCH_STRUCTURED_PATTERN_CLAUSE:
                BLangMatchStructuredBindingPatternClause structuredPattern =
                        (BLangMatchStructuredBindingPatternClause) patternClause;
                patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
                break;
            default:
                BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
                patternType = simplePattern.variable.type;
                break;
        }

        BLangExpression binaryExpr;
        BType[] memberTypes;
        if (patternType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) patternType;
            memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
        } else {
            memberTypes = new BType[1];
            memberTypes[0] = patternType;
        }

        if (memberTypes.length == 1) {
            binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        } else {
            // Fold the member checks into (memN OR (... OR (mem1 OR mem0))).
            BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
            BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.type, rhsExpr.type));
            for (int i = 2; i < memberTypes.length; i++) {
                lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
                rhsExpr = binaryExpr;
                binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                        symTable.booleanType, OperatorKind.OR,
                        (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                                lhsExpr.type, rhsExpr.type));
            }
        }

        return binaryExpr;
    }

    /**
     * Computes the BType implied by a structured binding pattern. Tuple patterns yield a
     * BTupleType, record patterns synthesize an anonymous BRecordType (with a registered type
     * definition), error patterns synthesize a BErrorType with a detail record. Plain
     * variables fall through to their declared type.
     */
    private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
        if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
            List<BType> memberTypes = new ArrayList<>();
            for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
                memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
            }
            BTupleType tupleType = new BTupleType(memberTypes);
            if (tupleVariable.restVariable != null) {
                // The rest variable's pattern type is an array; the tuple's rest type is its element type.
                BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
                tupleType.restType = restArrayType.eType;
            }
            return tupleType;
        }

        if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
            BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;

            // Synthesize an anonymous record type symbol; recordCount keeps names unique.
            BRecordTypeSymbol recordSymbol =
                    Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
                                               env.enclPkg.symbol.pkgID, null, env.scope.owner);
            recordSymbol.initializerFunc = createRecordInitFunc();
            recordSymbol.scope = new Scope(recordSymbol);
            recordSymbol.scope.define(
                    names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                    recordSymbol.initializerFunc.symbol);

            LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
            List<BLangSimpleVariable> typeDefFields = new ArrayList<>();

            for (int i = 0; i < recordVariable.variableList.size(); i++) {
                String fieldNameStr = recordVariable.variableList.get(i).key.value;
                Name fieldName = names.fromString(fieldNameStr);
                BType fieldType = getStructuredBindingPatternType(
                        recordVariable.variableList.get(i).valueBindingPattern);
                BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                        env.enclPkg.symbol.pkgID, fieldType, recordSymbol);

                fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
                typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
                recordSymbol.scope.define(fieldName, fieldSymbol);
            }

            BRecordType recordVarType = new BRecordType(recordSymbol);
            recordVarType.fields = fields;

            // Rest-field type comes from the rest param's map constraint, else defaults to anydata.
            recordVarType.restFieldType = recordVariable.restParam != null ?
                        ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
                    symTable.anydataType;
            recordSymbol.type = recordVarType;
            recordVarType.tsymbol = recordSymbol;

            // Register the synthesized type so later phases (codegen) can see it.
            BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                                                                                           recordVarType,
                                                                                           bindingPatternVariable.pos);
            recordTypeNode.initFunction =
                    rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                            env);
            TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);

            return recordVarType;
        }

        if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
            BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
            BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                    SymTag.ERROR,
                    Flags.PUBLIC,
                    names.fromString("$anonErrorType$" + errorCount++),
                    env.enclPkg.symbol.pkgID,
                    null, null);
            BType detailType;
            if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
                // Only a rest-detail binding: use the generic detail type.
                detailType = symTable.detailType;
            } else {
                detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);

                BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
                recordTypeNode.initFunction = TypeDefBuilderHelper
                        .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
                TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
            }
            BErrorType errorType = new BErrorType(errorTypeSymbol,
                    ((BErrorType) errorVariable.type).reasonType,
                    detailType);
            errorTypeSymbol.type = errorType;

            TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);

            return errorType;
        }

        return bindingPatternVariable.type;
    }

    /**
     * Builds a record type node whose fields mirror the error variable's detail entries;
     * entries without a bound symbol get a synthesized public placeholder symbol.
     */
    private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
        List<BLangSimpleVariable> fieldList = new ArrayList<>();
        for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
            BVarSymbol symbol = field.valueBindingPattern.symbol;
            if (symbol == null) {
                symbol = new BVarSymbol(
                        Flags.PUBLIC,
                        names.fromString(field.key.value + "$"),
                        this.env.enclPkg.packageID,
                        symTable.pureType,
                        null);
            }
            BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
                    field.valueBindingPattern.pos,
                    symbol.name.value,
                    field.valueBindingPattern.type,
                    field.valueBindingPattern.expr,
                    symbol);
            fieldList.add(fieldVar);
        }
        return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
    }

    /**
     * Synthesizes an anonymous detail-record type for an error binding pattern. The record is
     * sealed only when there is no rest-detail binding; rest fields are typed anydata.
     *
     * @param detail     detail entries of the error binding pattern
     * @param restDetail rest-detail variable, or null if absent
     * @param errorNo    uniquifier for the generated type name
     * @return the synthesized detail record type
     */
    private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                                   BLangSimpleVariable restDetail, int errorNo) {
        BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
                SymTag.RECORD,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + errorNo + "$detailType"),
                env.enclPkg.symbol.pkgID, null, null);

        detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
        detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
        detailRecordTypeSymbol.scope.define(
                names.fromString(detailRecordTypeSymbol.name.value + "."
                        + detailRecordTypeSymbol.initializerFunc.funcName.value),
                detailRecordTypeSymbol.initializerFunc.symbol);

        BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
        detailRecordType.restFieldType = symTable.anydataType;

        if (restDetail == null) {
            detailRecordType.sealed = true;
        }

        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
            Name fieldName = names.fromIdNode(detailEntry.key);
            BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
            BVarSymbol fieldSym = new BVarSymbol(
                        Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol);
            detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));
            detailRecordTypeSymbol.scope.define(fieldName, fieldSym);
        }

        return detailRecordType;
    }

    /** Creates a no-arg, nil-returning init function stub for a synthesized record type. */
    private BAttachedFunction createRecordInitFunc() {
        BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
        BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
                Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false);
        initFuncSymbol.retType = symTable.nilType;
        return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType);
    }

    /** Wraps a BErrorType in an AST error-type node. */
    BLangErrorType createErrorTypeNode(BErrorType errorType) {
        BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
        errorTypeNode.type = errorType;
        return errorTypeNode;
    }

    /**
     * Builds the per-member-type check for one pattern clause: equality for static patterns,
     * is-like for structured patterns, a nil-equality check for nil, and an is-assignable
     * check otherwise.
     */
    private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                         BVarSymbol varSymbol, BType patternType) {
        DiagnosticPos pos = patternClause.pos;

        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

        if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
            BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
            return createBinaryExpression(pos, varRef, pattern.literal);
        }

        if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
            return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
        }

        if (patternType == symTable.nilType) {
            BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
            return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
                    OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                            symTable.anyType, symTable.nilType));
        } else {
            return createIsAssignableExpression(pos, varSymbol, patternType);
        }
    }

    /**
     * Recursively lowers a static-pattern expression into a boolean check against varRef:
     * group exprs unwrap, binary exprs become OR-joined sub-checks, `_` becomes an any-type
     * test, and anything else becomes an equality comparison.
     */
    private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
                                                   BLangExpression expression) {

        BLangBinaryExpr binaryExpr;

        if (NodeKind.GROUP_EXPR == expression.getKind()) {
            return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
        }

        if (NodeKind.BINARY_EXPR == expression.getKind()) {
            binaryExpr = (BLangBinaryExpr) expression;
            BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
            BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);

            binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver
                            .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
        } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
            // `_` matches anything: lower to `varRef is any`.
            BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
            anyType.type = symTable.anyType;
            anyType.typeKind = TypeKind.ANY;
            return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
        } else {
            binaryExpr = ASTBuilderUtil
                    .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
            BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
            if (opSymbol == symTable.notFoundSymbol) {
                // Fall back to the type-set equality operator when no direct operator exists.
                opSymbol = symResolver
                        .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
                                binaryExpr);
            }
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
        }

        return binaryExpr;
    }

    /** Builds an is-assignable test expression for the given variable against patternType. */
    private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
                                                               BVarSymbol varSymbol,
                                                               BType patternType) {
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

        return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names);
    }

    /** Builds an is-like (shape) test expression for expr against the given type. */
    private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
        return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
    }

    /** Converts a variable (with initializer) into the equivalent assignment statement. */
    private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        varRef.pos = variable.pos;
        varRef.variableName = variable.name;
        varRef.symbol = variable.symbol;
        varRef.type = variable.type;

        BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
        assignmentStmt.expr = variable.expr;
        assignmentStmt.pos = variable.pos;
        assignmentStmt.setVariable(varRef);
        return assignmentStmt;
    }

    /** Convenience overload: field update from a field variable's own expr/symbol/type/name. */
    private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                    BVarSymbol selfSymbol) {
        return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.type, selfSymbol,
                                       variable.name);
    }

    /**
     * Builds (and desugars in the init-function env) a `self.field = expr` assignment marked
     * as a store-on-creation field access.
     */
    private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                    BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                    BLangIdentifier fieldName) {
        BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
        BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);
        fieldAccess.symbol = fieldSymbol;
        fieldAccess.type = fieldType;
        fieldAccess.isStoreOnCreation = true;

        BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
        assignmentStmt.expr = expr;
        assignmentStmt.pos = function.pos;
        assignmentStmt.setVariable(fieldAccess);

        SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
        return rewrite(assignmentStmt, initFuncEnv);
    }

    /**
     * Appends an identity default pattern to a match expression covering any member types of
     * the matched expression that no existing pattern clause can accept. No-op when all types
     * are already covered.
     */
    private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
        List<BType> exprTypes;
        List<BType> unmatchedTypes = new ArrayList<>();

        if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
            exprTypes = new ArrayList<>(unionType.getMemberTypes());
        } else {
            exprTypes = Lists.of(bLangMatchExpression.type);
        }

        // Collect every expression member type not assignable to any pattern variable type.
        for (BType type : exprTypes) {
            boolean assignable = false;
            for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
                if (this.types.isAssignable(type, pattern.variable.type)) {
                    assignable = true;
                    break;
                }
            }

            if (!assignable) {
                unmatchedTypes.add(type);
            }
        }

        if (unmatchedTypes.isEmpty()) {
            return;
        }

        BType defaultPatternType;
        if (unmatchedTypes.size() == 1) {
            defaultPatternType = unmatchedTypes.get(0);
        } else {
            defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
        }

        String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
        BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
                patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName),
                        this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner));

        // Identity pattern: the default clause simply yields the matched value unchanged.
        BLangMatchExprPatternClause defaultPattern =
                (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
        defaultPattern.variable = patternMatchCaseVar;
        defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
        defaultPattern.pos = bLangMatchExpression.pos;
        bLangMatchExpression.patternClauses.add(defaultPattern);
    }

    /**
     * Returns true when the access expression (or any expression in its chain) uses safe
     * navigation (`?.` / error-lifting) and therefore needs desugaring. LHS accesses and
     * accesses with no receiver never do.
     */
    private boolean safeNavigate(BLangAccessExpression accessExpr) {
        if (accessExpr.lhsVar || accessExpr.expr == null) {
            return false;
        }

        if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
            return true;
        }

        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
                kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            return safeNavigate((BLangAccessExpression) accessExpr.expr);
        }

        return false;
    }

    /**
     * Desugars a safe-navigation access chain into a statement expression: a temp result var
     * plus the nested match statements built by handleSafeNavigation, yielding the temp var.
     * Resets the per-expression desugaring state (match stack, success pattern) afterwards.
     */
    private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
        BType originalExprType = accessExpr.type;
        // Create a temp variable to hold the intermediate result of the acces expression.
        String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
        BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
                accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
                        this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
        BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

        // Populates this.matchStmtStack as a side effect; the outermost match is at the bottom.
        handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);

        BLangMatch matcEXpr = this.matchStmtStack.firstElement();
        BLangBlockStmt blockStmt =
                ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
        stmtExpression.type = originalExprType;

        // Reset the variables
        this.matchStmtStack = new Stack<>();
        this.accessExprStack = new Stack<>();
        this.successPattern = null;
        this.safeNavigationAssignment = null;
        return stmtExpression;
    }

    /**
     * Recursively converts one link of a safe-navigation chain into a match statement with
     * error/nil patterns (per the navigation kind) and a success pattern that continues the
     * chain. Inner links are processed first; results are chained via the match-statement
     * stack. Mutates this.matchStmtStack / this.successPattern.
     */
    private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
        if (accessExpr.expr == null) {
            return;
        }

        // If the parent of current expr is the root, terminate
        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
                kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
                kind == NodeKind.INVOCATION) {
            handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
        }

        if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
            // No safe navigation on this link: restore the original (non-lifted) type.
            BType originalType = accessExpr.originalType;
            if (TypeTags.isXMLTypeTag(originalType.tag)) {
                accessExpr.type = BUnionType.create(null, originalType, symTable.errorType);
            } else {
                accessExpr.type = originalType;
            }
            if (this.safeNavigationAssignment != null) {
                this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
            }
            return;
        }

        /*
         * If the field access is a safe navigation, create a match expression.
         * Then chain the current expression as the success-pattern of the parent
         * match expr, if available.
         * eg:
         * x but {    <--- parent match expr
         *   error e => e,
         *   T t => t.y but {  <--- current expr
         *                error e => e,
         *                R r => r.z
         *             }
         * }
         */

        BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());

        boolean isAllTypesRecords = false;
        LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
        if (accessExpr.expr.type.tag == TypeTags.UNION) {
            memTypes = new LinkedHashSet<>(((BUnionType) accessExpr.expr.type).getMemberTypes());
            isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
        }

        // Add pattern to lift nil
        if (accessExpr.nilSafeNavigation) {
            matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
            matchStmt.type = type;
            memTypes.remove(symTable.nilType);
        }

        // Add pattern to lift error, this is only required if the safe navigation is used
        if (accessExpr.errorSafeNavigation) {
            matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
            matchStmt.type = type;
            matchStmt.pos = accessExpr.pos;
            memTypes.remove(symTable.errorType);
        }

        BLangMatchTypedBindingPatternClause successPattern = null;
        Name field = getFieldName(accessExpr);
        if (field == Names.EMPTY) {
            // Field name not statically known: a single success pattern over the full type.
            successPattern = getSuccessPattern(accessExpr.expr.type, accessExpr, tempResultVar,
                    accessExpr.errorSafeNavigation);
            matchStmt.patternClauses.add(successPattern);
            pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
            return;
        }

        if (isAllTypesRecords) {
            // One success pattern per record member that actually declares the field,
            // plus a catch-all that yields nil.
            for (BType memberType : memTypes) {
                if (((BRecordType) memberType).fields.containsKey(field.value)) {
                    successPattern =
                            getSuccessPattern(memberType, accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
                    matchStmt.patternClauses.add(successPattern);
                }
            }
            matchStmt.patternClauses.add(getMatchAllAndNilReturnPattern(accessExpr, tempResultVar));
            pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
            return;
        }

        // Create the pattern for success scenario. i.e: not null and not error (if applicable).
        successPattern =
                getSuccessPattern(accessExpr.expr.type, accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
        matchStmt.patternClauses.add(successPattern);
        pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
    }

    /**
     * Pushes a generated match statement onto the stack and nests it into the body of the
     * previous link's success pattern, then records the new success pattern for the next link.
     */
    private void pushToMatchStatementStack(BLangMatch matchStmt, BLangAccessExpression accessExpr,
                                           BLangMatchTypedBindingPatternClause successPattern) {
        this.matchStmtStack.push(matchStmt);
        if (this.successPattern != null) {
            this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
        }
        this.successPattern = successPattern;
    }

    /**
     * Extracts the statically-known accessed field name (from a field access, or a literal
     * index expression); returns Names.EMPTY when the name is not statically known.
     */
    private Name getFieldName(BLangAccessExpression accessExpr) {
        Name field = Names.EMPTY;
        if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
            field = new Name(((BLangFieldBasedAccess) accessExpr).field.value);
        } else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr;
            if (indexBasedExpression.getKind() == NodeKind.LITERAL) {
                field = new Name(((BLangLiteral) indexBasedExpression).value.toString());
            }
        }
        return field;
    }

    /** True when every union member is a record, error, or nil (error/nil are lifted later). */
    private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
        for (BType memType : memTypes) {
            int typeTag = memType.tag;
            if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) {
                return false;
            }
        }
        return true;
    }

    /**
     * Builds the `error e => tempResult = e` pattern used to lift errors out of a
     * safe-navigation link.
     */
    private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                     BLangSimpleVariable tempResultVar) {
        String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
        BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName,
                symTable.errorType, null, new BVarSymbol(0, names.fromString(errorPatternVarName),
                        this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));

        // Create assignment to temp result
        BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol);
        BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));

        // Create the pattern
        // R b => a = b;
        BLangMatchTypedBindingPatternClause errorPattern = ASTBuilderUtil
                .createMatchStatementPattern(expr.pos, errorPatternVar, patternBody);
        return errorPattern;
    }

    /** Builds a match-expression nil pattern (`_ => expr`) yielding the given expression. */
    private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                           BLangExpression expr) {
        String nullPatternVarName = IGNORE.toString();
        BLangSimpleVariable errorPatternVar = ASTBuilderUtil.createVariable(pos, nullPatternVarName, symTable.nilType,
                null, new BVarSymbol(0, names.fromString(nullPatternVarName),
                        this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner));

        BLangMatchExprPatternClause nullPattern =
                (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
        nullPattern.variable = errorPatternVar;
        nullPattern.expr = expr;
        nullPattern.pos = pos;
        return nullPattern;
    }

    /**
     * Builds the `() n => tempResult = n` pattern used to lift nil out of a
     * safe-navigation link.
     */
    private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                    BLangSimpleVariable tempResultVar) {
        String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
        BLangSimpleVariable nullPatternVar = ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName,
                symTable.nilType, null, new BVarSymbol(0, names.fromString(nullPatternVarName),
                        this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner));

        // Create assignment to temp result
        BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol);
        BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef, assignmentRhsExpr, false);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));

        // Create the pattern
        // R b => a = b;
        BLangMatchTypedBindingPatternClause nullPattern = ASTBuilderUtil
                .createMatchStatementPattern(expr.pos, nullPatternVar, patternBody);
        return nullPattern;
    }

    /**
     * Builds the catch-all static pattern (`_ => tempResult = ()`) used when an accessed
     * field is absent from some record members of a union.
     */
    private BLangMatchStaticBindingPatternClause getMatchAllAndNilReturnPattern(BLangExpression expr,
                                                                                BLangSimpleVariable tempResultVar) {

        BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef,
                        createLiteral(expr.pos, symTable.nilType, Names.NIL_VALUE), false);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignmentStmt));

        BLangMatchStaticBindingPatternClause matchAllPattern =
                (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern();
        String matchAllVarName = "_";
        matchAllPattern.literal =
                ASTBuilderUtil.createVariableRef(expr.pos, new BVarSymbol(0, names.fromString(matchAllVarName),
                        this.env.scope.owner.pkgID, symTable.anyType, this.env.scope.owner));
        matchAllPattern.body = patternBody;

        return matchAllPattern;
    }

    /**
     * Builds the success pattern for one safe-navigation link: binds the non-error/non-nil
     * value, re-applies the access on it (with safe navigation cleared), and assigns the
     * result to the temp var. Also records the assignment for later type fix-ups.
     */
    private BLangMatchTypedBindingPatternClause getSuccessPattern(BType type, BLangAccessExpression accessExpr,
                                                                  BLangSimpleVariable tempResultVar,
                                                                  boolean liftError) {
        type = types.getSafeType(type, true, liftError);
        String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";

        BVarSymbol successPatternSymbol;
        if (type.tag == TypeTags.INVOKABLE) {
            successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
                    this.env.scope.owner.pkgID, type, this.env.scope.owner);
        } else {
            successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                    this.env.scope.owner.pkgID, type, this.env.scope.owner);
        }

        BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
                type, null, successPatternSymbol);

        // Re-create the current link's access over the bound success variable.
        BLangAccessExpression tempAccessExpr = nodeCloner.clone(accessExpr);
        if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        }

        if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
            ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
                    ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
        }

        tempAccessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
        tempAccessExpr.errorSafeNavigation = false;
        tempAccessExpr.nilSafeNavigation = false;
        accessExpr.cloneRef = null;

        // XML attribute access can still yield error or nil even on the success path.
        if (TypeTags.isXMLTypeTag(tempAccessExpr.expr.type.tag)) {
            tempAccessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                    symTable.nilType);
        } else {
            tempAccessExpr.type = accessExpr.originalType;
        }
        tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;

        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);

        BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.type);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));

        // Create the pattern
        // R b => a = x.foo;
        BLangMatchTypedBindingPatternClause successPattern =
                ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
        this.safeNavigationAssignment = assignmentStmt;
        return successPattern;
    }

    /**
     * Returns true if assigning through this LHS access chain can dereference nil (i.e. some
     * receiver in the chain has a nullable type).
     */
    private boolean safeNavigateLHS(BLangExpression expr) {
        if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR && expr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
            return false;
        }

        BLangExpression varRef = ((BLangAccessExpression) expr).expr;
        if (varRef.type.isNullable()) {
            return true;
        }

        return safeNavigateLHS(varRef);
    }

    /**
     * Desugars an assignment through a safe-navigation LHS into a block: nil-guard statements
     * for each nullable link (built by createLHSSafeNavigation) followed by the assignment.
     * Note: safeAssignment is currently unused in the body.
     */
    private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
                                                           boolean safeAssignment) {
        this.accessExprStack = new Stack<>();
        List<BLangStatement> stmts = new ArrayList<>();
        createLHSSafeNavigation(stmts, accessExpr.expr);
        BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
                cloneExpression(accessExpr), rhsExpr);
        stmts.add(assignment);
        return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
    }

    /**
     * Recursively emits guard statements for each link of an LHS access chain: invocations are
     * hoisted into intermediate variables, and every nullable link gets an `if (link is ())`
     * guard that either initializes a defaultable mapping in place or panics with a
     * null-reference error. Pushes the (possibly rewritten) expression onto accessExprStack.
     */
    private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
        NodeKind kind = expr.getKind();
        boolean root = false;
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
                kind == NodeKind.INVOCATION) {
            BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
            createLHSSafeNavigation(stmts, accessExpr.expr);
            accessExpr.expr = accessExprStack.pop();
        } else {
            root = true;
        }

        // If expression is an invocation, then create a temp var to store the invocation value, so that
        // invocation will happen only one time
        if (expr.getKind() == NodeKind.INVOCATION) {
            BLangInvocation invocation = (BLangInvocation) expr;
            BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value
                    + "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner);
            BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
                    interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
            BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
                    intermediateVariable);
            stmts.add(intermediateVariableDefinition);

            expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
        }

        if (expr.type.isNullable()) {
            BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
            isNillTest.type = symTable.booleanType;

            BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);

            // createTypeTestExpr will flip the safe-navigation flag; work on a clone with the
            // nil-stripped type from here on.
            expr = cloneExpression(expr);
            expr.type = types.getSafeType(expr.type, true, false);
            if (isDefaultableMappingType(expr.type) && !root) { // root expr cannot be modified
                // Auto-fill: assign an empty mapping literal in place of nil.
                BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
                jsonLiteral.type = expr.type;
                jsonLiteral.pos = expr.pos;
                BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
                        expr, jsonLiteral);
                thenStmt.addStatement(assignment);
            } else {
                BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
                literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
                literal.type = symTable.stringType;

                BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
                errorCtorInvocation.pos = expr.pos;
                errorCtorInvocation.argExprs.add(literal);
                errorCtorInvocation.requiredArgs.add(literal);
                errorCtorInvocation.type = symTable.errorType;
                errorCtorInvocation.symbol = symTable.errorConstructor;

                BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
                panicNode.expr = errorCtorInvocation;
                panicNode.pos = expr.pos;
                thenStmt.addStatement(panicNode);
            }

            BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
            stmts.add(ifelse);
        }

        accessExprStack.push(expr);
    }

    /** Returns an AST type node for nil (`()`). */
    BLangValueType getNillTypeNode() {
        BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        nillTypeNode.typeKind = TypeKind.NIL;
        nillTypeNode.type = symTable.nilType;
        return nillTypeNode;
    }

    /**
     * Shallow-clones a variable reference or access expression; throws IllegalStateException
     * for any other node kind (callers only pass LHS-navigable expressions).
     */
    private BLangVariableReference cloneExpression(BLangExpression expr) {
        switch (expr.getKind()) {
            case SIMPLE_VARIABLE_REF:
                return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
            case FIELD_BASED_ACCESS_EXPR:
            case INDEX_BASED_ACCESS_EXPR:
            case INVOCATION:
                return cloneAccessExpr((BLangAccessExpression) expr);
            default:
                throw new IllegalStateException();
        }
    }

    private BLangAccessExpression cloneAccessExpr(BLangAccessExpression
originalAccessExpr) { if (originalAccessExpr.expr == null) { return originalAccessExpr; } BLangVariableReference varRef; NodeKind kind = originalAccessExpr.expr.getKind(); if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR || kind == NodeKind.INVOCATION) { varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr); } else { varRef = cloneExpression(originalAccessExpr.expr); } varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false); BLangAccessExpression accessExpr; switch (originalAccessExpr.getKind()) { case FIELD_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef, ((BLangFieldBasedAccess) originalAccessExpr).field); break; case INDEX_BASED_ACCESS_EXPR: accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef, ((BLangIndexBasedAccess) originalAccessExpr).indexExpr); break; case INVOCATION: accessExpr = null; break; default: throw new IllegalStateException(); } accessExpr.originalType = originalAccessExpr.originalType; accessExpr.pos = originalAccessExpr.pos; accessExpr.lhsVar = originalAccessExpr.lhsVar; accessExpr.symbol = originalAccessExpr.symbol; accessExpr.errorSafeNavigation = false; accessExpr.nilSafeNavigation = false; accessExpr.type = originalAccessExpr.originalType; return accessExpr; } private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD, symTable.intType, symTable.intType)); } private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) { BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L); return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB, (BOperatorSymbol) 
symResolver.resolveBinaryOperator(OperatorKind.SUB, symTable.intType, symTable.intType)); } private BLangLiteral getBooleanLiteral(boolean value) { BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression(); literal.value = value; literal.type = symTable.booleanType; return literal; } private boolean isDefaultableMappingType(BType type) { switch (types.getSafeType(type, true, false).tag) { case TypeTags.JSON: case TypeTags.MAP: case TypeTags.RECORD: return true; default: return false; } } private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) { BLangFunction initFunction = TypeDefBuilderHelper.createInitFunctionForStructureType(structureTypeNode, env, Names.GENERATED_INIT_SUFFIX, names, symTable); BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) structureTypeNode.type.tsymbol); typeSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX, initFunction.symbol, (BInvokableType) initFunction.type); structureTypeNode.generatedInitFunction = initFunction; initFunction.returnTypeNode.type = symTable.nilType; return rewrite(initFunction, env); } private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) { /* * Desugar (lhsExpr && rhsExpr) to following if-else: * * logical AND: * ------------- * T $result$; * if (lhsExpr) { * $result$ = rhsExpr; * } else { * $result$ = false; * } * * logical OR: * ------------- * T $result$; * if (lhsExpr) { * $result$ = true; * } else { * $result$ = rhsExpr; * } * */ BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos); BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos); BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangExpression thenResult; if (binaryExpr.opKind == OperatorKind.AND) { thenResult = binaryExpr.rhsExpr; } else { 
thenResult = getBooleanLiteral(true); } BLangAssignment thenAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult); thenBody.addStatement(thenAssignment); BLangExpression elseResult; BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); if (binaryExpr.opKind == OperatorKind.AND) { elseResult = getBooleanLiteral(false); } else { elseResult = binaryExpr.rhsExpr; } BLangAssignment elseAssignment = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult); elseBody.addStatement(elseAssignment); BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol); BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody); BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse)); BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef); stmtExpr.type = binaryExpr.type; result = rewriteExpr(stmtExpr); } /** * Split packahe init function into several smaller functions. 
* * @param packageNode package node * @param env symbol environment * @return initial init function but trimmed in size */ private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) { int methodSize = INIT_METHOD_SPLIT_SIZE; BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body; if (!isJvmTarget) { return packageNode.initFunction; } BLangFunction initFunction = packageNode.initFunction; List<BLangFunction> generatedFunctions = new ArrayList<>(); List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts); funcBody.stmts.clear(); BLangFunction newFunc = initFunction; BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body; int varDefIndex = 0; for (int i = 0; i < stmts.size(); i++) { BLangStatement statement = stmts.get(i); if (statement.getKind() == NodeKind.VARIABLE_DEF) { break; } varDefIndex++; if (i > 0 && (i % methodSize == 0 || isAssignmentWithInitOrRecordLiteralExpr(statement))) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } List<BLangStatement> chunkStmts = new ArrayList<>(); for (int i = varDefIndex; i < stmts.size(); i++) { BLangStatement stmt = stmts.get(i); chunkStmts.add(stmt); varDefIndex++; if ((stmt.getKind() == NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) && (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) { if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.addAll(chunkStmts); chunkStmts.clear(); } else if ((stmt.getKind() == 
NodeKind.ASSIGNMENT) && (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) && Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags, Flags.LISTENER) ) { break; } } newFuncBody.stmts.addAll(chunkStmts); for (int i = varDefIndex; i < stmts.size(); i++) { if (i > 0 && i % methodSize == 0) { generatedFunctions.add(newFunc); newFunc = createIntermediateInitFunction(packageNode, env); newFuncBody = (BLangBlockFunctionBody) newFunc.body; symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol); } newFuncBody.stmts.add(stmts.get(i)); } generatedFunctions.add(newFunc); for (int j = 0; j < generatedFunctions.size() - 1; j++) { BLangFunction thisFunction = generatedFunctions.get(j); BLangCheckedExpr checkedExpr = ASTBuilderUtil.createCheckExpr(initFunction.pos, createInvocationNode(generatedFunctions.get(j + 1).name.value, new ArrayList<>(), symTable.errorOrNilType), symTable.nilType); checkedExpr.equivalentErrorTypeList.add(symTable.errorType); BLangExpressionStmt expressionStmt = ASTBuilderUtil .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body); expressionStmt.expr = checkedExpr; expressionStmt.expr.pos = initFunction.pos; if (j > 0) { thisFunction = rewrite(thisFunction, env); packageNode.functions.add(thisFunction); packageNode.topLevelNodes.add(thisFunction); } } if (generatedFunctions.size() > 1) { BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1); lastFunc = rewrite(lastFunc, env); packageNode.functions.add(lastFunc); packageNode.topLevelNodes.add(lastFunc); } return generatedFunctions.get(0); } private boolean isAssignmentWithInitOrRecordLiteralExpr(BLangStatement statement) { if (statement.getKind() == NodeKind.ASSIGNMENT) { NodeKind exprKind = ((BLangAssignment) statement).getExpression().getKind(); return exprKind == NodeKind.TYPE_INIT_EXPR || exprKind == NodeKind.RECORD_LITERAL_EXPR; } return false; } /** * Create an intermediate package 
init function. * * @param pkgNode package node * @param env symbol environment of package */ private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) { String alias = pkgNode.symbol.pkgID.toString(); BLangFunction initFunction = ASTBuilderUtil .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++), symTable); createInvokableSymbol(initFunction, env); return initFunction; } private BType getRestType(BInvokableSymbol invokableSymbol) { if (invokableSymbol != null && invokableSymbol.restParam != null) { return invokableSymbol.restParam.type; } return null; } private BType getRestType(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.type; } return null; } private BVarSymbol getRestSymbol(BLangFunction function) { if (function != null && function.restParam != null) { return function.restParam.symbol; } return null; } private boolean isComputedKey(RecordLiteralNode.RecordField field) { if (!field.isKeyValueField()) { return false; } return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey; } private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) { List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields; BType type = mappingConstructorExpr.type; DiagnosticPos pos = mappingConstructorExpr.pos; List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size()); for (RecordLiteralNode.RecordField field : fields) { if (field.isKeyValueField()) { BLangRecordLiteral.BLangRecordKeyValueField keyValueField = (BLangRecordLiteral.BLangRecordKeyValueField) field; BLangRecordLiteral.BLangRecordKey key = keyValueField.key; BLangExpression origKey = key.expr; BLangExpression keyExpr = key.computedKey ? origKey : origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? 
createStringLiteral(pos, ((BLangSimpleVarRef) origKey).variableName.value) : ((BLangLiteral) origKey); rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr), rewriteExpr(keyValueField.valueExpr))); } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field; rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue( rewriteExpr(createStringLiteral(pos, varRefField.variableName.value)), rewriteExpr(varRefField))); } else { BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField = (BLangRecordLiteral.BLangRecordSpreadOperatorField) field; spreadOpField.expr = rewriteExpr(spreadOpField.expr); rewrittenFields.add(spreadOpField); } } fields.clear(); return type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type, rewrittenFields) : new BLangMapLiteral(pos, type, rewrittenFields); } public BSymbol getTransactionSymbol(SymbolEnv env) { return env.enclPkg.imports .stream() .filter(importPackage -> importPackage.symbol.pkgID.orgName.value.equals(Names.TRANSACTION_ORG.value) && importPackage.symbol.pkgID.name.value.equals(Names.TRANSACTION_PACKAGE.value)) .findAny().get().symbol; } }
class Desugar extends BLangNodeVisitor { private static final CompilerContext.Key<Desugar> DESUGAR_KEY = new CompilerContext.Key<>(); private static final String BASE_64 = "base64"; private static final String ERROR_REASON_FUNCTION_NAME = "reason"; private static final String ERROR_DETAIL_FUNCTION_NAME = "detail"; private static final String TO_STRING_FUNCTION_NAME = "toString"; private static final String LENGTH_FUNCTION_NAME = "length"; private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException"; private static final String CLONE_WITH_TYPE = "cloneWithType"; private static final String SLICE_LANGLIB_METHOD = "slice"; private static final String PUSH_LANGLIB_METHOD = "push"; private static final String DESUGARED_VARARG_KEY = "$vararg$"; public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants"; public static final String XML_INTERNAL_CHILDREN = "children"; public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat"; public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting"; public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute"; public static final String XML_INTERNAL_GET_ELEMENTS = "getElements"; public static final String XML_GET_CONTENT_OF_TEXT = "getContent"; private SymbolTable symTable; private SymbolResolver symResolver; private final SymbolEnter symbolEnter; private ClosureDesugar closureDesugar; private QueryDesugar queryDesugar; private TransactionDesugar transactionDesugar; private AnnotationDesugar annotationDesugar; private Types types; private Names names; private ServiceDesugar serviceDesugar; private BLangNode result; private NodeCloner nodeCloner; private SemanticAnalyzer semanticAnalyzer; private BLangAnonymousModelHelper anonModelHelper; private BLangStatementLink currentLink; public Stack<BLangLockStmt> enclLocks = new Stack<>(); private SymbolEnv env; private int lambdaFunctionCount = 0; private int 
transactionIndex = 0; private int recordCount = 0; private int errorCount = 0; private int annonVarCount = 0; private int initFuncIndex = 0; private int indexExprCount = 0; private int letCount = 0; private int varargCount = 0; private Stack<BLangMatch> matchStmtStack = new Stack<>(); Stack<BLangExpression> accessExprStack = new Stack<>(); private BLangMatchTypedBindingPatternClause successPattern; private BLangAssignment safeNavigationAssignment; static boolean isJvmTarget = false; public static Desugar getInstance(CompilerContext context) { Desugar desugar = context.get(DESUGAR_KEY); if (desugar == null) { desugar = new Desugar(context); } return desugar; } private Desugar(CompilerContext context) { isJvmTarget = true; context.put(DESUGAR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.closureDesugar = ClosureDesugar.getInstance(context); this.queryDesugar = QueryDesugar.getInstance(context); this.transactionDesugar = TransactionDesugar.getInstance(context); this.annotationDesugar = AnnotationDesugar.getInstance(context); this.types = Types.getInstance(context); this.names = Names.getInstance(context); this.names = Names.getInstance(context); this.serviceDesugar = ServiceDesugar.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); this.anonModelHelper = BLangAnonymousModelHelper.getInstance(context); } public BLangPackage perform(BLangPackage pkgNode) { annotationDesugar.initializeAnnotationMap(pkgNode); SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol); return rewrite(pkgNode, env); } private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) { for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) { if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) { continue; } if (typeDef.symbol.tag == SymTag.OBJECT) 
{ BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode; objectTypeNode.functions.forEach(f -> { if (!pkgNode.objAttachedFunctions.contains(f.symbol)) { pkgNode.functions.add(f); pkgNode.topLevelNodes.add(f); } }); if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) { continue; } BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env); tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction, tempGeneratedInitFunction.symbol.scope, env); this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env); objectTypeNode.generatedInitFunction = tempGeneratedInitFunction; pkgNode.functions.add(objectTypeNode.generatedInitFunction); pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction); if (objectTypeNode.initFunction != null) { pkgNode.functions.add(objectTypeNode.initFunction); pkgNode.topLevelNodes.add(objectTypeNode.initFunction); } } else if (typeDef.symbol.tag == SymTag.RECORD) { BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode; recordTypeNode.initFunction = rewrite( TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable), env); pkgNode.functions.add(recordTypeNode.initFunction); pkgNode.topLevelNodes.add(recordTypeNode.initFunction); } } } /** * This method synthesizes an initializer method for objects which is responsible for initializing the default * values given to fields. When a user creates a new instance of the object, first, this synthesized initializer is * invoked on the newly created object instance. Then, if there is a user-defined init method (i.e., the init() * method), an method call expression for this init() method is added in the return statement of the synthesized * initializer. When desugaring, the following method adds params and return type for the synthesized initializer by * looking at the params and return type of the user-defined init() method. 
Therefore, when desugaring object type * nodes, one should always take care to call this method **after** desugaring the init() method (if there is * supposed to be one). * * @param objectTypeNode The object type node for which the initializer is created * @param env The env for the type node * @return The generated initializer method */ private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) { BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env); if (objectTypeNode.initFunction == null) { return generatedInitFunc; } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc; BAttachedFunction generatedInitializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc; addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc); addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc); generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode; generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type; ((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes; ((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType; ((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType; generatedInitializerFunc.type = initializerFunc.type; generatedInitFunc.desugared = false; return generatedInitFunc; } private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc, BAttachedFunction generatedInitializerFunc) { if (initFunction.requiredParams.isEmpty()) { return; } for (BLangSimpleVariable requiredParameter : initFunction.requiredParams) { BLangSimpleVariable var = ASTBuilderUtil.createVariable(initFunction.pos, 
requiredParameter.name.getValue(), requiredParameter.type, createRequiredParamExpr(requiredParameter.expr), new BVarSymbol(0, names.fromString(requiredParameter.name.getValue()), requiredParameter.symbol.pkgID, requiredParameter.type, requiredParameter.symbol.owner)); generatedInitFunc.requiredParams.add(var); generatedInitializerFunc.symbol.params.add(var.symbol); } } private BLangExpression createRequiredParamExpr(BLangExpression expr) { if (expr == null) { return null; } if (expr.getKind() == NodeKind.LAMBDA) { BLangFunction func = ((BLangLambdaFunction) expr).function; return createLambdaFunction(func.pos, func.name.value, func.requiredParams, func.returnTypeNode, func.body); } BLangExpression expression = this.nodeCloner.clone(expr); if (expression.getKind() == NodeKind.ARROW_EXPR) { BLangIdentifier func = (BLangIdentifier) ((BLangArrowFunction) expression).functionName; ((BLangArrowFunction) expression).functionName = ASTBuilderUtil.createIdentifier(func.pos, "$" + func.getValue() + "$"); } return expression; } private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc, BAttachedFunction generatedInitializerFunc) { if (initFunction.restParam == null) { return; } BLangSimpleVariable restParam = initFunction.restParam; generatedInitFunc.restParam = ASTBuilderUtil.createVariable(initFunction.pos, restParam.name.getValue(), restParam.type, null, new BVarSymbol(0, names.fromString(restParam.name.getValue()), restParam.symbol.pkgID, restParam.type, restParam.symbol.owner)); generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol; } /** * Create package init functions. 
* * @param pkgNode package node * @param env symbol environment of package */ private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) { String alias = pkgNode.symbol.pkgID.toString(); pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, Names.INIT_FUNCTION_SUFFIX, symTable); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; for (BLangXMLNS xmlns : pkgNode.xmlnsList) { initFnBody.addStatement(createNamespaceDeclrStatement(xmlns)); } pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, Names.START_FUNCTION_SUFFIX, symTable); pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias, Names.STOP_FUNCTION_SUFFIX); createInvokableSymbol(pkgNode.initFunction, env); createInvokableSymbol(pkgNode.startFunction, env); createInvokableSymbol(pkgNode.stopFunction, env); } private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) { Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream() .filter(bLangFunction -> !bLangFunction.attachedFunction && bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value)) .findFirst(); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; if (!userDefInitOptional.isPresent()) { addNilReturnStatement(initFnBody); return; } BLangFunction userDefInit = userDefInitOptional.get(); BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); userDefInitInvocation.pos = pkgNode.initFunction.pos; BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode(); name.setLiteral(false); name.setValue(userDefInit.name.value); userDefInitInvocation.name = name; userDefInitInvocation.symbol = userDefInit.symbol; BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode(); pkgAlias.setLiteral(false); 
pkgAlias.setValue(pkgNode.packageID.name.value); userDefInitInvocation.pkgAlias = pkgAlias; userDefInitInvocation.type = userDefInit.returnTypeNode.type; userDefInitInvocation.requiredArgs = Collections.emptyList(); BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode(); returnStmt.pos = pkgNode.initFunction.pos; returnStmt.expr = userDefInitInvocation; initFnBody.stmts.add(returnStmt); } /** * Create invokable symbol for function. * * @param bLangFunction function node * @param env Symbol environment */ private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) { BType returnType = bLangFunction.returnTypeNode.type == null ? symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env) : bLangFunction.returnTypeNode.type; BInvokableType invokableType = new BInvokableType(new ArrayList<>(), getRestType(bLangFunction), returnType, null); BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet), new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true); functionSymbol.retType = returnType; for (BLangVariable param : bLangFunction.requiredParams) { functionSymbol.params.add(param.symbol); } functionSymbol.scope = new Scope(functionSymbol); bLangFunction.symbol = functionSymbol; } /** * Add nil return statement. * * @param bLangBlockStmt block statement node */ private void addNilReturnStatement(BlockNode bLangBlockStmt) { BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(((BLangNode) bLangBlockStmt).pos, symTable.nilType); bLangBlockStmt.addStatement(returnStmt); } /** * Create namespace declaration statement for XMNLNS. 
* * @param xmlns XMLNS node * @return XMLNS statement */ private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) { BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode(); xmlnsStmt.xmlnsDecl = xmlns; xmlnsStmt.pos = xmlns.pos; return xmlnsStmt; } @Override public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) { result = pkgNode; return; } createPackageInitFunctions(pkgNode, env); addAttachedFunctionsToPackageLevel(pkgNode, env); pkgNode.constants.stream() .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL || constant.expr.getKind() == NodeKind.NUMERIC_LITERAL) .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition)); BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env); BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body; for (BLangConstant constant : pkgNode.constants) { if (constant.symbol.type.tag == TypeTags.MAP) { BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol); constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode, pkgNode.initFunction.symbol.scope, env)); BLangInvocation frozenConstValExpr = createLangLibInvocationNode( "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos); BLangAssignment constInit = ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr); initFnBody.stmts.add(constInit); } } pkgNode.globalVars.forEach(globalVar -> { BLangAssignment assignment = createAssignmentStmt(globalVar); if (assignment.expr != null) { initFnBody.stmts.add(assignment); } }); pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env)); annotationDesugar.rewritePackageAnnotations(pkgNode, env); addUserDefinedModuleInitInvocationAndReturn(pkgNode); 
pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence)); pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env); pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env); pkgNode.constants = rewrite(pkgNode.constants, env); pkgNode.globalVars = rewrite(pkgNode.globalVars, env); pkgNode.functions = rewrite(pkgNode.functions, env); serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction); ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body); addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body); addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body); pkgNode.initFunction = splitInitFunction(pkgNode, env); pkgNode.initFunction = rewrite(pkgNode.initFunction, env); pkgNode.startFunction = rewrite(pkgNode.startFunction, env); pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env); closureDesugar.visit(pkgNode); for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) { rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol)); } pkgNode.completedPhases.add(CompilerPhase.DESUGAR); initFuncIndex = 0; result = pkgNode; } @Override public void visit(BLangImportPackage importPkgNode) { BPackageSymbol pkgSymbol = importPkgNode.symbol; SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol); rewrite(pkgEnv.node, pkgEnv); result = importPkgNode; } @Override public void visit(BLangTypeDefinition typeDef) { if (typeDef.typeNode.getKind() == NodeKind.OBJECT_TYPE || typeDef.typeNode.getKind() == NodeKind.RECORD_TYPE) { typeDef.typeNode = rewrite(typeDef.typeNode, env); } typeDef.annAttachments.forEach(attachment -> rewrite(attachment, env)); result = typeDef; } @Override public void visit(BLangObjectTypeNode objectTypeNode) { objectTypeNode.fields.addAll(objectTypeNode.referencedFields); if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) { result = objectTypeNode; return; } for (BLangSimpleVariable 
                bLangSimpleVariable : objectTypeNode.fields) {
            bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
        }

        // Prepend field default-value assignments (for fields not already covered) to the
        // generated init function, preserving field order via the index-based insert below.
        Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts;
        for (BLangSimpleVariable field : objectTypeNode.fields) {
            if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) {
                initFuncStmts.put(field.symbol,
                        createStructFieldUpdate(objectTypeNode.generatedInitFunction, field,
                                objectTypeNode.generatedInitFunction.receiver.symbol));
            }
        }

        BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]);
        BLangBlockFunctionBody generatedInitFnBody =
                (BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body;
        int i;
        for (i = 0; i < initStmts.length; i++) {
            generatedInitFnBody.stmts.add(i, initStmts[i]);
        }

        // If a user-defined init exists, the generated init returns its invocation result.
        // After the loop, index i points at the return statement following the inserted stmts.
        if (objectTypeNode.initFunction != null) {
            ((BLangReturn) generatedInitFnBody.stmts.get(i)).expr =
                    createUserDefinedInitInvocation(objectTypeNode);
        }

        for (BLangFunction fn : objectTypeNode.functions) {
            rewrite(fn, this.env);
        }
        rewrite(objectTypeNode.generatedInitFunction, this.env);
        rewrite(objectTypeNode.initFunction, this.env);
        result = objectTypeNode;
    }

    /**
     * Builds the invocation of the user-defined {@code init} function from inside the
     * generated init function, forwarding all required params and the rest param (if any).
     *
     * @param objectTypeNode object type whose user-defined init is invoked
     * @return rewritten invocation expression
     */
    private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) {
        ArrayList<BLangExpression> paramRefs = new ArrayList<>();
        for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) {
            paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol));
        }

        BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos,
                ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol,
                paramRefs, Collections.emptyList(), symResolver);
        if (objectTypeNode.generatedInitFunction.restParam != null) {
            // Forward the rest param as a rest-args (spread) expression.
            BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos,
                    objectTypeNode.generatedInitFunction.restParam.symbol);
            BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();
            bLangRestArgsExpression.expr = restVarRef;
            bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos;
            bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type;
            bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type;
            invocation.restArgs.add(bLangRestArgsExpression);
        }
        invocation.exprSymbol =
                ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol;

        return rewriteExpr(invocation);
    }

    @Override
    public void visit(BLangRecordTypeNode recordTypeNode) {
        // Fields inherited via type references are materialized as regular fields.
        recordTypeNode.fields.addAll(recordTypeNode.referencedFields);

        for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) {
            bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
        }

        // Records may not yet have an init function (e.g. anonymous records); create one lazily.
        if (recordTypeNode.initFunction == null) {
            recordTypeNode.initFunction =
                    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            env.enclPkg.addFunction(recordTypeNode.initFunction);
            env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction);
        }

        // Default-value assignments for non-optional fields not already covered by the init fn.
        for (BLangSimpleVariable field : recordTypeNode.fields) {
            if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol) &&
                    !Symbols.isOptional(field.symbol) && field.expr != null) {
                recordTypeNode.initFunction.initFunctionStmts
                        .put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field,
                                recordTypeNode.initFunction.receiver.symbol));
            }
        }

        BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts
                .values().toArray(new BLangStatement[0]);
        BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body;
        for (int i = 0; i < recordTypeNode.initFunction.initFunctionStmts.size(); i++) {
            initFnBody.stmts.add(i, initStmts[i]);
        }

        // Local anonymous records are replaced by a reference to a lifted type definition.
        if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) {
            BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode);
            TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol,
                    recordTypeNode, env);
            recordTypeNode.desugared = true;
            result = userDefinedType;
            return;
        }

        result = recordTypeNode;
    }

    /**
     * Replaces a local anonymous record type node with a user-defined type node
     * referring to the lifted type definition.
     */
    private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) {
        return ASTBuilderUtil.createUserDefineTypeNode(recordTypeNode.symbol.name.value,
                recordTypeNode.type, recordTypeNode.pos);
    }

    // The following type-node visits simply rewrite their constituent type nodes in place.

    @Override
    public void visit(BLangArrayType arrayType) {
        arrayType.elemtype = rewrite(arrayType.elemtype, env);
        result = arrayType;
    }

    @Override
    public void visit(BLangConstrainedType constrainedType) {
        constrainedType.constraint = rewrite(constrainedType.constraint, env);
        result = constrainedType;
    }

    @Override
    public void visit(BLangStreamType streamType) {
        streamType.constraint = rewrite(streamType.constraint, env);
        streamType.error = rewrite(streamType.error, env);
        result = streamType;
    }

    @Override
    public void visit(BLangTableTypeNode tableTypeNode) {
        tableTypeNode.constraint = rewrite(tableTypeNode.constraint, env);
        tableTypeNode.tableKeyTypeConstraint = rewrite(tableTypeNode.tableKeyTypeConstraint, env);
        result = tableTypeNode;
    }

    @Override
    public void visit(BLangTableKeyTypeConstraint keyTypeConstraint) {
        keyTypeConstraint.keyType = rewrite(keyTypeConstraint.keyType, env);
        result = keyTypeConstraint;
    }

    @Override
    public void visit(BLangValueType valueType) {
        // Value types need no desugaring.
        result = valueType;
    }

    @Override
    public void visit(BLangUserDefinedType userDefinedType) {
        // User-defined type references need no desugaring.
        result = userDefinedType;
    }

    @Override
    public void visit(BLangUnionTypeNode unionTypeNode) {
        List<BLangType> rewrittenMembers = new ArrayList<>();
        unionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env)));
        unionTypeNode.memberTypeNodes = rewrittenMembers;
        result = unionTypeNode;
    }

    @Override
    public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
        List<BLangType> rewrittenConstituents = new ArrayList<>();
        for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {
            rewrittenConstituents.add(rewrite(constituentTypeNode, env));
        }
        intersectionTypeNode.constituentTypeNodes = rewrittenConstituents;
        result = intersectionTypeNode;
    }

    @Override
    public void visit(BLangErrorType errorType) {
        errorType.detailType = rewrite(errorType.detailType, env);
        result = errorType;
    }

    @Override
    public void visit(BLangFunctionTypeNode functionTypeNode) {
        functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env));
        functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env);
        result = functionTypeNode;
    }

    @Override
    public void visit(BLangBuiltInRefTypeNode refTypeNode) {
        // Built-in reference types need no desugaring.
        result = refTypeNode;
    }

    @Override
    public void visit(BLangTupleTypeNode tupleTypeNode) {
        List<BLangType> rewrittenMembers = new ArrayList<>();
        tupleTypeNode.memberTypeNodes.forEach(member -> rewrittenMembers.add(rewrite(member, env)));
        tupleTypeNode.memberTypeNodes = rewrittenMembers;
        tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env);
        result = tupleTypeNode;
    }

    @Override
    public void visit(BLangBlockFunctionBody body) {
        // Statements are rewritten in a fresh function-body environment.
        SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
        body.stmts = rewriteStmt(body.stmts, bodyEnv);
        result = body;
    }

    @Override
    public void visit(BLangExprFunctionBody exprBody) {
        // An expression body is desugared into a block body with a single return statement.
        BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>());
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body);
        returnStmt.expr = rewriteExpr(exprBody.expr);
        result = body;
    }

    @Override
    public void visit(BLangExternalFunctionBody body) {
        // Only the annotation attachments of an external body are desugared.
        for (BLangAnnotationAttachment attachment : body.annAttachments) {
            rewrite(attachment, env);
        }
        result = body;
    }

    @Override
    public void visit(BLangFunction funcNode) {
        SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
        if (!funcNode.interfaceFunction) {
            addReturnIfNotPresent(funcNode);
        }

        // Duplicate the symbol before rewriting; the original is kept for later reference.
        funcNode.originalFuncSymbol = funcNode.symbol;
        funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol);
        funcNode.requiredParams = rewrite(funcNode.requiredParams,
                funcEnv);
        funcNode.restParam = rewrite(funcNode.restParam, funcEnv);
        funcNode.workers = rewrite(funcNode.workers, funcEnv);

        if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {
            funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);
        }
        funcNode.body = rewrite(funcNode.body, funcEnv);
        funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
        if (funcNode.returnTypeNode != null) {
            funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
        }

        result = funcNode;
    }

    @Override
    public void visit(BLangResource resourceNode) {
        // Intentionally empty: resources are handled by the service desugaring path.
    }

    public void visit(BLangAnnotation annotationNode) {
        annotationNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    }

    public void visit(BLangAnnotationAttachment annAttachmentNode) {
        annAttachmentNode.expr = rewrite(annAttachmentNode.expr, env);
        result = annAttachmentNode;
    }

    @Override
    public void visit(BLangSimpleVariable varNode) {
        // For variables not owned by an invokable or a let expression (i.e. module-level
        // variables), the initializer has already been moved into the init function, so the
        // expression is dropped here.
        if (((varNode.symbol.owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE)
                && (varNode.symbol.owner.tag & SymTag.LET) != SymTag.LET) {
            varNode.expr = null;
            result = varNode;
            return;
        }

        if (varNode.typeNode != null && varNode.typeNode.getKind() != null) {
            varNode.typeNode = rewrite(varNode.typeNode, env);
        }

        BLangExpression bLangExpression = rewriteExpr(varNode.expr);
        if (bLangExpression != null) {
            bLangExpression = addConversionExprIfRequired(bLangExpression, varNode.type);
        }
        varNode.expr = bLangExpression;

        varNode.annAttachments.forEach(attachment -> rewrite(attachment, env));

        result = varNode;
    }

    @Override
    public void visit(BLangLetExpression letExpression) {
        // A let expression is desugared into a statement expression: a block that defines
        // the let variables followed by a temp var holding the result expression.
        SymbolEnv prevEnv = this.env;
        this.env = letExpression.env;
        BLangExpression expr = letExpression.expr;
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
        for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
            BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
            // Destructuring let vars desugar into blocks; flatten their statements in place.
            if (node.getKind() == NodeKind.BLOCK) {
                blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
            } else {
                blockStmt.addStatement((BLangSimpleVariableDef) node);
            }
        }
        BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
                expr.type, expr, expr.pos);
        BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
        blockStmt.addStatement(tempVarDef);
        BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
        stmtExpr.type = expr.type;
        result = rewrite(stmtExpr, env);
        this.env = prevEnv;
    }

    @Override
    public void visit(BLangTupleVariable varNode) {
        // Desugars a tuple binding pattern: bind the whole RHS to a synthetic "$tuple$"
        // variable, then generate per-member (and rest-field) var defs from it.
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varNode.pos);
        String name = "$tuple$";
        final BLangSimpleVariable tuple =
                ASTBuilderUtil.createVariable(varNode.pos, name, symTable.arrayAllType, null,
                        new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                                symTable.arrayAllType, this.env.scope.owner));
        tuple.expr = varNode.expr;
        final BLangSimpleVariableDef variableDef =
                ASTBuilderUtil.createVariableDefStmt(varNode.pos, blockStmt);
        variableDef.var = tuple;

        createVarDefStmts(varNode, blockStmt, tuple.symbol, null);
        createRestFieldVarDefStmts(varNode, blockStmt, tuple.symbol);

        result = rewrite(blockStmt, env);
    }

    @Override
    public void visit(BLangRecordVariable varNode) {
        // Desugars a record binding pattern: bind the whole RHS to a synthetic "$map$0"
        // variable, then generate per-field var defs from it.
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varNode.pos);
        final BLangSimpleVariable mapVariable =
                ASTBuilderUtil.createVariable(varNode.pos, "$map$0", symTable.mapAllType, null,
                        new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                                symTable.mapAllType, this.env.scope.owner));
        mapVariable.expr = varNode.expr;
        final BLangSimpleVariableDef variableDef =
                ASTBuilderUtil.createVariableDefStmt(varNode.pos, blockStmt);
        variableDef.var = mapVariable;

        createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);

        result = rewrite(blockStmt, env);
    }

    @Override
    public void visit(BLangErrorVariable varNode) {
        // Desugars an error binding pattern: bind the whole RHS to a synthetic "$error$"
        // variable, then generate reason/detail var defs from it.
        final BLangBlockStmt blockStmt
                = ASTBuilderUtil.createBlockStmt(varNode.pos);
        BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"),
                this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
        final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varNode.pos,
                errorVarSymbol.name.value, symTable.errorType, null, errorVarSymbol);
        error.expr = varNode.expr;
        final BLangSimpleVariableDef variableDef =
                ASTBuilderUtil.createVariableDefStmt(varNode.pos, blockStmt);
        variableDef.var = error;

        createVarDefStmts(varNode, blockStmt, error.symbol, null);

        result = rewrite(blockStmt, env);
    }

    @Override
    public void visit(BLangBlockStmt block) {
        SymbolEnv blockEnv = SymbolEnv.createBlockEnv(block, env);
        block.stmts = rewriteStmt(block.stmts, blockEnv);
        result = block;
    }

    @Override
    public void visit(BLangSimpleVariableDef varDefNode) {
        varDefNode.var = rewrite(varDefNode.var, env);
        result = varDefNode;
    }

    @Override
    public void visit(BLangTupleVariableDef varDefNode) {
        // The tuple variable itself desugars into the whole replacement statement.
        result = rewrite(varDefNode.var, env);
    }

    /**
     * Creates the var-def statements for a tuple binding pattern's rest variable
     * ({@code [a, b, ...rest]}): the rest array is filled by a generated foreach that
     * copies elements from the member count onward out of the bound tuple.
     *
     * @param parentTupleVariable tuple variable being destructured
     * @param blockStmt           block to append the generated statements to
     * @param tupleVarSymbol      symbol of the synthetic variable holding the tuple value
     */
    private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable,
                                            BLangBlockStmt blockStmt,
                                            BVarSymbol tupleVarSymbol) {
        final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
        boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
        DiagnosticPos pos = blockStmt.pos;
        if (arrayVar != null) {
            // rest var starts out as an empty array literal of its own type.
            BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
            arrayExpr.type = arrayVar.type;
            arrayVar.expr = arrayExpr;
            BLangSimpleVariableDef arrayVarDef =
                    ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
            arrayVarDef.var = arrayVar;
            BLangExpression tupleExpr = parentTupleVariable.expr;
            BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);

            // Start index = number of explicitly bound members.
            BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
            startIndexLiteral.value = (long) (isTupleType
                    ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                    : parentTupleVariable.memberVariables.size());
            startIndexLiteral.type = symTable.intType;
            BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
            BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                    getModifiedIntRangeEndExpr(lengthInvocation));

            // foreach $foreach$i in start ... length { rest[rest.length()] = tuple[$foreach$i]; }
            BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
            foreach.pos = pos;
            foreach.collection = intRangeInvocation;
            types.setForeachTypedBindingPatternType(foreach);

            final BLangSimpleVariable foreachVariable =
                    ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType);
            foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                    this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
            BLangSimpleVarRef foreachVarRef =
                    ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
            foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
            foreach.isDeclaredWithVar = true;
            BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);

            // Append to the rest array: indexing by the array's current length pushes.
            BLangIndexBasedAccess indexAccessExpr =
                    ASTBuilderUtil.createIndexAccessExpr(arrayVarRef, createLengthInvocation(pos, arrayVarRef));
            indexAccessExpr.type = (isTupleType
                    ? ((BTupleType) parentTupleVariable.type).restType
                    : symTable.anyType);
            createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef,
                    tupleVarSymbol, null);
            foreach.body = foreachBody;
            blockStmt.addStatement(foreach);
        }
    }

    @Override
    public void visit(BLangRecordVariableDef varDefNode) {
        // The record variable itself desugars into the whole replacement statement.
        result = rewrite(varDefNode.var, env);
    }

    @Override
    public void visit(BLangErrorVariableDef varDefNode) {
        // The error variable itself desugars into the whole replacement statement.
        result = rewrite(varDefNode.errorVariable, env);
    }

    /**
     * Iterates through each member of the tuple variable and creates the relevant
     * var-def statements, checking the node kind of each member and delegating to
     * the matching var-def creation method.
     * Example:
     *   ((string, float), int) binding ((a, b), c) = tuple
     *
     *   (a, b) is again a tuple, so its var defs are created recursively;
     *   c is a simple variable, so a simple var def is created for it.
     */
    private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {

        final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
        for (int index = 0; index < memberVars.size(); index++) {
            BLangVariable variable = memberVars.get(index);
            BLangLiteral indexExpr =
                    ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);

            if (NodeKind.VARIABLE == variable.getKind()) {
                // Leaf binding: var = tuple[index];
                createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr,
                        tupleVarSymbol, parentIndexAccessExpr);
                continue;
            }

            if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
                // Nested tuple: recurse with an index access into the parent tuple.
                BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
                BLangIndexBasedAccess arrayAccessExpr =
                        ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                                new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol,
                        arrayAccessExpr);
                continue;
            }

            if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
                // Nested record: recurse with a map-typed index access.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol,
                        arrayAccessExpr);
                continue;
            }

            if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                // Nested error: element type is taken from the array element type if available.
                BType accessedElemType = symTable.errorType;
                if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                    BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                    accessedElemType = arrayType.eType;
                }
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol,
                        arrayAccessExpr);
            }
        }
    }

    /**
     * Overloaded method to handle record variables.
     * Iterates through each member of the record variable and creates the relevant
     * var-def statements, dispatching on each member's node kind.
     *
     * Example:
     *   type Foo record { string name; (int, string) age; Address address; };
     *   Foo {name: a, age: (b, c), address: d} = recordLiteral
     *
     *   a is a simple variable, so a simple var def is created;
     *   (b, c) is a tuple, so its var defs are created recursively;
     *   d is a record, so its var defs are created recursively.
     */
    private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {

        List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
        for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
            BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
            // Record members are accessed by field-name string keys.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                    recordFieldKeyValue.key.value);

            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
                createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern,
                        parentBlockStmt, indexExpr, recordVarSymbol, parentIndexAccessExpr);
                continue;
            }

            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
                BLangTupleVariable tupleVariable =
                        (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
                BLangIndexBasedAccess arrayAccessExpr =
                        ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos, new
                        BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                        parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }

            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
                // Nested record member: recurse with a map-typed field access.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern,
                        parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }

            if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                // Nested error member: recurse with a field access typed to the member.
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol,
                        arrayAccessExpr);
            }
        }

        // Rest binding ({...rest}): collect every field not explicitly bound above into a
        // new map, filtered and reconstructed via generateRestFilter.
        if (parentRecordVariable.restParam != null) {
            DiagnosticPos pos = parentBlockStmt.pos;
            BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
            BLangSimpleVarRef variableReference;

            if (parentIndexAccessExpr != null) {
                // Materialize the nested record value into a temp so it can be filtered.
                BLangSimpleVariable mapVariable =
                        ASTBuilderUtil.createVariable(pos, "$map$1", parentIndexAccessExpr.type, null,
                                new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
                                        parentIndexAccessExpr.type, this.env.scope.owner));
                mapVariable.expr = parentIndexAccessExpr;
                BLangSimpleVariableDef variableDef =
                        ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
                variableDef.var = mapVariable;

                variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
            } else {
                // Top level: the first statement in the block binds the whole record value.
                variableReference = ASTBuilderUtil.createVariableRef(pos,
                        ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
            }

            List<String> keysToRemove = parentRecordVariable.variableList.stream()
                    .map(var -> var.getKey().getValue())
                    .collect(Collectors.toList());

            BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                    keysToRemove, restParamType, parentBlockStmt);

            BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);

            // Bind the filtered map to the rest parameter.
            BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
            BLangSimpleVariableDef restParamVarDef =
                    ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            restParamVarDef.var = restParam;
            restParamVarDef.var.type = restParamType;
            restParam.expr = varRef;
        }
    }

    /**
     * This method will create the relevant var def statements for reason and details of the
     * error variable. The var def statements are created by invoking the reason() and
     * detail() builtin methods on the bound error value.
     */
    private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {

        BVarSymbol convertedErrorVarSymbol;
        if (parentIndexBasedAccess != null) {
            // The error was read out of a container: cast it to error into a fresh temp var.
            BType prevType = parentIndexBasedAccess.type;
            parentIndexBasedAccess.type = symTable.anyType;
            BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++,
                    symTable.errorType,
                    addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                    parentErrorVariable.pos);
            parentIndexBasedAccess.type = prevType;
            parentBlockStmt.addStatement(errorVarDef);
            convertedErrorVarSymbol = errorVarDef.var.symbol;
        } else {
            convertedErrorVarSymbol = errorVariableSymbol;
        }

        parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(
                parentErrorVariable.reason.pos, parentErrorVariable.reason.type,
                convertedErrorVarSymbol, null);

        // "_" reason bindings are dropped entirely.
        if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
            parentErrorVariable.reason = null;
        } else {
            BLangSimpleVariableDef reasonVariableDef =
                    ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
            reasonVariableDef.var = parentErrorVariable.reason;
        }

        if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
                && parentErrorVariable.restDetail == null) {
            return;
        }

        BType detailMapType;
        BType detailType = ((BErrorType) parentErrorVariable.type).detailType;
        if (detailType.tag == TypeTags.MAP) {
            detailMapType = detailType;
        } else {
            detailMapType = symTable.detailType;
        }

        parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
                parentErrorVariable.pos, convertedErrorVarSymbol, null);

        // Hold the detail mapping in a temp so each detail entry reads from it.
        BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
                parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr,
                parentErrorVariable.pos);
        detailTempVarDef.type = parentErrorVariable.detailExpr.type;
        parentBlockStmt.addStatement(detailTempVarDef);
        this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);

        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
            BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
            createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
        }

        // Rest binding of the detail mapping: filter out explicitly bound keys, skip "_".
        if (parentErrorVariable.restDetail != null &&
                !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
            DiagnosticPos pos = parentErrorVariable.restDetail.pos;
            BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                    pos, detailTempVarDef.var.symbol);
            List<String> keysToRemove = parentErrorVariable.detail.stream()
                    .map(detail -> detail.key.getValue())
                    .collect(Collectors.toList());

            BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef,
                    parentErrorVariable.pos, keysToRemove, parentErrorVariable.restDetail.type,
                    parentBlockStmt);

            BLangSimpleVariableDef variableDefStmt =
                    ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                    parentErrorVariable.restDetail.name.value,
                    filteredDetail.type,
                    ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                    parentErrorVariable.restDetail.symbol);
            BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                    ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                    ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
            parentBlockStmt.addStatement(assignmentStmt);
        }
        rewrite(parentBlockStmt, env);
    }

    /**
     * Wraps the given error value in a temp variable of the target type, adding a
     * conversion unless the target is a record type.
     */
    private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
                                                         BType targetType) {
        BVarSymbol errorVarSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
                this.env.enclPkg.packageID, targetType, this.env.scope.owner);
        BLangSimpleVarRef variableRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);

        BLangExpression expr;
        if (targetType.tag == TypeTags.RECORD) {
            expr = variableRef;
        } else {
            expr = addConversionExprIfRequired(variableRef, targetType);
        }

        BLangSimpleVariable errorVar =
                ASTBuilderUtil.createVariable(pos, errorVarSym.name.value, targetType, expr, errorVarSym);
        return ASTBuilderUtil.createVariableDef(pos, errorVar);
    }

    /**
     * Builds a new mapping from {@code mapVarRef} with {@code keysToRemove} dropped, by
     * chaining entries() -> filter(...) -> map(...) -> cloneWithType, materializing each
     * intermediate result in a temp variable appended to {@code parentBlockStmt}.
     *
     * @return the temp variable holding the filtered mapping, typed as {@code targetType}
     */
    private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                                   List<String> keysToRemove, BType targetType,
                                                   BLangBlockStmt parentBlockStmt) {
        BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);

        int restNum = annonVarCount++;
        String name = "$map$ref$" + restNum;
        BLangSimpleVariable mapVariable =
                defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);

        BLangInvocation entriesInvocation = generateMapEntriesInvocation(
                ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type);
        String entriesVarName = "$map$ref$entries$" + restNum;
        // entries() yields map<[string, V]> for a map<V> source.
        BType entriesType = new BMapType(TypeTags.MAP,
                new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
        BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType,
                parentBlockStmt,
                addConversionExprIfRequired(entriesInvocation, entriesType),
                entriesVarName);

        // Filter out the explicitly bound keys.
        BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);

        BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
        String filteredEntriesName = "$filtered$detail$entries" + restNum;
        BLangSimpleVariable filteredVar =
                defVariable(pos, entriesType, parentBlockStmt, filterInvocation, filteredEntriesName);

        // Convert the surviving entries back to a plain mapping of values.
        String filteredVarName = "$detail$filtered" + restNum;
        BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
        BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
        BLangSimpleVariable filtered =
                defVariable(pos, targetType, parentBlockStmt, mapInvocation, filteredVarName);

        String filteredRestVarName = "$restVar$" + restNum;
        BLangInvocation constructed =
                generateCloneWithTypeInvocation(pos, targetType, filtered.symbol);
        return defVariable(pos, targetType, parentBlockStmt,
                addConversionExprIfRequired(constructed, targetType),
                filteredRestVarName);
    }

    /**
     * Builds a lang-lib {@code entries()} invocation on the given mapping expression.
     */
    private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) {
        BLangInvocation invocationNode = createInvocationNode("entries", new ArrayList<>(), type);

        invocationNode.expr = expr;
        invocationNode.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries"));
        invocationNode.requiredArgs = Lists.of(expr);
        invocationNode.type = invocationNode.symbol.type.getReturnType();
        invocationNode.langLibInvocation = true;
        return invocationNode;
    }

    /**
     * Builds a lang-lib {@code map()} invocation over {@code filteredVar} using the
     * given entry-to-value lambda.
     */
    private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
                                                     BLangLambdaFunction backToMapLambda) {
        BLangInvocation invocationNode = createInvocationNode("map", new ArrayList<>(), filteredVar.type);

        invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
        invocationNode.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
        invocationNode.requiredArgs =
                Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
        invocationNode.type = invocationNode.symbol.type.getReturnType();
        invocationNode.requiredArgs.add(backToMapLambda);
        return invocationNode;
    }

    /**
     * Generates an anonymous function {@code function([string, any] entry) returns any}
     * that returns the value element (index 1) of a [key, value] entry tuple; used to map
     * entry tuples back into a plain value mapping.
     */
    private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
        String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
        BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);

        BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"),
                this.env.scope.owner.pkgID, getStringAnyTupleType(), this.env.scope.owner);

        BLangSimpleVariable inputParameter =
                ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(), null, keyValSymbol);
        function.requiredParams.add(inputParameter);

        BLangValueType anyType = new BLangValueType();
        anyType.typeKind = TypeKind.ANY;
        anyType.type = symTable.anyType;
        function.returnTypeNode = anyType;

        BLangBlockFunctionBody functionBlock =
                ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
        function.body = functionBlock;

        // val = entry[1]; return val;
        BLangIndexBasedAccess indexBasesAccessExpr =
                ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol,
                        ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 1));
        BLangSimpleVariableDef tupSecondElem =
                createVarDef("val", indexBasesAccessExpr.type, indexBasesAccessExpr, pos);
        functionBlock.addStatement(tupSecondElem);

        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
        returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);

        // Create and wire up the function symbol.
        BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
                new Name(function.name.value), env.enclPkg.packageID, function.type,
                env.enclEnv.enclVarSym, true);
        functionSymbol.retType = function.returnTypeNode.type;
        functionSymbol.params = function.requiredParams.stream()
                .map(param -> param.symbol)
                .collect(Collectors.toList());
        functionSymbol.scope = env.scope;
        functionSymbol.type = new
BInvokableType(Collections.singletonList(getStringAnyTupleType()), symTable.anyType, null); function.symbol = functionSymbol; rewrite(function, env); env.enclPkg.addFunction(function); return createLambdaFunction(function, functionSymbol); } private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos, BLangSimpleVariable entriesInvocationVar, BLangLambdaFunction filter) { BLangInvocation invocationNode = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type); invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol); invocationNode.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter")); invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol)); invocationNode.type = invocationNode.symbol.type.getReturnType(); invocationNode.requiredArgs.add(filter); return invocationNode; } private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt, BLangExpression expression, String name) { Name varName = names.fromString(name); BLangSimpleVariable detailMap = ASTBuilderUtil.createVariable(pos, name, varType, expression, new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner)); BLangSimpleVariableDef constructedMap = ASTBuilderUtil.createVariableDef(pos, detailMap); constructedMap.type = varType; parentBlockStmt.addStatement(constructedMap); env.scope.define(varName, detailMap.symbol); return detailMap; } private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt, BLangErrorVariable.BLangErrorDetailEntry detailEntry, BLangExpression detailEntryVar) { if (detailEntry.valueBindingPattern.getKind() == NodeKind.VARIABLE) { BLangSimpleVariableDef errorDetailVar = createVarDef( ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value, detailEntry.valueBindingPattern.type, detailEntryVar, detailEntry.valueBindingPattern.pos); 
        // --- continuation of createAndAddBoundVariableDef: remaining binding-pattern kinds ---
        parentBlockStmt.addStatement(errorDetailVar);
    } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
        // Nested record binding pattern: desugared via a record variable definition.
        BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef(
                detailEntry.valueBindingPattern.pos,
                (BLangRecordVariable) detailEntry.valueBindingPattern);
        recordVariableDef.var.expr = detailEntryVar;
        recordVariableDef.type = symTable.recordType;
        parentBlockStmt.addStatement(recordVariableDef);
    } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
        // Nested tuple binding pattern.
        // NOTE(review): unlike the record branch, detailEntryVar is never attached as the
        // initializer expression here — confirm this is intentional.
        BLangTupleVariableDef tupleVariableDef = ASTBuilderUtil.createTupleVariableDef(
                detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern);
        parentBlockStmt.addStatement(tupleVariableDef);
    }
}

/**
 * Builds the expression that reads one detail entry (by key) out of the temporary detail map.
 * For index-based access the original type is widened to the pure type.
 *
 * @param detailEntry         the detail entry whose key is used for the lookup
 * @param tempDetailVarSymbol symbol of the temporary detail-map variable
 * @return the access expression for this entry's value
 */
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                             BVarSymbol tempDetailVarSymbol) {
    BLangExpression detailEntryVar = createIndexBasedAccessExpr(
            detailEntry.valueBindingPattern.type,
            detailEntry.valueBindingPattern.pos,
            createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
            tempDetailVarSymbol, null);
    if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
        bLangIndexBasedAccess.originalType = symTable.pureType;
    }
    return detailEntryVar;
}

/**
 * Folds the parts of a string template into a left-associative chain of ADD binary
 * expressions. Parts that are neither string nor xml are first converted via toString.
 * Each concatenation's result type is xml if either operand is xml, string otherwise.
 *
 * @param exprs template sub-expressions in order
 * @return the combined concatenation expression (null for an empty list)
 */
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
    BLangExpression concatExpr = null;
    BLangExpression currentExpr;
    for (BLangExpression expr : exprs) {
        currentExpr = expr;
        if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) {
            // Coerce non-string, non-xml operands to string first.
            currentExpr = getToStringInvocationOnExpr(expr);
        }
        if (concatExpr == null) {
            // First part seeds the chain.
            concatExpr = currentExpr;
            continue;
        }
        BType binaryExprType =
                TypeTags.isXMLTypeTag(concatExpr.type.tag) || TypeTags.isXMLTypeTag(currentExpr.type.tag) ?
                        symTable.xmlType : symTable.stringType;
        concatExpr = ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr,
                binaryExprType, OperatorKind.ADD, null);
    }
    return concatExpr;
}

/**
 * Wraps the given expression in an invocation of lang.value's toString function,
 * inserting a conversion to the function's parameter type if required.
 */
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
            .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
    List<BLangExpression> requiredArgs = new ArrayList<BLangExpression>() {{
        add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
    }};
    return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
            symResolver);
}

/**
 * Builds a langlib invocation extracting the detail map of an error value. The receiver is
 * either the given parent index access or a fresh ref to the error variable symbol.
 */
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr = parentIndexBasedAccess != null ?
            parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, onExpr, new ArrayList<>(), null, pos);
}

/**
 * Builds a langlib invocation extracting the reason of an error value.
 * The method body continues past this section of the file.
 */
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
                                                           BVarSymbol errorVarSymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr = parentIndexBasedAccess != null ?
            // --- continuation of generateErrorReasonBuiltinFunction ---
            parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
    return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, onExpr, new ArrayList<>(), reasonType, pos);
}

/**
 * Builds a {@code cloneWithType(...)} langlib invocation converting the given source variable
 * to the target type. The invocation's static type is {@code targetType|error}.
 *
 * @param pos        position for generated nodes
 * @param targetType destination type of the conversion
 * @param source     symbol of the value being converted
 * @return the constructed invocation node
 */
private BLangInvocation generateCloneWithTypeInvocation(DiagnosticPos pos,
                                                        BType targetType,
                                                        BVarSymbol source) {
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    BLangInvocation invocationNode = createInvocationNode(CLONE_WITH_TYPE, new ArrayList<>(), typedescType);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    invocationNode.expr = typedescExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CLONE_WITH_TYPE));
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, source), typedescExpr);
    // cloneWithType can fail at runtime, hence the union with error.
    invocationNode.type = BUnionType.create(null, targetType, symTable.errorType);
    return invocationNode;
}

/**
 * Generates an anonymous predicate {@code function ([string, any] entry) returns boolean}
 * returning false for any entry whose key is in {@code toRemoveList}, true otherwise.
 * Used to filter already-bound fields out of a rest binding.
 *
 * @param toRemoveList keys to exclude
 * @param pos          position for generated nodes
 * @return a lambda wrapping the generated predicate function
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
    // Counter suffix keeps each generated filter function name unique.
    String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
    BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
            getStringAnyTupleType(), this.env.scope.owner);
    BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
    // key = entry[0] — the key element of the [key, value] tuple.
    BLangIndexBasedAccess indexBasesAccessExpr =
            ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol, ASTBuilderUtil
                    .createLiteral(pos, symTable.intType, (long) 0));
    BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
    functionBlock.addStatement(tupFirstElem);
    // One `if (key == <item>) { return false; }` per key to remove.
    for (String toRemoveItem : toRemoveList) {
        createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
    }
    BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
    return createLambdaFunction(function, functionSymbol);
}

/**
 * Convenience overload: filters out all field names already bound by the given record variable.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable,
                                                           DiagnosticPos pos) {
    List<String> fieldNamesToRemove = recordVariable.variableList.stream()
            .map(var -> var.getKey().getValue())
            .collect(Collectors.toList());
    return createFuncToFilterOutRestParam(fieldNamesToRemove, pos);
}

/**
 * Appends `if (<param as string> == key) { return false; }` to the given function block.
 *
 * @param pos              position for generated nodes
 * @param inputParamSymbol symbol of the value compared against the key
 * @param blockStmt        function body receiving the if statement
 * @param key              string literal to compare against
 */
private void createIfStmt(DiagnosticPos pos,
                          BVarSymbol inputParamSymbol,
                          BLangBlockFunctionBody blockStmt,
                          String key) {
    BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
    BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
    BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
    returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
    ifStmt.body = ifBlock;
    BLangGroupExpr groupExpr = new BLangGroupExpr();
    groupExpr.type = symTable.booleanType;
    BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
            ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
            symTable.booleanType, OperatorKind.EQUAL, null);
    // Resolve the `==` operator symbol for the operand types.
    binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
    groupExpr.expression = binaryExpr;
    ifStmt.expr = groupExpr;
}

/**
 * Wraps an already-built function and its invokable symbol in a lambda node.
 */
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = function;
    lambdaFunction.type = functionSymbol.type;
    return lambdaFunction;
}

/**
 * Appends `return true;` to the function block, then creates the invokable symbol for the
 * generated function, rewrites it and registers it on the enclosing package.
 *
 * @return the created function symbol
 */
private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
                                                   BLangBlockFunctionBody functionBlock) {
    BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
    trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
    functionSymbol.retType = function.returnTypeNode.type;
    functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
    functionSymbol.scope = env.scope;
    functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            getRestType(functionSymbol), symTable.booleanType, null);
    function.symbol = functionSymbol;
    rewrite(function, env);
    env.enclPkg.addFunction(function);
    return functionSymbol;
}

/**
 * Attaches the [string, any] input parameter and a boolean return type to the given function
 * and creates its (initially empty) body block.
 */
private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
                                                            BVarSymbol keyValSymbol) {
    BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
            null, keyValSymbol);
    function.requiredParams.add(inputParameter);
    BLangValueType booleanTypeKind = new BLangValueType();
    booleanTypeKind.typeKind = TypeKind.BOOLEAN;
    booleanTypeKind.type = symTable.booleanType;
    function.returnTypeNode = booleanTypeKind;
    BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = functionBlock;
    return functionBlock;
}

/** Tuple type [string, any] used as the parameter type of the generated entry lambdas. */
private BTupleType getStringAnyTupleType() {
    ArrayList<BType> typeList = new ArrayList<BType>() {{
        add(symTable.stringType);
        add(symTable.anyType);
    }};
    return new BTupleType(typeList);
}

/**
 * This method creates a simple variable def and assigns an array expression based on the given indexExpr.
 *
 * case 1: when there is no parent array access expression, but with the indexExpr : 1
 * string s = x[1];
 *
 * case 2: when there is a parent array expression : x[2] and indexExpr : 3
 * string s = x[2][3];
 *
 * case 3: when there is no parent array access expression, but with the indexExpr : name
 * string s = x[name];
 *
 * case 4: when there is a parent map expression : x[name] and indexExpr : fName
 * string s = x[name][fName];
 *
 * case 5: when there is a parent map expression : x[name] and indexExpr : 1
 * string s = x[name][1];
 */
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
                                    BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
                                    BLangIndexBasedAccess parentArrayAccessExpr) {
    Name varName = names.fromIdNode(simpleVariable.name);
    if (varName == Names.IGNORE) {
        // `_` bindings produce no variable definition.
        return;
    }
    final BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos,
            parentBlockStmt);
    simpleVariableDef.var = simpleVariable;
    simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}

/**
 * Desugars an assignment. Safe-navigation LHS expressions are handed off to the safe
 * navigation rewriter; otherwise both sides are rewritten and the RHS is converted to the
 * LHS type if required.
 */
@Override
public void visit(BLangAssignment assignNode) {
    if (safeNavigateLHS(assignNode.varRef)) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
        accessExpr.leafNode = true;
        result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
        result = rewrite(result, env);
        return;
    }
    assignNode.varRef = rewriteExpr(assignNode.varRef);
    assignNode.expr = rewriteExpr(assignNode.expr);
    // NOTE(review): expr is passed through rewriteExpr a second time here (it was already
    // rewritten on the previous line) — appears redundant; confirm rewriteExpr is idempotent.
    assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
    result = assignNode;
}

/**
 * Desugars a tuple destructuring assignment: defines a temporary `tuple` variable holding
 * the RHS, then emits one assignment per member plus the rest-binding loop.
 */
@Override
public void visit(BLangTupleDestructure tupleDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
    // The temporary holds the RHS as an any[] so members can be accessed by index.
    BType runTimeType = new BArrayType(symTable.anyType);
    String name = "tuple";
    final BLangSimpleVariable tuple =
            ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null,
                    new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                            this.env.scope.owner));
    tuple.expr = tupleDestructure.expr;
    final BLangSimpleVariableDef variableDef =
            ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos, blockStmt);
    variableDef.var = tuple;
    createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null);
    createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol);
    result = rewrite(blockStmt, env);
}

/**
 * When the tuple var-ref has a rest binding (`...rest`), appends statements that initialize
 * the rest array and copy the trailing members into it via a generated foreach over the
 * index range [memberCount, length).
 */
private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt,
                                           BVarSymbol tupleVarSymbol) {
    BLangTupleVarRef tupleVarRef = tupleDestructure.varRef;
    DiagnosticPos pos = blockStmt.pos;
    if (tupleVarRef.restParam != null) {
        BLangExpression tupleExpr = tupleDestructure.expr;
        // T[] rest = [];
        BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam;
        BArrayType restParamType = (BArrayType) restParam.type;
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = restParamType;
        BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
        restParamAssignment.varRef = restParam;
        restParamAssignment.varRef.type = restParamType;
        restParamAssignment.expr = arrayExpr;
        // Iterate indices from the first rest member up to the source tuple's length.
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) tupleVarRef.expressions.size();
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // rest[rest.length()] = tuple[$foreach$i]; — append to the rest array each iteration.
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
                createLengthInvocation(pos, restParam));
        indexAccessExpr.type = restParamType.eType;
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}

/**
 * Builds a `collection.length()` langlib invocation for the given collection expression.
 */
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    BInvokableSymbol lengthInvokableSymbol = (BInvokableSymbol) symResolver
            .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME));
    BLangInvocation lengthInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos,
            lengthInvokableSymbol, Lists.of(collection), symResolver);
    lengthInvocation.argExprs = lengthInvocation.requiredArgs;
    lengthInvocation.type = lengthInvokableSymbol.type.getReturnType();
    return lengthInvocation;
}

/**
 * This method iterates through each member of the tupleVarRef and creates the relevant var ref
 * assignment statements. It checks the node kind of each member and calls the related
 * var ref creation method.
 *
 * Example:
 * ((a, b), c)) = (tuple)
 *
 * (a, b) is again a tuple, so it is a recursive var ref creation.
 *
 * c is a simple var, so a simple var def will be created.
 *
 */
private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol tupleVarSymbol,
                                         BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangExpression> expressions = parentTupleVariable.expressions;
    for (int index = 0; index < expressions.size(); index++) {
        BLangExpression expression = expressions.get(index);
        if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
                NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
                NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
                NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
            // Leaf reference: assign tuple[index] directly.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType,
                    (long) index);
            createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
                    tupleVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
            // Nested tuple ref: recurse with tuple[index] as the new parent access.
            BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType,
                    (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
                    new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
            continue;
        }
        if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
            // Nested record ref: recurse, then register a type definition for the record type.
            BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
                    (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangRecordVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
            BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(
                    (BRecordType) recordVarRef.type, env.enclPkg.packageID, symTable, recordVarRef.pos);
            recordTypeNode.initFunction = TypeDefBuilderHelper
                    .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            TypeDefBuilderHelper
                    .addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol, recordTypeNode, env);
            continue;
        }
        if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
            // Nested error ref: recurse with tuple[index] as the new parent access.
            BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
                    (long) index);
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
                    arrayAccessExpr);
        }
    }
}

/**
 * This method creates an assignment statement and assigns an array expression based on the given indexExpr.
 *
 */
private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef,
                                              BLangBlockStmt parentBlockStmt,
                                              BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
                                              BLangIndexBasedAccess parentArrayAccessExpr) {
    if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName);
        if (varName == Names.IGNORE) {
            // `_` on the LHS: nothing to assign.
            return;
        }
    }
    BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
    assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type);
    final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos,
            parentBlockStmt);
    assignmentStmt.varRef = simpleVarRef;
    assignmentStmt.expr = assignmentExpr;
}

/**
 * Builds `<tupleVar>[indexExpr]` (optionally chained under parentExpr) and, for value types,
 * wraps the access in a type conversion so the `any`-typed element is narrowed to varType.
 */
private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos,
                                                   BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
                                                   BLangIndexBasedAccess parentExpr) {
    BLangIndexBasedAccess arrayAccess = ASTBuilderUtil.createIndexBasesAccessExpr(varPos,
            symTable.anyType, tupleVarSymbol, indexExpr);
    arrayAccess.originalType = varType;
    if (parentExpr != null) {
        arrayAccess.expr = parentExpr;
    }
    final BLangExpression assignmentExpr;
    if (types.isValueType(varType)) {
        // Value types need an explicit cast from `any`.
        BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        castExpr.expr = arrayAccess;
        castExpr.type = varType;
        assignmentExpr = castExpr;
    } else {
        assignmentExpr = arrayAccess;
    }
    return assignmentExpr;
}

/**
 * Desugars a record destructuring assignment: defines a temporary `$map$0` variable holding
 * the RHS as map<any>, then emits per-field assignment statements.
 */
@Override
public void visit(BLangRecordDestructure recordDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
    BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    String name = "$map$0";
    final BLangSimpleVariable mapVariable =
            ASTBuilderUtil.createVariable(recordDestructure.pos, name, runTimeType, null,
                    new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                            runTimeType, this.env.scope.owner));
    mapVariable.expr = recordDestructure.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.
            createVariableDefStmt(recordDestructure.pos, blockStmt);
    variableDef.var = mapVariable;
    createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}

/**
 * Desugars an error destructuring assignment: defines a temporary `$error$` variable holding
 * the RHS, then emits assignments for reason, detail entries and the rest binding.
 */
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
    String name = "$error$";
    final BLangSimpleVariable errorVar =
            ASTBuilderUtil.createVariable(errorDestructure.pos, name, symTable.errorType, null,
                    new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                            symTable.errorType, this.env.scope.owner));
    errorVar.expr = errorDestructure.expr;
    final BLangSimpleVariableDef variableDef =
            ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos, blockStmt);
    variableDef.var = errorVar;
    createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null);
    result = rewrite(blockStmt, env);
}

/**
 * Generates per-field assignment statements for a record var-ref destructuring, recursing
 * for nested record/tuple/error refs. The method body continues past this section.
 */
private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol recordVarSymbol,
                                         BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
    for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
        BLangExpression variableReference = varRefKeyValue.variableReference;
        // Records are accessed by field-name key.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
                varRefKeyValue.variableName.getValue());
        if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
                NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
                NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
                NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
            createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
                    // --- continuation of createVarRefAssignmentStmts(BLangRecordVarRef, ...) ---
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
            // Nested record ref: recurse with map[fieldName] as the new parent access.
            BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
            // Nested tuple ref.
            BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    symTable.tupleType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
            // Nested error ref.
            BLangIndexBasedAccess arrayAccessExpr =
                    ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos,
                            symTable.errorType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
        }
    }
    if (parentRecordVarRef.restParam != null) {
        // Bind the record rest param: filter out already-bound keys from the source map.
        DiagnosticPos pos = parentBlockStmt.pos;
        BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
        BLangSimpleVarRef variableReference;
        if (parentIndexAccessExpr != null) {
            // Nested level: hoist the parent access into a `$map$1` temp so it is read once.
            BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
                    null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
                            restParamType, this.env.scope.owner));
            mapVariable.expr = parentIndexAccessExpr;
            BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDef.var = mapVariable;
            variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
        } else {
            // Top level: the source map is the first statement's variable ($map$0).
            variableReference = ASTBuilderUtil.createVariableRef(pos,
                    ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
        }
        BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
        List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
                .map(field -> field.variableName.value)
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos, keysToRemove,
                restParamType, parentBlockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
        BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
        restParamAssignment.varRef = restParam;
        restParamAssignment.varRef.type = restParamType;
        restParamAssignment.expr = varRef;
    }
}

/**
 * Generates assignments for an error destructuring var-ref: the reason, the named detail
 * entries, and the rest binding. The method body continues past this section.
 */
private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
                                         BVarSymbol errorVarySymbol,
                                         BLangIndexBasedAccess parentIndexAccessExpr) {
    if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
            names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
        // Assign the reason unless it is bound to `_`.
        BLangAssignment reasonAssignment = ASTBuilderUtil
                .createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
        reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
                symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
        reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr,
                parentErrorVarRef.reason.type);
        reasonAssignment.varRef = parentErrorVarRef.reason;
    }
    if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
        // No detail bindings and no usable rest var: nothing more to emit.
        return;
    }
    BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
            errorVarySymbol, parentIndexAccessExpr);
    // --- continuation of createVarRefAssignmentStmts(BLangErrorVarRef, ...) ---
    // Capture the detail map once in a temp so each entry access reads the same snapshot.
    BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++,
            symTable.detailType, errorDetailBuiltinFunction, parentErrorVarRef.pos);
    detailTempVarDef.type = symTable.detailType;
    parentBlockStmt.addStatement(detailTempVarDef);
    this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
    List<String> extractedKeys = new ArrayList<>();
    for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
        extractedKeys.add(detail.name.value);
        BLangVariableReference ref = (BLangVariableReference) detail.expr;
        // Access detail entry by key: $error$detail$N[<name>].
        BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
                createStringLiteral(detail.name.pos, detail.name.value),
                detailTempVarDef.var.symbol, null);
        if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
            bLangIndexBasedAccess.originalType = symTable.pureType;
        }
        BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
        detailAssignment.varRef = ref;
        detailAssignment.expr = detailEntryVar;
    }
    if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
        // Bind the rest var to the detail map minus the explicitly extracted keys.
        BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
                detailTempVarDef.var.symbol);
        BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
                extractedKeys, parentErrorVarRef.restVar.type, parentBlockStmt);
        BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
                parentBlockStmt);
        restAssignment.varRef = parentErrorVarRef.restVar;
        restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
                filteredDetail.symbol);
    }
    BErrorType errorType = (BErrorType) parentErrorVarRef.type;
    if (errorType.detailType.getKind() == TypeKind.RECORD) {
        // Ensure the detail record type has an initializer function registered in its scope.
        BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
        tsymbol.initializerFunc = createRecordInitFunc();
        tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
    }
}

/**
 * True when the error ref has no usable rest binding: either no rest var at all, or the
 * rest var is the `_` ignore variable.
 */
private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
    if (parentErrorVarRef.restVar == null) {
        return true;
    }
    if (parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        return (((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value.equals(IGNORE.value));
    }
    return false;
}

/**
 * Desugars a retry block: the body is wrapped in a `$retryFunc$` lambda whose trapped
 * invocation is re-executed in a while loop as long as the result is an error and the retry
 * manager's shouldRetry approves. The method body continues past this section.
 */
@Override
public void visit(BLangRetry retryNode) {
    DiagnosticPos pos = retryNode.retryBody.pos;
    BLangBlockStmt retryBlockStmt = ASTBuilderUtil.createBlockStmt(retryNode.pos);
    BLangSimpleVariableDef retryManagerVarDef = createRetryManagerDef(retryNode.retrySpec, retryNode.pos);
    retryBlockStmt.stmts.add(retryManagerVarDef);
    // Wrap the retry body in a lambda so each attempt re-executes it.
    BLangBlockFunctionBody retryBody = ASTBuilderUtil.createBlockFunctionBody(pos);
    BType retryReturnType = BUnionType.create(null, symTable.anyType, symTable.errorType);
    BLangType retryLambdaReturnType = ASTBuilderUtil.createTypeNode(retryReturnType);
    BLangLambdaFunction retryFunc = createLambdaFunction(pos, "$retryFunc$", Collections.emptyList(),
            retryLambdaReturnType, retryBody);
    retryBody.stmts.addAll(retryNode.retryBody.stmts);
    BVarSymbol retryFuncVarSymbol = new BVarSymbol(0, names.fromString("$retryFunc$"),
            env.scope.owner.pkgID, retryFunc.type, retryFunc.function.symbol);
    BLangSimpleVariable retryLambdaVariable = ASTBuilderUtil.createVariable(pos, "retryFunc",
            retryFunc.type, retryFunc, retryFuncVarSymbol);
    BLangSimpleVariableDef retryLambdaVariableDef = ASTBuilderUtil.createVariableDef(pos,
            retryLambdaVariable);
    BLangSimpleVarRef retryLambdaVarRef = new BLangSimpleVarRef.BLangLocalVarRef(retryLambdaVariable.symbol);
    retryLambdaVarRef.type = retryFuncVarSymbol.type;
    retryBlockStmt.stmts.add(retryLambdaVariableDef);
    // Invocation of the lambda via function pointer; each loop iteration re-invokes it.
    BLangInvocation retryLambdaInvocation = new BLangInvocation.BFunctionPointerInvocation(pos,
            retryLambdaVarRef, retryLambdaVariable.symbol, retryReturnType);
    retryLambdaInvocation.argExprs = new ArrayList<>();
    // --- continuation of visit(BLangRetry) ---
    // Trap the lambda invocation so a panic inside the retry body becomes an error value.
    BLangTrapExpr retryFunctionTrapExpression = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    retryFunctionTrapExpression.type = retryReturnType;
    retryFunctionTrapExpression.expr = retryLambdaInvocation;
    retryFunc.capturedClosureEnv = env;
    BVarSymbol retryFunctionVarSymbol = new BVarSymbol(0, new Name("$result$"), env.scope.owner.pkgID,
            retryReturnType, env.scope.owner);
    BLangSimpleVariable retryFunctionVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            retryReturnType, retryFunctionTrapExpression, retryFunctionVarSymbol);
    BLangSimpleVariableDef retryFunctionVariableDef = ASTBuilderUtil.createVariableDef(pos,
            retryFunctionVariable);
    retryBlockStmt.stmts.add(retryFunctionVariableDef);
    BLangSimpleVarRef retryFunctionVariableRef =
            new BLangSimpleVarRef.BLangLocalVarRef(retryFunctionVariable.symbol);
    retryFunctionVariableRef.type = retryFunctionVariable.symbol.type;
    // while ($result$ is error && manager.shouldRetry($result$)) { $result$ = trap fn(); }
    BLangWhile whileNode = createRetryWhileLoop(pos, retryManagerVarDef, retryFunctionTrapExpression,
            retryFunctionVariableRef);
    retryBlockStmt.stmts.add(whileNode);
    if (retryNode.retryBodyReturns) {
        // Surface the final attempt's value to the caller, converted to the enclosing return type.
        BLangInvokableNode encInvokable = env.enclInvokable;
        BLangReturn returnNode = ASTBuilderUtil.createReturnStmt(pos,
                addConversionExprIfRequired(retryFunctionVariableRef, encInvokable.returnTypeNode.type));
        retryBlockStmt.stmts.add(returnNode);
    }
    result = rewrite(retryBlockStmt, env);
}

/**
 * Builds the retry loop: condition `result is error && manager.shouldRetry(result)`,
 * body `result = <trapExpr>;`.
 */
protected BLangWhile createRetryWhileLoop(DiagnosticPos retryBlockPos,
                                          BLangSimpleVariableDef retryManagerVarDef,
                                          BLangExpression trapExpr,
                                          BLangSimpleVarRef result) {
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = retryBlockPos;
    BLangTypeTestExpr isErrorCheck = createTypeCheckExpr(retryBlockPos, result, getErrorTypeNode());
    BLangSimpleVarRef retryManagerVarRef = new BLangLocalVarRef(retryManagerVarDef.var.symbol);
    retryManagerVarRef.type = retryManagerVarDef.var.symbol.type;
    BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(retryBlockPos,
            retryManagerVarRef, result);
    whileNode.expr = ASTBuilderUtil.createBinaryExpr(retryBlockPos, isErrorCheck, shouldRetryInvocation,
            symTable.booleanType, OperatorKind.AND, null);
    BLangBlockStmt whileBlockStmnt = ASTBuilderUtil.createBlockStmt(retryBlockPos);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(retryBlockPos, result, trapExpr);
    whileBlockStmnt.stmts.add(assignment);
    whileNode.body = whileBlockStmnt;
    return whileNode;
}

/**
 * Defines the `$retryManager$` variable, initialized with the retry spec's manager type
 * (or lang.internal's DefaultRetryManager when none is given) and its constructor args.
 */
protected BLangSimpleVariableDef createRetryManagerDef(BLangRetrySpec retrySpec, DiagnosticPos pos) {
    BTypeSymbol retryManagerTypeSymbol = (BObjectTypeSymbol) symTable.langInternalModuleSymbol
            .scope.lookup(names.fromString("DefaultRetryManager")).symbol;
    BType retryManagerType = retryManagerTypeSymbol.type;
    if (retrySpec.retryManagerType != null) {
        // The retry spec names an explicit manager type: use it instead of the default.
        retryManagerType = retrySpec.retryManagerType.type;
    }
    BVarSymbol retryMangerSymbol = new BVarSymbol(0, names.fromString("$retryManager$"),
            env.scope.owner.pkgID, retryManagerType, this.env.scope.owner);
    BLangTypeInit managerInit = ASTBuilderUtil.createEmptyTypeInit(pos, retryManagerType);
    managerInit.initInvocation.requiredArgs = retrySpec.argExprs;
    BLangSimpleVariable retryManagerVariable = ASTBuilderUtil.createVariable(pos, "$retryManager$",
            retryManagerType, managerInit, retryMangerSymbol);
    return ASTBuilderUtil.createVariableDef(pos, retryManagerVariable);
}

/**
 * Builds `manager.shouldRetry(trapResult)` against the manager object's attached shouldRetry
 * function. The method body continues past this section.
 */
BLangInvocation createRetryManagerShouldRetryInvocation(DiagnosticPos pos,
                                                        BLangSimpleVarRef managerVarRef,
                                                        BLangSimpleVarRef trapResultRef) {
    BInvokableSymbol shouldRetryFuncSymbol = getShouldRetryFunc((BVarSymbol) managerVarRef.symbol).symbol;
    BLangInvocation shouldRetryInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    shouldRetryInvocation.pos = pos;
    shouldRetryInvocation.expr = managerVarRef;
    shouldRetryInvocation.requiredArgs = Lists.of(trapResultRef);
    shouldRetryInvocation.argExprs = shouldRetryInvocation.requiredArgs;
    shouldRetryInvocation.symbol = shouldRetryFuncSymbol;
    shouldRetryInvocation.type
= shouldRetryFuncSymbol.retType; shouldRetryInvocation.langLibInvocation = false; return shouldRetryInvocation; } private BAttachedFunction getShouldRetryFunc(BVarSymbol retryManagerSymbol) { BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) retryManagerSymbol.type.tsymbol; for (BAttachedFunction bAttachedFunction : typeSymbol.attachedFuncs) { if (bAttachedFunction.funcName.value.equals(RETRY_MANAGER_OBJECT_SHOULD_RETRY_FUNC)) { return bAttachedFunction; } } return null; } protected BLangTypeTestExpr createTypeCheckExpr(DiagnosticPos pos, BLangExpression expr, BLangType type) { BLangTypeTestExpr testExpr = ASTBuilderUtil.createTypeTestExpr(pos, expr, type); testExpr.type = symTable.booleanType; return testExpr; } @Override public void visit(BLangRetryTransaction retryTransaction) { BLangStatementExpression retryTransactionStmtExpr = transactionDesugar.desugar(retryTransaction, env); BLangExpressionStmt transactionExprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode(); transactionExprStmt.pos = retryTransaction.pos; transactionExprStmt.expr = retryTransactionStmtExpr; transactionExprStmt.type = symTable.nilType; result = rewrite(transactionExprStmt, env); } @Override public void visit(BLangContinue nextNode) { result = nextNode; } @Override public void visit(BLangBreak breakNode) { result = breakNode; } @Override public void visit(BLangReturn returnNode) { if (returnNode.expr != null) { returnNode.expr = rewriteExpr(returnNode.expr); } result = returnNode; } @Override public void visit(BLangPanic panicNode) { panicNode.expr = rewriteExpr(panicNode.expr); result = panicNode; } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env); result = xmlnsStmtNode; } @Override public void visit(BLangXMLNS xmlnsNode) { BLangXMLNS generatedXMLNSNode; xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI); BSymbol ownerSymbol = xmlnsNode.symbol.owner; if ((ownerSymbol.tag & 
SymTag.INVOKABLE) == SymTag.INVOKABLE
                || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
            generatedXMLNSNode = new BLangLocalXMLNS();
        } else {
            generatedXMLNSNode = new BLangPackageXMLNS();
        }
        generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI;
        generatedXMLNSNode.prefix = xmlnsNode.prefix;
        generatedXMLNSNode.symbol = xmlnsNode.symbol;
        result = generatedXMLNSNode;
    }

    /**
     * Desugars `lhs op= rhs`.
     * Non-index references become a plain assignment `lhs = lhs op rhs`.
     * For index-based access chains, every index expression is first captured exactly once
     * into a temp variable (so side-effecting index expressions are not evaluated twice)
     * before the combined assignment is emitted.
     */
    public void visit(BLangCompoundAssignment compoundAssignment) {
        BLangVariableReference varRef = compoundAssignment.varRef;
        if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
            if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
                varRef.lhsVar = true;
            }
            result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
                    rewriteExpr(compoundAssignment.modifiedExpr));
            return;
        }
        // Walk the index-access chain outermost-to-innermost, capturing each index
        // expression in a temp var (prepending, so the final order is innermost-first).
        List<BLangStatement> statements = new ArrayList<>();
        List<BLangSimpleVarRef> varRefs = new ArrayList<>();
        List<BType> types = new ArrayList<>();
        do {
            BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$",
                    ((BLangIndexBasedAccess) varRef).indexExpr.type,
                    ((BLangIndexBasedAccess) varRef).indexExpr, compoundAssignment.pos);
            BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
                    tempIndexVarDef.var.symbol);
            statements.add(0, tempIndexVarDef);
            varRefs.add(0, tempVarRef);
            types.add(0, varRef.type);
            varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
        } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
        // Rebuild the access chain using the captured temps.
        BLangVariableReference var = varRef;
        for (int ref = 0; ref < varRefs.size(); ref++) {
            var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
            var.type = types.get(ref);
        }
        var.type = compoundAssignment.varRef.type;
        BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
                compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
        rhsExpression.type = compoundAssignment.modifiedExpr.type;
        BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
                rhsExpression);
        statements.add(assignStmt);
        BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
        result = rewrite(bLangBlockStmt, env);
    }

    @Override
    public void visit(BLangExpressionStmt exprStmtNode) {
        exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
        result = exprStmtNode;
    }

    @Override
    public void visit(BLangIf ifNode) {
        ifNode.expr = rewriteExpr(ifNode.expr);
        ifNode.body = rewrite(ifNode.body, env);
        ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
        result = ifNode;
    }

    @Override
    public void visit(BLangMatch matchStmt) {
        // Desugar a match statement into a block: bind the matched expression to a
        // generated temp variable, then emit the equivalent if/else chain over that temp.
        BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
        matchBlockStmt.pos = matchStmt.pos;
        String matchExprVarName = GEN_VAR_PREFIX.value;
        BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos,
                matchExprVarName, matchStmt.expr.type, matchStmt.expr,
                new BVarSymbol(0, names.fromString(matchExprVarName), this.env.scope.owner.pkgID,
                        matchStmt.expr.type, this.env.scope.owner));
        BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
        matchBlockStmt.stmts.add(matchExprVarDef);
        matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
        rewrite(matchBlockStmt, this.env);
        result = matchBlockStmt;
    }

    @Override
    public void visit(BLangForeach foreach) {
        // Bind the collection to `$data$` once, then rewrite the foreach as an
        // iterator-driven while loop. Built-in iterable types use the lang-lib iterator
        // function; objects use their own attached iterator function.
        BLangBlockStmt blockNode;
        BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
                foreach.collection.type, this.env.scope.owner);
        BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
                foreach.collection.type, foreach.collection, dataSymbol);
        BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
        BVarSymbol collectionSymbol = dataVariable.symbol;
        switch (foreach.collection.type.tag) {
            case TypeTags.STRING:
            case TypeTags.ARRAY:
            case TypeTags.TUPLE:
            case TypeTags.XML:
            case TypeTags.MAP:
            case TypeTags.TABLE:
            case TypeTags.STREAM:
            case TypeTags.RECORD:
                BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol);
                blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol,
                        iteratorSymbol, true);
                break;
            case TypeTags.OBJECT:
                iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol);
                blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol,
                        iteratorSymbol, false);
                break;
            default:
                // Non-iterable collection type: keep only the `$data$` definition.
                blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
                blockNode.stmts.add(0, dataVarDef);
                break;
        }
        rewrite(blockNode, this.env);
        result = blockNode;
    }

    // Defines the iterator variable from the collection's iterator function and prepends
    // the `$data$` definition to the generated while-loop block.
    private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach,
                                                         BLangSimpleVariableDef dataVariableDefinition,
                                                         BVarSymbol collectionSymbol,
                                                         BInvokableSymbol iteratorInvokableSymbol,
                                                         boolean isIteratorFuncFromLangLib) {
        BLangSimpleVariableDef iteratorVarDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol,
                iteratorInvokableSymbol, isIteratorFuncFromLangLib);
        BLangBlockStmt blockNode = desugarForeachToWhile(foreach, iteratorVarDef);
        blockNode.stmts.add(0, dataVariableDefinition);
        return blockNode;
    }

    // Finds the attached iterator function of an iterable object type.
    // NOTE(review): if no such function is found, `function` is null and this throws an
    // NPE — presumably ruled out earlier by type checking; confirm.
    public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
        BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
        BAttachedFunction iteratorFunc = null;
        for (BAttachedFunction func : typeSymbol.attachedFuncs) {
            if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
                iteratorFunc = func;
                break;
            }
        }
        BAttachedFunction function = iteratorFunc;
        return function.symbol;
    }

    // Resolves the lang-lib iterator method for a built-in iterable type.
    BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
        return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
                names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    }
class Desugar extends BLangNodeVisitor { private static final CompilerContext.Key<Desugar> DESUGAR_KEY = new CompilerContext.Key<>(); private static final String BASE_64 = "base64"; private static final String ERROR_REASON_FUNCTION_NAME = "reason"; private static final String ERROR_DETAIL_FUNCTION_NAME = "detail"; private static final String TO_STRING_FUNCTION_NAME = "toString"; private static final String LENGTH_FUNCTION_NAME = "length"; private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException"; private static final String CLONE_WITH_TYPE = "cloneWithType"; private static final String SLICE_LANGLIB_METHOD = "slice"; private static final String PUSH_LANGLIB_METHOD = "push"; private static final String DESUGARED_VARARG_KEY = "$vararg$"; public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants"; public static final String XML_INTERNAL_CHILDREN = "children"; public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat"; public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting"; public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute"; public static final String XML_INTERNAL_GET_ELEMENTS = "getElements"; public static final String XML_GET_CONTENT_OF_TEXT = "getContent"; private SymbolTable symTable; private SymbolResolver symResolver; private final SymbolEnter symbolEnter; private ClosureDesugar closureDesugar; private QueryDesugar queryDesugar; private TransactionDesugar transactionDesugar; private AnnotationDesugar annotationDesugar; private Types types; private Names names; private ServiceDesugar serviceDesugar; private BLangNode result; private NodeCloner nodeCloner; private SemanticAnalyzer semanticAnalyzer; private BLangAnonymousModelHelper anonModelHelper; private ResolvedTypeBuilder typeBuilder; private boolean withinRetryBlock = false; private BLangStatementLink currentLink; public Stack<BLangLockStmt> enclLocks = new 
Stack<>(); private SymbolEnv env; private int lambdaFunctionCount = 0; private int transactionIndex = 0; private int recordCount = 0; private int errorCount = 0; private int annonVarCount = 0; private int initFuncIndex = 0; private int indexExprCount = 0; private int letCount = 0; private int varargCount = 0; private Stack<BLangMatch> matchStmtStack = new Stack<>(); Stack<BLangExpression> accessExprStack = new Stack<>(); private BLangMatchTypedBindingPatternClause successPattern; private BLangAssignment safeNavigationAssignment; static boolean isJvmTarget = false; public static Desugar getInstance(CompilerContext context) { Desugar desugar = context.get(DESUGAR_KEY); if (desugar == null) { desugar = new Desugar(context); } return desugar; } private Desugar(CompilerContext context) { isJvmTarget = true; context.put(DESUGAR_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.closureDesugar = ClosureDesugar.getInstance(context); this.queryDesugar = QueryDesugar.getInstance(context); this.transactionDesugar = TransactionDesugar.getInstance(context); this.annotationDesugar = AnnotationDesugar.getInstance(context); this.types = Types.getInstance(context); this.names = Names.getInstance(context); this.names = Names.getInstance(context); this.serviceDesugar = ServiceDesugar.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); this.anonModelHelper = BLangAnonymousModelHelper.getInstance(context); this.typeBuilder = new ResolvedTypeBuilder(); } public BLangPackage perform(BLangPackage pkgNode) { annotationDesugar.initializeAnnotationMap(pkgNode); SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol); return rewrite(pkgNode, env); } private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) { for (BLangTypeDefinition typeDef : 
pkgNode.typeDefinitions) {
            // Plain type references need no hoisting.
            if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
                continue;
            }
            if (typeDef.symbol.tag == SymTag.OBJECT) {
                BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
                objectTypeNode.functions.forEach(f -> {
                    if (!pkgNode.objAttachedFunctions.contains(f.symbol)) {
                        pkgNode.functions.add(f);
                        pkgNode.topLevelNodes.add(f);
                    }
                });
                if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
                    continue;
                }
                // Synthesize and semantically analyze the generated initializer, then
                // hoist it (and the user-defined init, if any) to the package level.
                BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env);
                tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                        tempGeneratedInitFunction.symbol.scope, env);
                this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
                objectTypeNode.generatedInitFunction = tempGeneratedInitFunction;
                pkgNode.functions.add(objectTypeNode.generatedInitFunction);
                pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction);
                if (objectTypeNode.initFunction != null) {
                    pkgNode.functions.add(objectTypeNode.initFunction);
                    pkgNode.topLevelNodes.add(objectTypeNode.initFunction);
                }
            } else if (typeDef.symbol.tag == SymTag.RECORD) {
                BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode;
                recordTypeNode.initFunction = rewrite(
                        TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
                pkgNode.functions.add(recordTypeNode.initFunction);
                pkgNode.topLevelNodes.add(recordTypeNode.initFunction);
            }
        }
    }

    /**
     * This method synthesizes an initializer method for objects which is responsible for initializing the default
     * values given to fields. When a user creates a new instance of the object, first, this synthesized initializer
     * is invoked on the newly created object instance. Then, if there is a user-defined init method (i.e., the init()
     * method), a method call expression for this init() method is added in the return statement of the synthesized
     * initializer. When desugaring, the following method adds params and return type for the synthesized initializer
     * by looking at the params and return type of the user-defined init() method. Therefore, when desugaring object
     * type nodes, one should always take care to call this method **after** desugaring the init() method (if there is
     * supposed to be one).
     *
     * @param objectTypeNode The object type node for which the initializer is created
     * @param env            The env for the type node
     * @return The generated initializer method
     */
    private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) {
        BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env);
        if (objectTypeNode.initFunction == null) {
            return generatedInitFunc;
        }
        // Mirror the user-defined init()'s signature (required params, rest param and
        // return type) onto the generated initializer.
        BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc;
        BAttachedFunction generatedInitializerFunc =
                ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc;
        addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc,
                generatedInitializerFunc);
        addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc,
                generatedInitializerFunc);
        generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode;
        generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type;
        ((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes;
        ((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType;
        ((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType;
        generatedInitializerFunc.type = initializerFunc.type;
        generatedInitFunc.desugared = false;
        return generatedInitFunc;
    }

    // Copies the user-defined init()'s required params onto the generated initializer.
    private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                          BAttachedFunction generatedInitializerFunc) {
        if (initFunction.requiredParams.isEmpty()) {
            return;
        }
        for
(BLangSimpleVariable requiredParameter : initFunction.requiredParams) {
            BLangSimpleVariable var = ASTBuilderUtil.createVariable(initFunction.pos,
                    requiredParameter.name.getValue(), requiredParameter.type,
                    createRequiredParamExpr(requiredParameter.expr),
                    new BVarSymbol(0, names.fromString(requiredParameter.name.getValue()),
                            requiredParameter.symbol.pkgID, requiredParameter.type,
                            requiredParameter.symbol.owner));
            generatedInitFunc.requiredParams.add(var);
            generatedInitializerFunc.symbol.params.add(var.symbol);
        }
    }

    // Clones a required-param default-value expression for reuse in the generated
    // initializer; lambdas are re-created and arrow function names are mangled.
    private BLangExpression createRequiredParamExpr(BLangExpression expr) {
        if (expr == null) {
            return null;
        }
        if (expr.getKind() == NodeKind.LAMBDA) {
            BLangFunction func = ((BLangLambdaFunction) expr).function;
            return createLambdaFunction(func.pos, func.name.value, func.requiredParams, func.returnTypeNode,
                    func.body);
        }
        BLangExpression expression = this.nodeCloner.clone(expr);
        if (expression.getKind() == NodeKind.ARROW_EXPR) {
            BLangIdentifier func = (BLangIdentifier) ((BLangArrowFunction) expression).functionName;
            ((BLangArrowFunction) expression).functionName = ASTBuilderUtil.createIdentifier(func.pos,
                    "$" + func.getValue() + "$");
        }
        return expression;
    }

    // Copies the user-defined init()'s rest param onto the generated initializer.
    private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                      BAttachedFunction generatedInitializerFunc) {
        if (initFunction.restParam == null) {
            return;
        }
        BLangSimpleVariable restParam = initFunction.restParam;
        generatedInitFunc.restParam = ASTBuilderUtil.createVariable(initFunction.pos,
                restParam.name.getValue(), restParam.type, null,
                new BVarSymbol(0, names.fromString(restParam.name.getValue()),
                        restParam.symbol.pkgID, restParam.type, restParam.symbol.owner));
        generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol;
    }

    /**
     * Create package init functions.
     *
     * @param pkgNode package node
     * @param env     symbol environment of package
     */
    private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) {
        String alias = pkgNode.symbol.pkgID.toString();
        pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
                Names.INIT_FUNCTION_SUFFIX, symTable);
        BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
        // Module-level XMLNS declarations are materialized at the top of the init function.
        for (BLangXMLNS xmlns : pkgNode.xmlnsList) {
            initFnBody.addStatement(createNamespaceDeclrStatement(xmlns));
        }
        pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
                Names.START_FUNCTION_SUFFIX, symTable);
        pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias,
                Names.STOP_FUNCTION_SUFFIX);
        createInvokableSymbol(pkgNode.initFunction, env);
        createInvokableSymbol(pkgNode.startFunction, env);
        createInvokableSymbol(pkgNode.stopFunction, env);
    }

    // Appends the module init function's return: `return <user-defined init()>` when a
    // user-defined init exists, otherwise a plain nil return.
    private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) {
        Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream()
                .filter(bLangFunction -> !bLangFunction.attachedFunction
                        && bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value))
                .findFirst();
        BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
        if (!userDefInitOptional.isPresent()) {
            addNilReturnStatement(initFnBody);
            return;
        }
        BLangFunction userDefInit = userDefInitOptional.get();
        BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
        userDefInitInvocation.pos = pkgNode.initFunction.pos;
        BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        name.setLiteral(false);
        name.setValue(userDefInit.name.value);
        userDefInitInvocation.name = name;
        userDefInitInvocation.symbol = userDefInit.symbol;
        BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        pkgAlias.setLiteral(false);
        pkgAlias.setValue(pkgNode.packageID.name.value);
        userDefInitInvocation.pkgAlias = pkgAlias;
        userDefInitInvocation.type = userDefInit.returnTypeNode.type;
        userDefInitInvocation.requiredArgs = Collections.emptyList();
        BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
        returnStmt.pos = pkgNode.initFunction.pos;
        returnStmt.expr = userDefInitInvocation;
        initFnBody.stmts.add(returnStmt);
    }

    /**
     * Create invokable symbol for function.
     *
     * @param bLangFunction function node
     * @param env           Symbol environment
     */
    private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) {
        BType returnType = bLangFunction.returnTypeNode.type == null
                ? symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env)
                : bLangFunction.returnTypeNode.type;
        BInvokableType invokableType = new BInvokableType(new ArrayList<>(), getRestType(bLangFunction),
                returnType, null);
        BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet),
                new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType,
                env.enclPkg.symbol, true);
        functionSymbol.retType = returnType;
        for (BLangVariable param : bLangFunction.requiredParams) {
            functionSymbol.params.add(param.symbol);
        }
        functionSymbol.scope = new Scope(functionSymbol);
        bLangFunction.symbol = functionSymbol;
    }

    /**
     * Add nil return statement.
     *
     * @param bLangBlockStmt block statement node
     */
    private void addNilReturnStatement(BlockNode bLangBlockStmt) {
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(((BLangNode) bLangBlockStmt).pos,
                symTable.nilType);
        bLangBlockStmt.addStatement(returnStmt);
    }

    /**
     * Create namespace declaration statement for XMLNS.
* @param xmlns XMLNS node
     * @return XMLNS statement
     */
    private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) {
        BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
        xmlnsStmt.xmlnsDecl = xmlns;
        xmlnsStmt.pos = xmlns.pos;
        return xmlnsStmt;
    }

    @Override
    public void visit(BLangPackage pkgNode) {
        // Idempotent: a package is only desugared once.
        if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
            result = pkgNode;
            return;
        }
        createPackageInitFunctions(pkgNode, env);
        addAttachedFunctionsToPackageLevel(pkgNode, env);
        // Literal constants contribute their associated type definitions.
        pkgNode.constants.stream()
                .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL
                        || constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
                .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
        BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
        BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
        // Map-typed constants are initialized in the module init function and frozen via
        // `cloneReadOnly`.
        for (BLangConstant constant : pkgNode.constants) {
            if (constant.symbol.type.tag == TypeTags.MAP) {
                BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
                constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode,
                        pkgNode.initFunction.symbol.scope, env));
                BLangInvocation frozenConstValExpr = createLangLibInvocationNode(
                        "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos);
                BLangAssignment constInit = ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef,
                        frozenConstValExpr);
                initFnBody.stmts.add(constInit);
            }
        }
        // Global variable initializers move into the module init function.
        pkgNode.globalVars.forEach(globalVar -> {
            BLangAssignment assignment = createAssignmentStmt(globalVar);
            if (assignment.expr != null) {
                initFnBody.stmts.add(assignment);
            }
        });
        pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
        annotationDesugar.rewritePackageAnnotations(pkgNode, env);
        addUserDefinedModuleInitInvocationAndReturn(pkgNode);
        pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
        pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
        pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
        pkgNode.constants = rewrite(pkgNode.constants, env);
        pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
        pkgNode.functions = rewrite(pkgNode.functions, env);
        serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction);
        ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body);
        addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body);
        addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body);
        // Split the (possibly very large) init function, presumably to stay within
        // generated-method size limits — confirm against splitInitFunction's contract.
        pkgNode.initFunction = splitInitFunction(pkgNode, env);
        pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
        pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
        pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
        closureDesugar.visit(pkgNode);
        for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) {
            rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol));
        }
        pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
        initFuncIndex = 0;
        result = pkgNode;
    }

    @Override
    public void visit(BLangImportPackage importPkgNode) {
        BPackageSymbol pkgSymbol = importPkgNode.symbol;
        SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol);
        rewrite(pkgEnv.node, pkgEnv);
        result = importPkgNode;
    }

    @Override
    public void visit(BLangTypeDefinition typeDef) {
        if (typeDef.typeNode.getKind() == NodeKind.OBJECT_TYPE
                || typeDef.typeNode.getKind() == NodeKind.RECORD_TYPE) {
            typeDef.typeNode = rewrite(typeDef.typeNode, env);
        }
        typeDef.annAttachments.forEach(attachment -> rewrite(attachment, env));
        result = typeDef;
    }

    @Override
    public void visit(BLangObjectTypeNode objectTypeNode) {
        // Pull referenced (inherited) fields into the node; abstract objects need no
        // initializer wiring.
        objectTypeNode.fields.addAll(objectTypeNode.referencedFields);
        if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
            result = objectTypeNode;
            return;
        }
        for (BLangSimpleVariable
bLangSimpleVariable : objectTypeNode.fields) {
            bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
        }
        // Field default values not already handled become statements at the top of the
        // generated initializer.
        Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts;
        for (BLangSimpleVariable field : objectTypeNode.fields) {
            if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) {
                initFuncStmts.put(field.symbol,
                        createStructFieldUpdate(objectTypeNode.generatedInitFunction, field,
                                objectTypeNode.generatedInitFunction.receiver.symbol));
            }
        }
        BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]);
        BLangBlockFunctionBody generatedInitFnBody =
                (BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body;
        int i;
        for (i = 0; i < initStmts.length; i++) {
            generatedInitFnBody.stmts.add(i, initStmts[i]);
        }
        // If a user-defined init() exists, the generated initializer returns its result.
        if (objectTypeNode.initFunction != null) {
            ((BLangReturn) generatedInitFnBody.stmts.get(i)).expr =
                    createUserDefinedInitInvocation(objectTypeNode);
        }
        for (BLangFunction fn : objectTypeNode.functions) {
            rewrite(fn, this.env);
        }
        rewrite(objectTypeNode.generatedInitFunction, this.env);
        rewrite(objectTypeNode.initFunction, this.env);
        result = objectTypeNode;
    }

    // Builds the call to the user-defined init(), forwarding the generated initializer's
    // required params and rest param.
    private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) {
        ArrayList<BLangExpression> paramRefs = new ArrayList<>();
        for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) {
            paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol));
        }
        BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos,
                ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol,
                paramRefs, Collections.emptyList(), symResolver);
        if (objectTypeNode.generatedInitFunction.restParam != null) {
            BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos,
                    objectTypeNode.generatedInitFunction.restParam.symbol);
            BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();
            bLangRestArgsExpression.expr = restVarRef;
            bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos;
            bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type;
            bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type;
            invocation.restArgs.add(bLangRestArgsExpression);
        }
        invocation.exprSymbol =
                ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol;
        return rewriteExpr(invocation);
    }

    @Override
    public void visit(BLangRecordTypeNode recordTypeNode) {
        recordTypeNode.fields.addAll(recordTypeNode.referencedFields);
        for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) {
            bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
        }
        // Create and register an init function when the record doesn't have one yet.
        if (recordTypeNode.initFunction == null) {
            recordTypeNode.initFunction =
                    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            env.enclPkg.addFunction(recordTypeNode.initFunction);
            env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction);
        }
        // Non-optional fields with default values get their assignments added to the
        // record's init function.
        for (BLangSimpleVariable field : recordTypeNode.fields) {
            if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol)
                    && !Symbols.isOptional(field.symbol) && field.expr != null) {
                recordTypeNode.initFunction.initFunctionStmts
                        .put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field,
                                recordTypeNode.initFunction.receiver.symbol));
            }
        }
        BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts
                .values().toArray(new BLangStatement[0]);
        BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body;
        for (int i = 0; i < recordTypeNode.initFunction.initFunctionStmts.size(); i++) {
            initFnBody.stmts.add(i, initStmts[i]);
        }
        // A local anonymous record is replaced by a reference to its generated type
        // definition.
        if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) {
            BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode);
            TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol, recordTypeNode,
                    env);
            recordTypeNode.desugared = true;
            result = userDefinedType;
            return;
        }
        result = recordTypeNode;
    }

    private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) {
        return ASTBuilderUtil.createUserDefineTypeNode(recordTypeNode.symbol.name.value, recordTypeNode.type,
                recordTypeNode.pos);
    }

    @Override
    public void visit(BLangArrayType arrayType) {
        arrayType.elemtype = rewrite(arrayType.elemtype, env);
        result = arrayType;
    }

    @Override
    public void visit(BLangConstrainedType constrainedType) {
        constrainedType.constraint = rewrite(constrainedType.constraint, env);
        result = constrainedType;
    }

    @Override
    public void visit(BLangStreamType streamType) {
        streamType.constraint = rewrite(streamType.constraint, env);
        streamType.error = rewrite(streamType.error, env);
        result = streamType;
    }

    @Override
    public void visit(BLangTableTypeNode tableTypeNode) {
        tableTypeNode.constraint = rewrite(tableTypeNode.constraint, env);
        tableTypeNode.tableKeyTypeConstraint = rewrite(tableTypeNode.tableKeyTypeConstraint, env);
        result = tableTypeNode;
    }

    @Override
    public void visit(BLangTableKeyTypeConstraint keyTypeConstraint) {
        keyTypeConstraint.keyType = rewrite(keyTypeConstraint.keyType, env);
        result = keyTypeConstraint;
    }

    @Override
    public void visit(BLangValueType valueType) {
        result = valueType;
    }

    @Override
    public void visit(BLangUserDefinedType userDefinedType) {
        result = userDefinedType;
    }

    @Override
    public void visit(BLangUnionTypeNode unionTypeNode) {
        List<BLangType> rewrittenMembers = new ArrayList<>();
        unionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env)));
        unionTypeNode.memberTypeNodes = rewrittenMembers;
        result = unionTypeNode;
    }

    @Override
    public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
        List<BLangType> rewrittenConstituents = new ArrayList<>();
        for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {
            rewrittenConstituents.add(rewrite(constituentTypeNode, env));
        }
intersectionTypeNode.constituentTypeNodes = rewrittenConstituents; result = intersectionTypeNode; } @Override public void visit(BLangErrorType errorType) { errorType.detailType = rewrite(errorType.detailType, env); result = errorType; } @Override public void visit(BLangFunctionTypeNode functionTypeNode) { functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env)); functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env); result = functionTypeNode; } @Override public void visit(BLangBuiltInRefTypeNode refTypeNode) { result = refTypeNode; } @Override public void visit(BLangTupleTypeNode tupleTypeNode) { List<BLangType> rewrittenMembers = new ArrayList<>(); tupleTypeNode.memberTypeNodes.forEach(member -> rewrittenMembers.add(rewrite(member, env))); tupleTypeNode.memberTypeNodes = rewrittenMembers; tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env); result = tupleTypeNode; } @Override public void visit(BLangBlockFunctionBody body) { SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env); body.stmts = rewriteStmt(body.stmts, bodyEnv); result = body; } @Override public void visit(BLangExprFunctionBody exprBody) { BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>()); BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body); returnStmt.expr = rewriteExpr(exprBody.expr); result = body; } @Override public void visit(BLangExternalFunctionBody body) { for (BLangAnnotationAttachment attachment : body.annAttachments) { rewrite(attachment, env); } result = body; } @Override public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); if (!funcNode.interfaceFunction) { addReturnIfNotPresent(funcNode); } funcNode.originalFuncSymbol = funcNode.symbol; funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol); funcNode.requiredParams = rewrite(funcNode.requiredParams, 
// --- continuation of visit(BLangFunction) from the previous chunk line.
            funcEnv);
    funcNode.restParam = rewrite(funcNode.restParam, funcEnv);
    funcNode.workers = rewrite(funcNode.workers, funcEnv);
    if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {
        funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);
    }
    funcNode.body = rewrite(funcNode.body, funcEnv);
    funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    if (funcNode.returnTypeNode != null) {
        funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
    }
    result = funcNode;
}

// Resources: nothing to desugar at this node.
@Override
public void visit(BLangResource resourceNode) {
}

// Desugars an annotation declaration by rewriting its attachments.
public void visit(BLangAnnotation annotationNode) {
    annotationNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
}

// Desugars an annotation attachment by rewriting its value expression.
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    annAttachmentNode.expr = rewrite(annAttachmentNode.expr, env);
    result = annAttachmentNode;
}

// Desugars a simple variable.  When the owner is neither an invokable nor a let scope,
// the initializer is dropped here — NOTE(review): presumably it was already moved into an
// init function by package-level desugaring; confirm.  Otherwise the type node and
// initializer are rewritten, inserting a conversion when the initializer's type differs
// from the declared type.
@Override
public void visit(BLangSimpleVariable varNode) {
    if (((varNode.symbol.owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE)
            && (varNode.symbol.owner.tag & SymTag.LET) != SymTag.LET) {
        varNode.expr = null;
        result = varNode;
        return;
    }
    if (varNode.typeNode != null && varNode.typeNode.getKind() != null) {
        varNode.typeNode = rewrite(varNode.typeNode, env);
    }
    BLangExpression bLangExpression = rewriteExpr(varNode.expr);
    if (bLangExpression != null) {
        bLangExpression = addConversionExprIfRequired(bLangExpression, varNode.type);
    }
    varNode.expr = bLangExpression;
    varNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = varNode;
}

// Desugars a let expression into a statement-expression: each let variable definition is
// rewritten (a definition may itself expand into a block whose statements are inlined),
// the result expression is captured in a fresh "$let_var_N_$" temp, and the block's value
// is a reference to that temp.  The env is swapped to the let's own env for the duration.
@Override
public void visit(BLangLetExpression letExpression) {
    SymbolEnv prevEnv = this.env;
    this.env = letExpression.env;
    BLangExpression expr = letExpression.expr;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
        if (node.getKind() == NodeKind.BLOCK) {
            blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
        } else {
            blockStmt.addStatement((BLangSimpleVariableDef) node);
        }
    }
    BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
            expr.type, expr, expr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.type = expr.type;
    result = rewrite(stmtExpr, env);
    this.env = prevEnv;
}

// Desugars a tuple binding pattern: binds the matched value to a synthetic "$tuple$"
// variable, then generates per-member and rest-field definitions from it.
@Override
public void visit(BLangTupleVariable varNode) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varNode.pos);
    String name = "$tuple$";
    final BLangSimpleVariable tuple =
            ASTBuilderUtil.createVariable(varNode.pos, name, symTable.arrayAllType, null,
                    new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                            symTable.arrayAllType, this.env.scope.owner));
    tuple.expr = varNode.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varNode.pos, blockStmt);
    variableDef.var = tuple;
    createVarDefStmts(varNode, blockStmt, tuple.symbol, null);
    createRestFieldVarDefStmts(varNode, blockStmt, tuple.symbol);
    result = rewrite(blockStmt, env);
}

// Desugars a record binding pattern: binds the matched value to a synthetic "$map$0"
// variable, then generates per-field definitions from it.
@Override
public void visit(BLangRecordVariable varNode) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varNode.pos);
    final BLangSimpleVariable mapVariable =
            ASTBuilderUtil.createVariable(varNode.pos, "$map$0", symTable.mapAllType, null,
                    new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                            symTable.mapAllType, this.env.scope.owner));
    mapVariable.expr = varNode.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varNode.pos, blockStmt);
    variableDef.var = mapVariable;
    createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}

// Desugars an error binding pattern: binds the matched error to a synthetic "$error$"
// variable and generates reason/detail definitions.  Continues on the next chunk line.
@Override
public void visit(BLangErrorVariable varNode) {
    final BLangBlockStmt blockStmt
// --- continuation of visit(BLangErrorVariable) from the previous chunk line.
            = ASTBuilderUtil.createBlockStmt(varNode.pos);
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varNode.pos,
            errorVarSymbol.name.value, symTable.errorType, null, errorVarSymbol);
    error.expr = varNode.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varNode.pos, blockStmt);
    variableDef.var = error;
    createVarDefStmts(varNode, blockStmt, error.symbol, null);
    result = rewrite(blockStmt, env);
}

// Desugars a block statement inside a fresh block scope.
@Override
public void visit(BLangBlockStmt block) {
    SymbolEnv blockEnv = SymbolEnv.createBlockEnv(block, env);
    block.stmts = rewriteStmt(block.stmts, blockEnv);
    result = block;
}

// Desugars a simple variable definition by rewriting the contained variable.
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    varDefNode.var = rewrite(varDefNode.var, env);
    result = varDefNode;
}

// A tuple variable definition desugars entirely into the rewritten variable (a block).
@Override
public void visit(BLangTupleVariableDef varDefNode) {
    result = rewrite(varDefNode.var, env);
}

/**
 * Generates the statements that populate a tuple binding pattern's rest variable: the rest
 * var is initialized to an empty array, and a synthesized foreach copies every source
 * element from the first non-matched index up to the source's length into it.
 */
private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
                                        BVarSymbol tupleVarSymbol) {
    final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
    boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
    DiagnosticPos pos = blockStmt.pos;
    if (arrayVar != null) {
        // rest var starts as an empty array literal of its own type
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = arrayVar.type;
        arrayVar.expr = arrayExpr;
        BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
        arrayVarDef.var = arrayVar;
        BLangExpression tupleExpr = parentTupleVariable.expr;
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
        // copying starts after the explicitly matched members
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) (isTupleType
                ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                : parentTupleVariable.memberVariables.size());
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        // foreach $foreach$i in start ... length { rest[rest.length()] = source[$foreach$i]; }
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // append-by-index: indexing at the current length grows the array
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
                createLengthInvocation(pos, arrayVarRef));
        indexAccessExpr.type = (isTupleType
                ? ((BTupleType) parentTupleVariable.type).restType : symTable.anyType);
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}

// A record variable definition desugars entirely into the rewritten variable (a block).
@Override
public void visit(BLangRecordVariableDef varDefNode) {
    result = rewrite(varDefNode.var, env);
}

// An error variable definition desugars entirely into the rewritten error variable.
@Override
public void visit(BLangErrorVariableDef varDefNode) {
    result = rewrite(varDefNode.errorVariable, env);
}

/**
 * This method iterate through each member of the tupleVar and create the relevant var def statements. This method
 * does the check for node kind of each member and call the related var def creation method.
 *
 * Example:
 * ((string, float) int)) ((a, b), c)) = (tuple)
 *
 * (a, b) is again a tuple, so it is a recursive var def creation.
 *
 * c is a simple var, so a simple var def will be created.
 *
 */
private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
    for (int index = 0; index < memberVars.size(); index++) {
        BLangVariable variable = memberVars.get(index);
        // each member is extracted by its integer position in the source tuple
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
        if (NodeKind.VARIABLE == variable.getKind()) {
            // leaf binding: a single var def with an index access as initializer
            createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr,
                    tupleVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
            // nested tuple: recurse with the index access chained onto the parent access
            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
            BLangIndexBasedAccess arrayAccessExpr =
                    ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                            new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
            // nested record: recurse into the record overload
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            // nested error binding: the accessed element type is the array's element type
            // when the source symbol is an array, otherwise plain error
            BType accessedElemType = symTable.errorType;
            if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                accessedElemType = arrayType.eType;
            }
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
        }
    }
}

/**
 * Overloaded method to handle record variables.
 * This method iterates through each member of the recordVar and creates the relevant var
 * def statements, dispatching on the node kind of each member binding.
 *
 * Example:
 * type Foo record {
 *     string name;
 *     (int, string) age;
 *     Address address;
 * };
 *
 * Foo {name: a, age: (b, c), address: d} = {record literal}
 *
 * a is a simple var, so a simple var def will be created.
 *
 * (b, c) is a tuple, so it is a recursive var def creation.
 *
 * d is a record, so it is a recursive var def creation.
 *
 */
private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
    for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
        BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
        // fields are extracted by their string key from the source map/record
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                recordFieldKeyValue.key.value);
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
            createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern,
                    parentBlockStmt, indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
            BLangIndexBasedAccess arrayAccessExpr =
                    ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                            new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                    parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern,
                    parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
        }
    }

    // handle the `...rest` binding: filter the matched keys out of the source map and bind
    // the remainder (constructed via generateRestFilter) to the rest parameter
    if (parentRecordVariable.restParam != null) {
        DiagnosticPos pos = parentBlockStmt.pos;
        BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
        BLangSimpleVarRef variableReference;
        if (parentIndexAccessExpr != null) {
            // nested record: materialize the accessed sub-map into a "$map$1" temp first
            BLangSimpleVariable mapVariable =
                    ASTBuilderUtil.createVariable(pos, "$map$1", parentIndexAccessExpr.type, null,
                            new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
                                    parentIndexAccessExpr.type, this.env.scope.owner));
            mapVariable.expr = parentIndexAccessExpr;
            BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDef.var = mapVariable;
            variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
        } else {
            // top level: the source map is the first statement's variable ("$map$0")
            variableReference = ASTBuilderUtil.createVariableRef(pos, ((BLangSimpleVariableDef)
// --- continuation of the record-variable createVarDefStmts rest-param handling.
                    parentBlockStmt.stmts.get(0)).var.symbol);
        }
        List<String> keysToRemove = parentRecordVariable.variableList.stream()
                .map(var -> var.getKey().getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                keysToRemove, restParamType, parentBlockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
        BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
        BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
        restParamVarDef.var = restParam;
        restParamVarDef.var.type = restParamType;
        restParam.expr = varRef;
    }
}

/**
 * This method will create the relevant var def statements for reason and details of the error variable.
 * The var def statements are created by invoking the reason() and detail() builtin methods.
 */
private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
    BVarSymbol convertedErrorVarSymbol;
    if (parentIndexBasedAccess != null) {
        // nested error binding: pull the element out via the index access and cast it to
        // error in a fresh "$error$N" temp; the access's type is temporarily widened to
        // any so the conversion is inserted, then restored
        BType prevType = parentIndexBasedAccess.type;
        parentIndexBasedAccess.type = symTable.anyType;
        BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++, symTable.errorType,
                addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                parentErrorVariable.pos);
        parentIndexBasedAccess.type = prevType;
        parentBlockStmt.addStatement(errorVarDef);
        convertedErrorVarSymbol = errorVarDef.var.symbol;
    } else {
        convertedErrorVarSymbol = errorVariableSymbol;
    }
    // bind the reason via the reason() builtin; an `_` reason binding is dropped entirely
    parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
            parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
    if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
        parentErrorVariable.reason = null;
    } else {
        BLangSimpleVariableDef reasonVariableDef =
                ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
        reasonVariableDef.var = parentErrorVariable.reason;
    }
    if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
            && parentErrorVariable.restDetail == null) {
        return;
    }
    // NOTE(review): detailMapType is computed but not used below in this chunk — verify
    // whether it is dead or consumed by code outside this view.
    BType detailMapType;
    BType detailType = ((BErrorType) parentErrorVariable.type).detailType;
    if (detailType.tag == TypeTags.MAP) {
        detailMapType = detailType;
    } else {
        detailMapType = symTable.detailType;
    }
    // bind the detail map once into "$error$detail" and project entries out of it
    parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
            parentErrorVariable.pos, convertedErrorVarSymbol, null);
    BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
            parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
    detailTempVarDef.type = parentErrorVariable.detailExpr.type;
    parentBlockStmt.addStatement(detailTempVarDef);
    this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
        BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
        createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
    }
    // `...rest` of the detail: filter out the explicitly bound keys and assign the rest
    if (parentErrorVariable.restDetail != null
            && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
        DiagnosticPos pos = parentErrorVariable.restDetail.pos;
        BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                pos, detailTempVarDef.var.symbol);
        List<String> keysToRemove = parentErrorVariable.detail.stream()
                .map(detail -> detail.key.getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos,
                keysToRemove, parentErrorVariable.restDetail.type, parentBlockStmt);
        BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
        variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                parentErrorVariable.restDetail.name.value, filteredDetail.type,
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                parentErrorVariable.restDetail.symbol);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
        parentBlockStmt.addStatement(assignmentStmt);
    }
    rewrite(parentBlockStmt, env);
}

/**
 * Binds the given symbol to a "$cast$temp$" variable of the target type, inserting a
 * conversion unless the target is a record (records are taken as-is).
 */
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
                                                     BType targetType) {
    BVarSymbol errorVarSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
            this.env.enclPkg.packageID, targetType, this.env.scope.owner);
    BLangSimpleVarRef variableRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    BLangExpression expr;
    if (targetType.tag == TypeTags.RECORD) {
        expr = variableRef;
    } else {
        expr = addConversionExprIfRequired(variableRef, targetType);
    }
    BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(pos,
            errorVarSym.name.value, targetType, expr, errorVarSym);
    return ASTBuilderUtil.createVariableDef(pos, errorVar);
}

/**
 * Builds the "rest" map for a mapping/error binding pattern: pipeline is
 * map -> entries() -> filter(not in keysToRemove) -> map(value) -> cloneWithType(target),
 * each step bound to a synthesized temp variable in the parent block.  Returns the final
 * temp variable holding the filtered map.  Continues on the next chunk line.
 */
private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                               List<String> keysToRemove, BType targetType,
                                               BLangBlockStmt parentBlockStmt) {
    BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
    int restNum = annonVarCount++;
    String name = "$map$ref$" + restNum;
    BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
    BLangInvocation entriesInvocation = generateMapEntriesInvocation(
            ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type);
    String entriesVarName = "$map$ref$entries$" + restNum;
    BType entriesType = new BMapType(TypeTags.MAP,
            new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
    BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType,
// --- continuation of generateRestFilter from the previous chunk line.
            parentBlockStmt, addConversionExprIfRequired(entriesInvocation, entriesType),
            entriesVarName);
    BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
    BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
    String filteredEntriesName = "$filtered$detail$entries" + restNum;
    BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt,
            filterInvocation, filteredEntriesName);
    String filteredVarName = "$detail$filtered" + restNum;
    BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
    BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
    BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt,
            mapInvocation, filteredVarName);
    String filteredRestVarName = "$restVar$" + restNum;
    BLangInvocation constructed = generateCloneWithTypeInvocation(pos, targetType, filtered.symbol);
    return defVariable(pos, targetType, parentBlockStmt,
            addConversionExprIfRequired(constructed, targetType), filteredRestVarName);
}

/**
 * Builds a langlib `entries()` invocation on the given map-typed expression.
 */
private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) {
    BLangInvocation invocationNode = createInvocationNode("entries", new ArrayList<>(), type);
    invocationNode.expr = expr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries"));
    invocationNode.requiredArgs = Lists.of(expr);
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    invocationNode.langLibInvocation = true;
    return invocationNode;
}

/**
 * Builds a langlib `map()` invocation over the filtered entries, applying the
 * entries-to-map lambda to each entry.
 */
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
                                                 BLangLambdaFunction backToMapLambda) {
    BLangInvocation invocationNode = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
    invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
    invocationNode.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    invocationNode.requiredArgs.add(backToMapLambda);
    return invocationNode;
}

/**
 * Synthesizes an anonymous function `function ((string, any) entry) returns any` that
 * returns the entry's value (index 1 of the key/value tuple), registers it in the
 * enclosing package, and wraps it in a lambda node.  Used to map entries back to values.
 */
private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
    String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
    BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"),
            this.env.scope.owner.pkgID, getStringAnyTupleType(), this.env.scope.owner);
    BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null,
            getStringAnyTupleType(), null, keyValSymbol);
    function.requiredParams.add(inputParameter);
    BLangValueType anyType = new BLangValueType();
    anyType.typeKind = TypeKind.ANY;
    anyType.type = symTable.anyType;
    function.returnTypeNode = anyType;
    BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = functionBlock;
    // val = entry[1]; return val;
    BLangIndexBasedAccess indexBasesAccessExpr =
            ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol,
                    ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 1));
    BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
    functionBlock.addStatement(tupSecondElem);
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
    returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);
    // create and wire the invokable symbol for the synthesized function
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value), env.enclPkg.packageID, function.type,
            env.enclEnv.enclVarSym, true);
    functionSymbol.retType = function.returnTypeNode.type;
    functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
    functionSymbol.scope = env.scope;
    functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            symTable.anyType, null);
    function.symbol = functionSymbol;
    rewrite(function, env);
    env.enclPkg.addFunction(function);
    return createLambdaFunction(function, functionSymbol);
}

/**
 * Builds a langlib `filter()` invocation over the entries map using the given predicate
 * lambda.
 */
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos,
                                                    BLangSimpleVariable entriesInvocationVar,
                                                    BLangLambdaFunction filter) {
    BLangInvocation invocationNode = createInvocationNode("filter", new ArrayList<>(),
            entriesInvocationVar.type);
    invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
    invocationNode.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type,
            names.fromString("filter"));
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos,
            entriesInvocationVar.symbol));
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    invocationNode.requiredArgs.add(filter);
    return invocationNode;
}

/**
 * Defines a fresh variable of the given type/initializer in the parent block, registers
 * its symbol in the current scope, and returns the variable.
 */
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
                                        BLangExpression expression, String name) {
    Name varName = names.fromString(name);
    BLangSimpleVariable detailMap = ASTBuilderUtil.createVariable(pos, name, varType, expression,
            new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner));
    BLangSimpleVariableDef constructedMap = ASTBuilderUtil.createVariableDef(pos, detailMap);
    constructedMap.type = varType;
    parentBlockStmt.addStatement(constructedMap);
    env.scope.define(varName, detailMap.symbol);
    return detailMap;
}

/**
 * Adds the var-def statement for one error-detail binding, dispatching on the binding
 * pattern kind (simple / record / tuple).  Continues on the next chunk line.
 */
private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
                                          BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                          BLangExpression detailEntryVar) {
    if (detailEntry.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
        BLangSimpleVariableDef errorDetailVar = createVarDef(
                ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
                detailEntry.valueBindingPattern.type, detailEntryVar,
                detailEntry.valueBindingPattern.pos);
// --- continuation of createAndAddBoundVariableDef from the previous chunk line.
        parentBlockStmt.addStatement(errorDetailVar);
    } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
        BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef(
                detailEntry.valueBindingPattern.pos,
                (BLangRecordVariable) detailEntry.valueBindingPattern);
        recordVariableDef.var.expr = detailEntryVar;
        recordVariableDef.type = symTable.recordType;
        parentBlockStmt.addStatement(recordVariableDef);
    } else if (detailEntry.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
        BLangTupleVariableDef tupleVariableDef = ASTBuilderUtil.createTupleVariableDef(
                detailEntry.valueBindingPattern.pos,
                (BLangTupleVariable) detailEntry.valueBindingPattern);
        parentBlockStmt.addStatement(tupleVariableDef);
    }
}

/**
 * Builds the expression extracting one error-detail entry from the "$error$detail" temp
 * map by its string key; the access's original type is marked as pure.
 */
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                             BVarSymbol tempDetailVarSymbol) {
    BLangExpression detailEntryVar = createIndexBasedAccessExpr(
            detailEntry.valueBindingPattern.type, detailEntry.valueBindingPattern.pos,
            createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
            tempDetailVarSymbol, null);
    if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
        bLangIndexBasedAccess.originalType = symTable.pureType;
    }
    return detailEntryVar;
}

/**
 * Folds a string-template's parts into a left-associated chain of ADD binary expressions.
 * Non-string, non-xml parts are first wrapped in a toString() invocation; if either side
 * of a concatenation is xml, the result type is xml, otherwise string.  Returns null for
 * an empty parts list.
 */
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
    BLangExpression concatExpr = null;
    BLangExpression currentExpr;
    for (BLangExpression expr : exprs) {
        currentExpr = expr;
        if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) {
            currentExpr = getToStringInvocationOnExpr(expr);
        }
        if (concatExpr == null) {
            concatExpr = currentExpr;
            continue;
        }
        BType binaryExprType =
                TypeTags.isXMLTypeTag(concatExpr.type.tag) || TypeTags.isXMLTypeTag(currentExpr.type.tag)
                        ? symTable.xmlType : symTable.stringType;
        concatExpr = ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr,
                binaryExprType, OperatorKind.ADD, null);
    }
    return concatExpr;
}

/**
 * Builds a lang.value:toString(expression) invocation, converting the argument to the
 * function's declared parameter type if required.
 */
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
            .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
    List<BLangExpression> requiredArgs = new ArrayList<BLangExpression>() {{
        add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
    }};
    return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol,
            requiredArgs, new ArrayList<>(), symResolver);
}

/**
 * Builds the detail() langlib invocation on either the given index access (when the error
 * is nested in a structure) or a direct reference to the error variable.
 */
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr = parentIndexBasedAccess != null
            ? parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, onExpr, new ArrayList<>(), null, pos);
}

/**
 * Builds the reason() langlib invocation on either the given index access or a direct
 * reference to the error variable, with the expected reason type.
 */
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
                                                           BVarSymbol errorVarSymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr = parentIndexBasedAccess != null
            ? parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
    return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, onExpr, new ArrayList<>(),
            reasonType, pos);
}

/**
 * Builds a cloneWithType(source, typedesc) langlib invocation producing
 * `targetType|error`.
 */
private BLangInvocation generateCloneWithTypeInvocation(DiagnosticPos pos, BType targetType,
                                                        BVarSymbol source) {
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    BLangInvocation invocationNode = createInvocationNode(CLONE_WITH_TYPE, new ArrayList<>(), typedescType);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    invocationNode.expr = typedescExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CLONE_WITH_TYPE));
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, source), typedescExpr);
    invocationNode.type = BUnionType.create(null, targetType, symTable.errorType);
    return invocationNode;
}

/**
 * Synthesizes a predicate lambda `function ((string, any) entry) returns boolean` that
 * returns false when the entry's key (index 0) matches any name in toRemoveList, true
 * otherwise.  Used by generateRestFilter to drop already-bound keys.
 * Continues on the next chunk line.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
    String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
    BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"),
            this.env.scope.owner.pkgID, getStringAnyTupleType(), this.env.scope.owner);
    BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
    // key = entry[0]
    BLangIndexBasedAccess indexBasesAccessExpr =
            ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol,
                    ASTBuilderUtil.createLiteral(pos, symTable.intType, (long) 0));
    BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
    functionBlock.addStatement(tupFirstElem);
    // one `if (key == name) { return false; }` per name to remove
    for (String toRemoveItem : toRemoveList) {
        createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
    }
    BInvokableSymbol functionSymbol =
// --- continuation of createFuncToFilterOutRestParam(List, DiagnosticPos).
            createReturnTrueStatement(pos, function, functionBlock);
    return createLambdaFunction(function, functionSymbol);
}

/**
 * Convenience overload: builds the rest-param filter lambda from a record binding
 * pattern's explicitly bound field names.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable,
                                                           DiagnosticPos pos) {
    List<String> fieldNamesToRemove = recordVariable.variableList.stream()
            .map(var -> var.getKey().getValue())
            .collect(Collectors.toList());
    return createFuncToFilterOutRestParam(fieldNamesToRemove, pos);
}

/**
 * Appends `if (<param as string> == key) { return false; }` to the filter lambda's body.
 */
private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol,
                          BLangBlockFunctionBody blockStmt, String key) {
    BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
    BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
    BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
    returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
    ifStmt.body = ifBlock;
    BLangGroupExpr groupExpr = new BLangGroupExpr();
    groupExpr.type = symTable.booleanType;
    BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
            ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
            symTable.booleanType, OperatorKind.EQUAL, null);
    binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
    groupExpr.expression = binaryExpr;
    ifStmt.expr = groupExpr;
}

/**
 * Wraps a synthesized function and its invokable symbol in a lambda node.
 */
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = function;
    lambdaFunction.type = functionSymbol.type;
    return lambdaFunction;
}

/**
 * Appends the final `return true;` to the filter lambda, creates and wires its invokable
 * symbol, rewrites the function and registers it in the enclosing package.
 */
private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
                                                   BLangBlockFunctionBody functionBlock) {
    BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
    trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value), env.enclPkg.packageID, function.type,
            env.enclEnv.enclVarSym, true);
    functionSymbol.retType = function.returnTypeNode.type;
    functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
    functionSymbol.scope = env.scope;
    functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            getRestType(functionSymbol), symTable.booleanType, null);
    function.symbol = functionSymbol;
    rewrite(function, env);
    env.enclPkg.addFunction(function);
    return functionSymbol;
}

/**
 * Sets up the shared shape of the synthesized filter lambdas: one (string, any) tuple
 * parameter, boolean return type, and an empty block body.
 */
private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
                                                            BVarSymbol keyValSymbol) {
    BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null,
            getStringAnyTupleType(), null, keyValSymbol);
    function.requiredParams.add(inputParameter);
    BLangValueType booleanTypeKind = new BLangValueType();
    booleanTypeKind.typeKind = TypeKind.BOOLEAN;
    booleanTypeKind.type = symTable.booleanType;
    function.returnTypeNode = booleanTypeKind;
    BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = functionBlock;
    return functionBlock;
}

/**
 * Returns the (string, any) tuple type used for map-entry lambda parameters.
 */
private BTupleType getStringAnyTupleType() {
    ArrayList<BType> typeList = new ArrayList<BType>() {{
        add(symTable.stringType);
        add(symTable.anyType);
    }};
    return new BTupleType(typeList);
}

/**
 * This method creates a simple variable def and assigns an array expression based on the given indexExpr.
 *
 * case 1: when there is no parent array access expression, but with the indexExpr : 1
 * string s = x[1];
 *
 * case 2: when there is a parent array expression : x[2] and indexExpr : 3
 * string s = x[2][3];
 *
 * case 3: when there is no parent array access expression, but with the indexExpr : name
 * string s = x[name];
 *
 * case 4: when there is a parent map expression : x[name] and indexExpr : fName
 * string s = x[name][fName];
 *
 * case 5: when there is a parent map expression : x[name] and indexExpr : 1
 * string s = x[name][1];
 */
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
                                    BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
                                    BLangIndexBasedAccess parentArrayAccessExpr) {
    Name varName = names.fromIdNode(simpleVariable.name);
    // an `_` binding is simply dropped
    if (varName == Names.IGNORE) {
        return;
    }
    final BLangSimpleVariableDef simpleVariableDef =
            ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos, parentBlockStmt);
    simpleVariableDef.var = simpleVariable;
    simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}

// Desugars an assignment.  Safe-navigation on the LHS (a?.b = ...) is handled by a
// dedicated rewrite; otherwise both sides are rewritten and a conversion is inserted to
// match the LHS type.
// NOTE(review): the RHS goes through rewriteExpr twice here (once assigned, once inside
// addConversionExprIfRequired) — verify whether rewriting an already-rewritten expression
// is intentional or a latent double-desugaring bug.
@Override
public void visit(BLangAssignment assignNode) {
    if (safeNavigateLHS(assignNode.varRef)) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
        accessExpr.leafNode = true;
        result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
        result = rewrite(result, env);
        return;
    }
    assignNode.varRef = rewriteExpr(assignNode.varRef);
    assignNode.expr = rewriteExpr(assignNode.expr);
    assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
    result = assignNode;
}

// Desugars a tuple destructuring statement into a block; the matched value is first bound
// to a synthetic "tuple" variable.  Continues past the end of this chunk.
@Override
public void visit(BLangTupleDestructure tupleDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
    BType runTimeType = new BArrayType(symTable.anyType);
    String name = "tuple";
    final BLangSimpleVariable tuple =
ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType, this.env.scope.owner)); tuple.expr = tupleDestructure.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos, blockStmt); variableDef.var = tuple; createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null); createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol); result = rewrite(blockStmt, env); } private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt, BVarSymbol tupleVarSymbol) { BLangTupleVarRef tupleVarRef = tupleDestructure.varRef; DiagnosticPos pos = blockStmt.pos; if (tupleVarRef.restParam != null) { BLangExpression tupleExpr = tupleDestructure.expr; BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam; BArrayType restParamType = (BArrayType) restParam.type; BLangArrayLiteral arrayExpr = createArrayLiteralExprNode(); arrayExpr.type = restParamType; BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt); restParamAssignment.varRef = restParam; restParamAssignment.varRef.type = restParamType; restParamAssignment.expr = arrayExpr; BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression(); startIndexLiteral.value = (long) tupleVarRef.expressions.size(); startIndexLiteral.type = symTable.intType; BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr); BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral, getModifiedIntRangeEndExpr(lengthInvocation)); BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode(); foreach.pos = pos; foreach.collection = intRangeInvocation; types.setForeachTypedBindingPatternType(foreach); final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType); 
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name), this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner); BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol); foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable); foreach.isDeclaredWithVar = true; BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos); BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam, createLengthInvocation(pos, restParam)); indexAccessExpr.type = restParamType.eType; createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null); foreach.body = foreachBody; blockStmt.addStatement(foreach); } } private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) { BInvokableSymbol lengthInvokableSymbol = (BInvokableSymbol) symResolver .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME)); BLangInvocation lengthInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthInvokableSymbol, Lists.of(collection), symResolver); lengthInvocation.argExprs = lengthInvocation.requiredArgs; lengthInvocation.type = lengthInvokableSymbol.type.getReturnType(); return lengthInvocation; } /** * This method iterate through each member of the tupleVarRef and create the relevant var ref assignment statements. * This method does the check for node kind of each member and call the related var ref creation method. * * Example: * ((a, b), c)) = (tuple) * * (a, b) is again a tuple, so it is a recursive var ref creation. * * c is a simple var, so a simple var def will be created. 
* */ private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) { final List<BLangExpression> expressions = parentTupleVariable.expressions; for (int index = 0; index < expressions.size(); index++) { BLangExpression expression = expressions.get(index); if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() || NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() || NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() || NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) { BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index); createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr, tupleVarSymbol, parentIndexAccessExpr); continue; } if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) { BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index); BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos, new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol, arrayAccessExpr); continue; } if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) { BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType, (long) index); BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangRecordVarRef) expression, 
parentBlockStmt, tupleVarSymbol, arrayAccessExpr); BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode( (BRecordType) recordVarRef.type, env.enclPkg.packageID, symTable, recordVarRef.pos); recordTypeNode.initFunction = TypeDefBuilderHelper .createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper .addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol, recordTypeNode, env); continue; } if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) { BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType, (long) index); BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol, arrayAccessExpr); } } } /** * This method creates a assignment statement and assigns and array expression based on the given indexExpr. 
* */ private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt, BLangExpression indexExpr, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentArrayAccessExpr) { if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName); if (varName == Names.IGNORE) { return; } } BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos, indexExpr, tupleVarSymbol, parentArrayAccessExpr); assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type); final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt); assignmentStmt.varRef = simpleVarRef; assignmentStmt.expr = assignmentExpr; } private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr, BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) { BLangIndexBasedAccess arrayAccess = ASTBuilderUtil.createIndexBasesAccessExpr(varPos, symTable.anyType, tupleVarSymbol, indexExpr); arrayAccess.originalType = varType; if (parentExpr != null) { arrayAccess.expr = parentExpr; } final BLangExpression assignmentExpr; if (types.isValueType(varType)) { BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode(); castExpr.expr = arrayAccess; castExpr.type = varType; assignmentExpr = castExpr; } else { assignmentExpr = arrayAccess; } return assignmentExpr; } @Override public void visit(BLangRecordDestructure recordDestructure) { final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos); BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null); String name = "$map$0"; final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(recordDestructure.pos, name, runTimeType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, 
runTimeType, this.env.scope.owner)); mapVariable.expr = recordDestructure.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil. createVariableDefStmt(recordDestructure.pos, blockStmt); variableDef.var = mapVariable; createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null); result = rewrite(blockStmt, env); } @Override public void visit(BLangErrorDestructure errorDestructure) { final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos); String name = "$error$"; final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, name, symTable.errorType, null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner)); errorVar.expr = errorDestructure.expr; final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos, blockStmt); variableDef.var = errorVar; createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null); result = rewrite(blockStmt, env); } private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt, BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) { final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields; for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) { BLangExpression variableReference = varRefKeyValue.variableReference; BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType, varRefKeyValue.variableName.getValue()); if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() || NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() || NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() || NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) { createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt, 
indexExpr, recordVarSymbol, parentIndexAccessExpr); continue; } if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) { BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference; BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr( parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr); continue; } if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) { BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference; BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos, symTable.tupleType, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr); continue; } if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) { BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos, symTable.errorType, recordVarSymbol, indexExpr); if (parentIndexAccessExpr != null) { arrayAccessExpr.expr = parentIndexAccessExpr; } createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol, arrayAccessExpr); } } if (parentRecordVarRef.restParam != null) { DiagnosticPos pos = parentBlockStmt.pos; BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type; BLangSimpleVarRef variableReference; if (parentIndexAccessExpr != null) { BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType, null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID, restParamType, this.env.scope.owner)); mapVariable.expr = parentIndexAccessExpr; BLangSimpleVariableDef variableDef 
= ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt); variableDef.var = mapVariable; variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol); } else { variableReference = ASTBuilderUtil.createVariableRef(pos, ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol); } BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam; List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream() .map(field -> field.variableName.value) .collect(Collectors.toList()); BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos, keysToRemove, restParamType, parentBlockStmt); BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol); BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt); restParamAssignment.varRef = restParam; restParamAssignment.varRef.type = restParamType; restParamAssignment.expr = varRef; } } private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt, BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) { if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF || names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) { BLangAssignment reasonAssignment = ASTBuilderUtil .createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt); reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos, symTable.stringType, errorVarySymbol, parentIndexAccessExpr); reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type); reasonAssignment.varRef = parentErrorVarRef.reason; } if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) { return; } BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos, errorVarySymbol, parentIndexAccessExpr); 
BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++, symTable.detailType, errorDetailBuiltinFunction, parentErrorVarRef.pos); detailTempVarDef.type = symTable.detailType; parentBlockStmt.addStatement(detailTempVarDef); this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol); List<String> extractedKeys = new ArrayList<>(); for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) { extractedKeys.add(detail.name.value); BLangVariableReference ref = (BLangVariableReference) detail.expr; BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos, createStringLiteral(detail.name.pos, detail.name.value), detailTempVarDef.var.symbol, null); if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar; bLangIndexBasedAccess.originalType = symTable.pureType; } BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt); detailAssignment.varRef = ref; detailAssignment.expr = detailEntryVar; } if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) { BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos, detailTempVarDef.var.symbol); BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos, extractedKeys, parentErrorVarRef.restVar.type, parentBlockStmt); BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos, parentBlockStmt); restAssignment.varRef = parentErrorVarRef.restVar; restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos, filteredDetail.symbol); } BErrorType errorType = (BErrorType) parentErrorVarRef.type; if (errorType.detailType.getKind() == TypeKind.RECORD) { BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol; tsymbol.initializerFunc = createRecordInitFunc(); 
tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol); } } private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) { if (parentErrorVarRef.restVar == null) { return true; } if (parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return (((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value.equals(IGNORE.value)); } return false; } @Override public void visit(BLangRetry retryNode) { DiagnosticPos pos = retryNode.retryBody.pos; BLangBlockStmt retryBlockStmt = ASTBuilderUtil.createBlockStmt(retryNode.pos); BLangSimpleVariableDef retryManagerVarDef = createRetryManagerDef(retryNode.retrySpec, retryNode.pos); retryBlockStmt.stmts.add(retryManagerVarDef); BLangBlockFunctionBody retryBody = ASTBuilderUtil.createBlockFunctionBody(pos); BType retryReturnType = BUnionType.create(null, symTable.anyType, symTable.errorType); BLangType retryLambdaReturnType = ASTBuilderUtil.createTypeNode(retryReturnType); BLangLambdaFunction retryFunc = createLambdaFunction(pos, "$retryFunc$", Collections.emptyList(), retryLambdaReturnType, retryBody); retryBody.stmts.addAll(retryNode.retryBody.stmts); retryFunc.function = resolveReturnTypeCast(retryFunc.function, env); BVarSymbol retryFuncVarSymbol = new BVarSymbol(0, names.fromString("$retryFunc$"), env.scope.owner.pkgID, retryFunc.type, retryFunc.function.symbol); BLangSimpleVariable retryLambdaVariable = ASTBuilderUtil.createVariable(pos, "$retryFunc$", retryFunc.type, retryFunc, retryFuncVarSymbol); BLangSimpleVariableDef retryLambdaVariableDef = ASTBuilderUtil.createVariableDef(pos, retryLambdaVariable); BLangSimpleVarRef retryLambdaVarRef = new BLangSimpleVarRef.BLangLocalVarRef(retryLambdaVariable.symbol); retryLambdaVarRef.type = retryFuncVarSymbol.type; retryBlockStmt.stmts.add(retryLambdaVariableDef); BLangInvocation retryLambdaInvocation = new BLangInvocation.BFunctionPointerInvocation(pos, retryLambdaVarRef, retryLambdaVariable.symbol, 
retryReturnType); retryLambdaInvocation.argExprs = new ArrayList<>(); BLangTrapExpr retryFunctionTrapExpression = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode(); retryFunctionTrapExpression.type = retryReturnType; retryFunctionTrapExpression.expr = retryLambdaInvocation; retryFunc.capturedClosureEnv = env; BVarSymbol retryFunctionVarSymbol = new BVarSymbol(0, new Name("$result$"), env.scope.owner.pkgID, retryReturnType, env.scope.owner); BLangSimpleVariable retryFunctionVariable = ASTBuilderUtil.createVariable(pos, "$result$", retryReturnType, retryFunctionTrapExpression, retryFunctionVarSymbol); BLangSimpleVariableDef retryFunctionVariableDef = ASTBuilderUtil.createVariableDef(pos, retryFunctionVariable); retryBlockStmt.stmts.add(retryFunctionVariableDef); BLangSimpleVarRef retryFunctionVariableRef = new BLangSimpleVarRef.BLangLocalVarRef(retryFunctionVariable.symbol); retryFunctionVariableRef.type = retryFunctionVariable.symbol.type; BLangWhile whileNode = createRetryWhileLoop(pos, retryManagerVarDef, retryFunctionTrapExpression, retryFunctionVariableRef); retryBlockStmt.stmts.add(whileNode); if (retryNode.retryBodyReturns) { BLangInvokableNode encInvokable = env.enclInvokable; BLangReturn returnNode = ASTBuilderUtil.createReturnStmt(pos, addConversionExprIfRequired(retryFunctionVariableRef, encInvokable.returnTypeNode.type)); retryBlockStmt.stmts.add(returnNode); } result = rewrite(retryBlockStmt, env); } protected BLangFunction resolveReturnTypeCast(BLangFunction function, SymbolEnv env) { boolean prevWithinRetryBlock = this.withinRetryBlock; this.withinRetryBlock = true; BLangFunction rewrittenFunc = rewrite(function, env); this.withinRetryBlock = prevWithinRetryBlock; return rewrittenFunc; } protected BLangWhile createRetryWhileLoop(DiagnosticPos retryBlockPos, BLangSimpleVariableDef retryManagerVarDef, BLangExpression trapExpr, BLangSimpleVarRef result) { BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode(); whileNode.pos = 
retryBlockPos; BLangTypeTestExpr isErrorCheck = createTypeCheckExpr(retryBlockPos, result, getErrorTypeNode()); BLangSimpleVarRef retryManagerVarRef = new BLangLocalVarRef(retryManagerVarDef.var.symbol); retryManagerVarRef.type = retryManagerVarDef.var.symbol.type; BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(retryBlockPos, retryManagerVarRef, result); whileNode.expr = ASTBuilderUtil.createBinaryExpr(retryBlockPos, isErrorCheck, shouldRetryInvocation, symTable.booleanType, OperatorKind.AND, null); BLangBlockStmt whileBlockStmnt = ASTBuilderUtil.createBlockStmt(retryBlockPos); BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(retryBlockPos, result, trapExpr); whileBlockStmnt.stmts.add(assignment); whileNode.body = whileBlockStmnt; return whileNode; } protected BLangSimpleVariableDef createRetryManagerDef(BLangRetrySpec retrySpec, DiagnosticPos pos) { BTypeSymbol retryManagerTypeSymbol = (BObjectTypeSymbol) symTable.langInternalModuleSymbol .scope.lookup(names.fromString("DefaultRetryManager")).symbol; BType retryManagerType = retryManagerTypeSymbol.type; if (retrySpec.retryManagerType != null) { retryManagerType = retrySpec.retryManagerType.type; } BVarSymbol retryMangerSymbol = new BVarSymbol(0, names.fromString("$retryManager$"), env.scope.owner.pkgID, retryManagerType, this.env.scope.owner); BLangTypeInit managerInit = ASTBuilderUtil.createEmptyTypeInit(pos, retryManagerType); managerInit.initInvocation.requiredArgs = retrySpec.argExprs; BLangSimpleVariable retryManagerVariable = ASTBuilderUtil.createVariable(pos, "$retryManager$", retryManagerType, managerInit, retryMangerSymbol); return ASTBuilderUtil.createVariableDef(pos, retryManagerVariable); } BLangInvocation createRetryManagerShouldRetryInvocation(DiagnosticPos pos, BLangSimpleVarRef managerVarRef, BLangSimpleVarRef trapResultRef) { BInvokableSymbol shouldRetryFuncSymbol = getShouldRetryFunc((BVarSymbol) managerVarRef.symbol).symbol; BLangInvocation 
shouldRetryInvocation = (BLangInvocation) TreeBuilder.createInvocationNode(); shouldRetryInvocation.pos = pos; shouldRetryInvocation.expr = managerVarRef; shouldRetryInvocation.requiredArgs = Lists.of(trapResultRef); shouldRetryInvocation.argExprs = shouldRetryInvocation.requiredArgs; shouldRetryInvocation.symbol = shouldRetryFuncSymbol; shouldRetryInvocation.type = shouldRetryFuncSymbol.retType; shouldRetryInvocation.langLibInvocation = false; return shouldRetryInvocation; } private BAttachedFunction getShouldRetryFunc(BVarSymbol retryManagerSymbol) { BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) retryManagerSymbol.type.tsymbol; for (BAttachedFunction bAttachedFunction : typeSymbol.attachedFuncs) { if (bAttachedFunction.funcName.value.equals(RETRY_MANAGER_OBJECT_SHOULD_RETRY_FUNC)) { return bAttachedFunction; } } return null; } protected BLangTypeTestExpr createTypeCheckExpr(DiagnosticPos pos, BLangExpression expr, BLangType type) { BLangTypeTestExpr testExpr = ASTBuilderUtil.createTypeTestExpr(pos, expr, type); testExpr.type = symTable.booleanType; return testExpr; } @Override public void visit(BLangRetryTransaction retryTransaction) { BLangStatementExpression retryTransactionStmtExpr = transactionDesugar.desugar(retryTransaction, env); if (!retryTransaction.transactionReturns) { BLangExpressionStmt transactionExprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode(); transactionExprStmt.pos = retryTransaction.pos; transactionExprStmt.expr = retryTransactionStmtExpr; transactionExprStmt.type = symTable.nilType; result = rewrite(transactionExprStmt, env); } else { BLangReturn bLangReturn = ASTBuilderUtil.createReturnStmt(retryTransaction.pos, retryTransactionStmtExpr); result = rewrite(bLangReturn, env); } } @Override public void visit(BLangContinue nextNode) { result = nextNode; } @Override public void visit(BLangBreak breakNode) { result = breakNode; } @Override public void visit(BLangReturn returnNode) { if (returnNode.expr != null) { if 
(this.withinRetryBlock) { returnNode.expr = rewriteExpr(addConversionExprIfRequired(returnNode.expr, symTable.anyOrErrorType)); } else { returnNode.expr = rewriteExpr(returnNode.expr); } } result = returnNode; } @Override public void visit(BLangPanic panicNode) { panicNode.expr = rewriteExpr(panicNode.expr); result = panicNode; } @Override public void visit(BLangXMLNSStatement xmlnsStmtNode) { xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env); result = xmlnsStmtNode; } @Override public void visit(BLangXMLNS xmlnsNode) { BLangXMLNS generatedXMLNSNode; xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI); BSymbol ownerSymbol = xmlnsNode.symbol.owner; if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) { generatedXMLNSNode = new BLangLocalXMLNS(); } else { generatedXMLNSNode = new BLangPackageXMLNS(); } generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI; generatedXMLNSNode.prefix = xmlnsNode.prefix; generatedXMLNSNode.symbol = xmlnsNode.symbol; result = generatedXMLNSNode; } public void visit(BLangCompoundAssignment compoundAssignment) { BLangVariableReference varRef = compoundAssignment.varRef; if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) { if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol); varRef.lhsVar = true; } result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef), rewriteExpr(compoundAssignment.modifiedExpr)); return; } List<BLangStatement> statements = new ArrayList<>(); List<BLangSimpleVarRef> varRefs = new ArrayList<>(); List<BType> types = new ArrayList<>(); do { BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$", ((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr, compoundAssignment.pos); BLangSimpleVarRef tempVarRef = 
ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos, tempIndexVarDef.var.symbol); statements.add(0, tempIndexVarDef); varRefs.add(0, tempVarRef); types.add(0, varRef.type); varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr; } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR); BLangVariableReference var = varRef; for (int ref = 0; ref < varRefs.size(); ref++) { var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref)); var.type = types.get(ref); } var.type = compoundAssignment.varRef.type; BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var, compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null); rhsExpression.type = compoundAssignment.modifiedExpr.type; BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var, rhsExpression); statements.add(assignStmt); BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements); result = rewrite(bLangBlockStmt, env); } @Override public void visit(BLangExpressionStmt exprStmtNode) { exprStmtNode.expr = rewriteExpr(exprStmtNode.expr); result = exprStmtNode; } @Override public void visit(BLangIf ifNode) { ifNode.expr = rewriteExpr(ifNode.expr); ifNode.body = rewrite(ifNode.body, env); ifNode.elseStmt = rewrite(ifNode.elseStmt, env); result = ifNode; } @Override public void visit(BLangMatch matchStmt) { BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode(); matchBlockStmt.pos = matchStmt.pos; String matchExprVarName = GEN_VAR_PREFIX.value; BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos, matchExprVarName, matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0, names.fromString(matchExprVarName), this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner)); BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar); 
matchBlockStmt.stmts.add(matchExprVarDef); matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar)); rewrite(matchBlockStmt, this.env); result = matchBlockStmt; } @Override public void visit(BLangForeach foreach) { BLangBlockStmt blockNode; BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID, foreach.collection.type, this.env.scope.owner); BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$", foreach.collection.type, foreach.collection, dataSymbol); BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable); BVarSymbol collectionSymbol = dataVariable.symbol; switch (foreach.collection.type.tag) { case TypeTags.STRING: case TypeTags.ARRAY: case TypeTags.TUPLE: case TypeTags.XML: case TypeTags.MAP: case TypeTags.TABLE: case TypeTags.STREAM: case TypeTags.RECORD: BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol); blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, true); break; case TypeTags.OBJECT: iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol); blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, false); break; default: blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos); blockNode.stmts.add(0, dataVarDef); break; } rewrite(blockNode, this.env); result = blockNode; } private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach, BLangSimpleVariableDef dataVariableDefinition, BVarSymbol collectionSymbol, BInvokableSymbol iteratorInvokableSymbol, boolean isIteratorFuncFromLangLib) { BLangSimpleVariableDef iteratorVarDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol, iteratorInvokableSymbol, isIteratorFuncFromLangLib); BLangBlockStmt blockNode = desugarForeachToWhile(foreach, iteratorVarDef); blockNode.stmts.add(0, dataVariableDefinition); return blockNode; } 
/**
 * Looks up the attached iterator function of an iterable object type.
 *
 * @param collectionSymbol symbol whose type must be an object type with an attached
 *                         {@code BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC} function
 * @return the invokable symbol of the object's iterator function
 * @throws IllegalStateException if the object type has no iterator function — previously this
 *                               fell through to an unexplained {@code NullPointerException}
 */
public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
    for (BAttachedFunction func : typeSymbol.attachedFuncs) {
        if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            // Return directly; the original copied into a redundant local before dereferencing.
            return func.symbol;
        }
    }
    throw new IllegalStateException("iterable object type '" + typeSymbol
            + "' has no attached '" + BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC + "' function");
}

/**
 * Resolves the lang-lib iterator method for a built-in collection type
 * (string, array, tuple, xml, map, table, stream, record).
 */
BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
}
It was more so that people know what they are using. People are used to Hamcrest assertions being static, but I didn't want a magic `get(...)` whose origin people can't tell at a glance.
/**
 * Verifies the /health endpoint reports an overall "UP" outcome and that the single
 * registered check ("basic") is also "UP".
 */
public void testHealth() {
    try {
        // Force JSON parsing regardless of the response's declared content type.
        RestAssured.defaultParser = Parser.JSON;
        RestAssured.when().get("/health").then()
                .body("outcome", is("UP"),
                        "checks.state", contains("UP"),
                        "checks.name", contains("basic"));
    } finally {
        // RestAssured configuration is global static state; restore it so other tests
        // are not affected by the parser override above.
        RestAssured.reset();
    }
}
RestAssured.when().get("/health").then()
/**
 * Asserts the health endpoint: the aggregate "outcome" must be "UP" and the lone
 * "basic" check must report state "UP".
 */
public void testHealth() {
    try {
        // The endpoint may not set an explicit content type, so force the JSON parser.
        RestAssured.defaultParser = Parser.JSON;
        RestAssured.when().get("/health").then()
                .body("outcome", is("UP"),
                        "checks.state", contains("UP"),
                        "checks.name", contains("basic"));
    } finally {
        // Undo the global defaultParser change so subsequent tests see a clean RestAssured.
        RestAssured.reset();
    }
}
class HealthUnitTest { @Deployment public static JavaArchive deploy() { return ShrinkWrap.create(JavaArchive.class) .addClasses(BasicHealthCheck.class) .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"); } @Test }
class HealthUnitTest { @Deployment public static JavaArchive deploy() { return ShrinkWrap.create(JavaArchive.class) .addClasses(BasicHealthCheck.class) .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"); } @Test }
I get your point, but that would require annotating the `inputStream` [here](https://github.com/julianhyde/sqlline/blob/sqlline-1.4.0/src/main/java/sqlline/SqlLine.java#L615) in the `begin` method with `@Nullable`, which is outside this project. What could be a possible workaround here?
/**
 * Runs the given sqlline arguments on the background pool and returns a future holding
 * the captured console output, tokenized into lines by {@code toLines}.
 *
 * @param args sqlline command-line arguments (connection setup plus SQL statements)
 * @return future resolving to the query output lines
 */
private Future<List<List<String>>> runQueryInBackground(String[] args) {
    // No raw (Callable) cast: a value-returning block lambda is incompatible with
    // Runnable, so submit(...) unambiguously infers Callable<List<List<String>>> and
    // the previous unchecked conversion disappears.
    return pool.submit(
        () -> {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            // Empty stdin so the query never blocks waiting for interactive input.
            InputStream inputStream = new ByteArrayInputStream(new byte[0]);
            BeamSqlLine.runSqlLine(args, inputStream, outputStream, null);
            return toLines(outputStream);
        });
}
BeamSqlLine.runSqlLine(args, inputStream, outputStream, null);
// NOTE(review): tail of runQueryInBackground — the method's start is outside this view.
new ByteArrayOutputStream();
BeamSqlLine.runSqlLine(args, null, outputStream, null);
return toLines(outputStream);
});
}

/**
 * Parses a "yyyy-MM-dd HH:mm:ss" timestamp into epoch milliseconds using the shared
 * UTC-configured formatter.
 *
 * @param timestamp timestamp string in the formatter's pattern
 * @return epoch milliseconds
 * @throws ParseException if the string does not match the expected pattern
 */
// NOTE(review): dateFormat is a shared static SimpleDateFormat, which is not thread-safe;
// fine while parsing happens on a single thread — confirm callers.
private long convertTimestampToMillis(String timestamp) throws ParseException {
    return dateFormat.parse(timestamp).getTime();
}
class BeamSqlLineIT implements Serializable { @Rule public transient TestPubsub eventsTopic = TestPubsub.create(); private static String project = ""; private static String createPubsubTableStatement = ""; private static String setProject = ""; private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); @SuppressWarnings("initialization.fields.uninitialized") private ExecutorService pool; @BeforeClass public static void setUpClass() { project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject(); setProject = String.format("SET project = '%s';", project); createPubsubTableStatement = "CREATE EXTERNAL TABLE taxi_rides (\n" + " event_timestamp TIMESTAMP,\n" + " attributes MAP<VARCHAR, VARCHAR>,\n" + " payload ROW<\n" + " ride_id VARCHAR,\n" + " point_idx INT,\n" + " latitude DOUBLE,\n" + " longitude DOUBLE,\n" + " meter_reading DOUBLE,\n" + " meter_increment DOUBLE,\n" + " ride_status VARCHAR,\n" + " passenger_count TINYINT>)\n" + " TYPE pubsub \n" + " LOCATION '%s'\n" + " TBLPROPERTIES '{\"timestampAttributeKey\": \"ts\"}';"; dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); } @Before public void setUp() { pool = Executors.newFixedThreadPool(1); } @After public void tearDown() { pool.shutdown(); } @Test @Ignore("https: public void testSelectFromPubsub() throws Exception { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, taxi_rides.payload.latitude, " + "taxi_rides.payload.longitude from taxi_rides LIMIT 3;"); Future<List<List<String>>> expectedResult = runQueryInBackground(args); eventsTopic.assertSubscriptionEventuallyCreated(project, Duration.standardMinutes(1)); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), taxiRideJSON("id1", 1, 40.702, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), 
taxiRideJSON("id2", 2, 40.703, -74.002, 1000, 10, "enroute", 4)), message( convertTimestampToMillis("2018-07-02 13:26:06"), taxiRideJSON("id3", 3, 30.0, -72.32324, 2000, 20, "enroute", 7))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.702", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.703", "-74.002"), Arrays.asList("2018-07-02 13:26:06", "enroute", "30.0", "-72.32324")), everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } @Test @Ignore("https: public void testFilterForSouthManhattan() throws Exception { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, \n" + "taxi_rides.payload.latitude, taxi_rides.payload.longitude from taxi_rides\n" + " WHERE taxi_rides.payload.longitude > -74.747\n" + " AND taxi_rides.payload.longitude < -73.969\n" + " AND taxi_rides.payload.latitude > 40.699\n" + " AND taxi_rides.payload.latitude < 40.720 LIMIT 2;"); Future<List<List<String>>> expectedResult = runQueryInBackground(args); eventsTopic.assertSubscriptionEventuallyCreated(project, Duration.standardMinutes(1)); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), taxiRideJSON("id1", 1, 40.701, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), taxiRideJSON("id2", 2, 40.702, -74.002, 1000, 10, "enroute", 4)), message( convertTimestampToMillis("2018-07-02 13:26:06"), taxiRideJSON("id3", 3, 30, -72.32324, 2000, 20, "enroute", 7)), message( convertTimestampToMillis("2018-07-02 14:28:22"), taxiRideJSON("id4", 4, 34, -73.32324, 2000, 20, "enroute", 8))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.701", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.702", "-74.002")), 
everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } private String taxiRideJSON( String rideId, int pointIdex, double latitude, double longitude, int meterReading, int meterIncrement, String rideStatus, int passengerCount) { ObjectMapper mapper = new ObjectMapper(); ObjectNode objectNode = mapper.createObjectNode(); objectNode.put("ride_id", rideId); objectNode.put("point_idx", pointIdex); objectNode.put("latitude", latitude); objectNode.put("longitude", longitude); objectNode.put("meter_reading", meterReading); objectNode.put("meter_increment", meterIncrement); objectNode.put("ride_status", rideStatus); objectNode.put("passenger_count", passengerCount); return objectNode.toString(); } /** Suppressing this due to https: @SuppressWarnings("return.type.incompatible") private Future<List<List<String>>> runQueryInBackground(String[] args) { return pool.submit( (Callable) () -> { ByteArrayOutputStream outputStream = private PubsubMessage message(long timestampInMillis, String jsonPayload) { return new PubsubMessage( jsonPayload.getBytes(UTF_8), ImmutableMap.of("ts", String.valueOf(timestampInMillis))); } }
class BeamSqlLineIT implements Serializable { @Rule public transient TestPubsub eventsTopic = TestPubsub.create(); private static String project = ""; private static String createPubsubTableStatement = ""; private static String setProject = ""; private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); @SuppressWarnings("initialization.fields.uninitialized") private ExecutorService pool; @BeforeClass public static void setUpClass() { project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject(); setProject = String.format("SET project = '%s';", project); createPubsubTableStatement = "CREATE EXTERNAL TABLE taxi_rides (\n" + " event_timestamp TIMESTAMP,\n" + " attributes MAP<VARCHAR, VARCHAR>,\n" + " payload ROW<\n" + " ride_id VARCHAR,\n" + " point_idx INT,\n" + " latitude DOUBLE,\n" + " longitude DOUBLE,\n" + " meter_reading DOUBLE,\n" + " meter_increment DOUBLE,\n" + " ride_status VARCHAR,\n" + " passenger_count TINYINT>)\n" + " TYPE pubsub \n" + " LOCATION '%s'\n" + " TBLPROPERTIES '{\"timestampAttributeKey\": \"ts\"}';"; dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); } @Before public void setUp() { pool = Executors.newFixedThreadPool(1); } @After public void tearDown() { pool.shutdown(); } @Test @Ignore("https: public void testSelectFromPubsub() throws Exception { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, taxi_rides.payload.latitude, " + "taxi_rides.payload.longitude from taxi_rides LIMIT 3;"); Future<List<List<String>>> expectedResult = runQueryInBackground(args); eventsTopic.assertSubscriptionEventuallyCreated(project, Duration.standardMinutes(1)); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), taxiRideJSON("id1", 1, 40.702, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), 
taxiRideJSON("id2", 2, 40.703, -74.002, 1000, 10, "enroute", 4)), message( convertTimestampToMillis("2018-07-02 13:26:06"), taxiRideJSON("id3", 3, 30.0, -72.32324, 2000, 20, "enroute", 7))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.702", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.703", "-74.002"), Arrays.asList("2018-07-02 13:26:06", "enroute", "30.0", "-72.32324")), everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } @Test @Ignore("https: public void testFilterForSouthManhattan() throws Exception { String[] args = buildArgs( String.format(createPubsubTableStatement, eventsTopic.topicPath()), setProject, "SELECT event_timestamp, taxi_rides.payload.ride_status, \n" + "taxi_rides.payload.latitude, taxi_rides.payload.longitude from taxi_rides\n" + " WHERE taxi_rides.payload.longitude > -74.747\n" + " AND taxi_rides.payload.longitude < -73.969\n" + " AND taxi_rides.payload.latitude > 40.699\n" + " AND taxi_rides.payload.latitude < 40.720 LIMIT 2;"); Future<List<List<String>>> expectedResult = runQueryInBackground(args); eventsTopic.assertSubscriptionEventuallyCreated(project, Duration.standardMinutes(1)); List<PubsubMessage> messages = ImmutableList.of( message( convertTimestampToMillis("2018-07-01 21:25:20"), taxiRideJSON("id1", 1, 40.701, -74.001, 1000, 10, "enroute", 2)), message( convertTimestampToMillis("2018-07-01 21:26:06"), taxiRideJSON("id2", 2, 40.702, -74.002, 1000, 10, "enroute", 4)), message( convertTimestampToMillis("2018-07-02 13:26:06"), taxiRideJSON("id3", 3, 30, -72.32324, 2000, 20, "enroute", 7)), message( convertTimestampToMillis("2018-07-02 14:28:22"), taxiRideJSON("id4", 4, 34, -73.32324, 2000, 20, "enroute", 8))); eventsTopic.publish(messages); assertThat( Arrays.asList( Arrays.asList("2018-07-01 21:25:20", "enroute", "40.701", "-74.001"), Arrays.asList("2018-07-01 21:26:06", "enroute", "40.702", "-74.002")), 
everyItem(IsIn.isOneOf(expectedResult.get(30, TimeUnit.SECONDS).toArray()))); } private String taxiRideJSON( String rideId, int pointIdex, double latitude, double longitude, int meterReading, int meterIncrement, String rideStatus, int passengerCount) { ObjectMapper mapper = new ObjectMapper(); ObjectNode objectNode = mapper.createObjectNode(); objectNode.put("ride_id", rideId); objectNode.put("point_idx", pointIdex); objectNode.put("latitude", latitude); objectNode.put("longitude", longitude); objectNode.put("meter_reading", meterReading); objectNode.put("meter_increment", meterIncrement); objectNode.put("ride_status", rideStatus); objectNode.put("passenger_count", passengerCount); return objectNode.toString(); } /** Suppressing this due to https: @SuppressWarnings("return.type.incompatible") private Future<List<List<String>>> runQueryInBackground(String[] args) { return pool.submit( (Callable) () -> { ByteArrayOutputStream outputStream = private PubsubMessage message(long timestampInMillis, String jsonPayload) { return new PubsubMessage( jsonPayload.getBytes(UTF_8), ImmutableMap.of("ts", String.valueOf(timestampInMillis))); } }
I think this function can internally be represented as a four-argument function: topN(column, sortKeyColumn, N, desc|asc); that function can then be used to implement both max_by and min_by.
private static void analyzeBuiltinAggFunction(FunctionCallExpr functionCallExpr) { FunctionName fnName = functionCallExpr.getFnName(); FunctionParams fnParams = functionCallExpr.getParams(); if (fnParams.isStar() && !fnName.getFunction().equals(FunctionSet.COUNT)) { throw new SemanticException("'*' can only be used in conjunction with COUNT: " + functionCallExpr.toSql()); } if (fnName.getFunction().equals(FunctionSet.COUNT)) { if (functionCallExpr.getChildren().size() > 1 && !fnParams.isDistinct()) { throw new SemanticException( "COUNT must have DISTINCT for multiple arguments: " + functionCallExpr.toSql()); } return; } if (fnName.getFunction().equals(FunctionSet.GROUP_CONCAT)) { if (functionCallExpr.getChildren().size() > 2 || functionCallExpr.getChildren().isEmpty()) { throw new SemanticException( "group_concat requires one or two parameters: " + functionCallExpr.toSql()); } if (fnParams.isDistinct()) { throw new SemanticException("group_concat does not support DISTINCT"); } Expr arg0 = functionCallExpr.getChild(0); if (!arg0.getType().isStringType() && !arg0.getType().isNull()) { throw new SemanticException( "group_concat requires first parameter to be of getType() STRING: " + functionCallExpr.toSql()); } if (functionCallExpr.getChildren().size() == 2) { Expr arg1 = functionCallExpr.getChild(1); if (!arg1.getType().isStringType() && !arg1.getType().isNull()) { throw new SemanticException( "group_concat requires second parameter to be of getType() STRING: " + functionCallExpr.toSql()); } } return; } if (fnName.getFunction().equals(FunctionSet.LAG) || fnName.getFunction().equals(FunctionSet.LEAD)) { if (!functionCallExpr.isAnalyticFnCall()) { throw new SemanticException(fnName.getFunction() + " only used in analytic function"); } else { if (functionCallExpr.getChildren().size() > 2) { if (!functionCallExpr.getChild(2).isConstant()) { throw new SemanticException( "The default parameter (parameter 3) of LAG must be a constant: " + functionCallExpr.toSql()); } } 
return; } } if (FunctionSet.onlyAnalyticUsedFunctions.contains(fnName.getFunction())) { if (!functionCallExpr.isAnalyticFnCall()) { throw new SemanticException(fnName.getFunction() + " only used in analytic function"); } } Expr arg = functionCallExpr.getChild(0); if (arg == null) { return; } if (fnName.getFunction().equals(FunctionSet.ARRAY_AGG)) { if (fnParams.isDistinct()) { throw new SemanticException("array_agg does not support DISTINCT"); } if (arg.getType().isDecimalV3()) { throw new SemanticException("array_agg does not support DecimalV3"); } } if (fnName.getFunction().equals(FunctionSet.ARRAYS_OVERLAP)) { if (functionCallExpr.getChildren().size() != 2) { throw new SemanticException("arrays_overlap only support 2 parameters"); } } if (fnName.getFunction().equals(FunctionSet.ARRAY_FILTER)) { if (functionCallExpr.getChildren().size() != 2) { throw new SemanticException("array_filter only support 2 parameters"); } } if (fnName.getFunction().equals(FunctionSet.RETENTION)) { if (!arg.getType().isArrayType()) { throw new SemanticException("retention only support Array<BOOLEAN>"); } ArrayType type = (ArrayType) arg.getType(); if (!type.getItemType().isBoolean()) { throw new SemanticException("retention only support Array<BOOLEAN>"); } } if (fnName.getFunction().equals(FunctionSet.WINDOW_FUNNEL)) { Expr modeArg = functionCallExpr.getChild(2); if (modeArg instanceof IntLiteral) { IntLiteral modeIntLiteral = (IntLiteral) modeArg; long modeValue = modeIntLiteral.getValue(); if (modeValue < 0 || modeValue > 3) { throw new SemanticException("mode argument's range must be [0-3]"); } } else { throw new SemanticException("mode argument must be numerical type"); } Expr windowArg = functionCallExpr.getChild(0); if (windowArg instanceof IntLiteral) { IntLiteral windowIntLiteral = (IntLiteral) windowArg; long windowValue = windowIntLiteral.getValue(); if (windowValue < 0) { throw new SemanticException("window argument must >= 0"); } } else { throw new SemanticException("window 
argument must be numerical type"); } } if (fnName.getFunction().equalsIgnoreCase(FunctionSet.MAX_BY)) { if (functionCallExpr.getChildren().size() != 2 || functionCallExpr.getChildren().isEmpty()) { throw new SemanticException( "max_by requires two parameters: " + functionCallExpr.toSql()); } if (functionCallExpr.getChild(0).isConstant() || functionCallExpr.getChild(1).isConstant()) { throw new SemanticException("max_by function args must be column"); } fnParams.setIsDistinct(false); Type maxByType = functionCallExpr.getChild(1).getType(); if (!maxByType.canApplyToNumeric()) { throw new SemanticException(Type.ONLY_METRIC_TYPE_ERROR_MSG); } return; } if ((fnName.getFunction().equals(FunctionSet.SUM) || fnName.getFunction().equals(FunctionSet.AVG)) && ((!arg.getType().isNumericType() && !arg.getType().isBoolean() && !arg.getType().isStringType() && !arg.getType().isNull() && !(arg instanceof NullLiteral)) || !arg.getType().canApplyToNumeric())) { throw new SemanticException( fnName.getFunction() + " requires a numeric parameter: " + functionCallExpr.toSql()); } if (fnName.getFunction().equals(FunctionSet.SUM_DISTINCT) && ((!arg.getType().isNumericType() && !arg.getType().isNull() && !(arg instanceof NullLiteral)) || !arg.getType().canApplyToNumeric())) { throw new SemanticException( "SUM_DISTINCT requires a numeric parameter: " + functionCallExpr.toSql()); } if ((fnName.getFunction().equals(FunctionSet.MIN) || fnName.getFunction().equals(FunctionSet.MAX) || fnName.getFunction().equals(FunctionSet.NDV) || fnName.getFunction().equals(FunctionSet.APPROX_COUNT_DISTINCT)) && !arg.getType().canApplyToNumeric()) { throw new SemanticException(Type.ONLY_METRIC_TYPE_ERROR_MSG); } if ((fnName.getFunction().equals(FunctionSet.BITMAP_UNION_INT) && !arg.getType().isIntegerType())) { throw new SemanticException("BITMAP_UNION_INT params only support Integer getType()"); } if (fnName.getFunction().equals(FunctionSet.INTERSECT_COUNT)) { if (functionCallExpr.getChildren().size() <= 2) { 
throw new SemanticException("intersect_count(bitmap_column, column_to_filter, filter_values) " + "function requires at least three parameters"); } Type inputType = functionCallExpr.getChild(0).getType(); if (!inputType.isBitmapType()) { throw new SemanticException( "intersect_count function first argument should be of BITMAP getType(), but was " + inputType); } if (functionCallExpr.getChild(1).isConstant()) { throw new SemanticException("intersect_count function filter_values arg must be column"); } for (int i = 2; i < functionCallExpr.getChildren().size(); i++) { if (!functionCallExpr.getChild(i).isConstant()) { throw new SemanticException("intersect_count function filter_values arg must be constant"); } } return; } if (fnName.getFunction().equals(FunctionSet.BITMAP_COUNT) || fnName.getFunction().equals(FunctionSet.BITMAP_UNION) || fnName.getFunction().equals(FunctionSet.BITMAP_UNION_COUNT) || fnName.getFunction().equals(FunctionSet.BITMAP_INTERSECT)) { if (functionCallExpr.getChildren().size() != 1) { throw new SemanticException(fnName + " function could only have one child"); } Type inputType = functionCallExpr.getChild(0).getType(); if (!inputType.isBitmapType()) { throw new SemanticException( fnName + " function's argument should be of BITMAP getType(), but was " + inputType); } return; } if ((fnName.getFunction().equals(FunctionSet.HLL_UNION_AGG) || fnName.getFunction().equals(FunctionSet.HLL_UNION) || fnName.getFunction().equals(FunctionSet.HLL_CARDINALITY) || fnName.getFunction().equals(FunctionSet.HLL_RAW_AGG)) && !arg.getType().isHllType()) { throw new SemanticException( "HLL_UNION_AGG, HLL_RAW_AGG and HLL_CARDINALITY's params must be hll column"); } if (fnName.getFunction().equals(FunctionSet.MIN) || fnName.getFunction().equals(FunctionSet.MAX) || fnName.getFunction().equals(FunctionSet.NDV) || fnName.getFunction().equals(FunctionSet.HLL_UNION_AGG)) { fnParams.setIsDistinct(false); } if (fnName.getFunction().equals(FunctionSet.PERCENTILE_APPROX)) { if 
(functionCallExpr.getChildren().size() != 2 && functionCallExpr.getChildren().size() != 3) { throw new SemanticException("percentile_approx(expr, DOUBLE [, B]) requires two or three parameters"); } if (!functionCallExpr.getChild(1).isConstant()) { throw new SemanticException("percentile_approx requires second parameter must be a constant : " + functionCallExpr.toSql()); } if (functionCallExpr.getChildren().size() == 3) { if (!functionCallExpr.getChild(2).isConstant()) { throw new SemanticException("percentile_approx requires the third parameter must be a constant : " + functionCallExpr.toSql()); } } } }
if (functionCallExpr.getChildren().size() != 2 || functionCallExpr.getChildren().isEmpty()) {
private static void analyzeBuiltinAggFunction(FunctionCallExpr functionCallExpr) { FunctionName fnName = functionCallExpr.getFnName(); FunctionParams fnParams = functionCallExpr.getParams(); if (fnParams.isStar() && !fnName.getFunction().equals(FunctionSet.COUNT)) { throw new SemanticException("'*' can only be used in conjunction with COUNT: " + functionCallExpr.toSql()); } if (fnName.getFunction().equals(FunctionSet.COUNT)) { if (functionCallExpr.getChildren().size() > 1 && !fnParams.isDistinct()) { throw new SemanticException( "COUNT must have DISTINCT for multiple arguments: " + functionCallExpr.toSql()); } return; } if (fnName.getFunction().equals(FunctionSet.GROUP_CONCAT)) { if (functionCallExpr.getChildren().size() > 2 || functionCallExpr.getChildren().isEmpty()) { throw new SemanticException( "group_concat requires one or two parameters: " + functionCallExpr.toSql()); } if (fnParams.isDistinct()) { throw new SemanticException("group_concat does not support DISTINCT"); } Expr arg0 = functionCallExpr.getChild(0); if (!arg0.getType().isStringType() && !arg0.getType().isNull()) { throw new SemanticException( "group_concat requires first parameter to be of getType() STRING: " + functionCallExpr.toSql()); } if (functionCallExpr.getChildren().size() == 2) { Expr arg1 = functionCallExpr.getChild(1); if (!arg1.getType().isStringType() && !arg1.getType().isNull()) { throw new SemanticException( "group_concat requires second parameter to be of getType() STRING: " + functionCallExpr.toSql()); } } return; } if (fnName.getFunction().equals(FunctionSet.LAG) || fnName.getFunction().equals(FunctionSet.LEAD)) { if (!functionCallExpr.isAnalyticFnCall()) { throw new SemanticException(fnName.getFunction() + " only used in analytic function"); } else { if (functionCallExpr.getChildren().size() > 2) { if (!functionCallExpr.getChild(2).isConstant()) { throw new SemanticException( "The default parameter (parameter 3) of LAG must be a constant: " + functionCallExpr.toSql()); } } 
return; } } if (FunctionSet.onlyAnalyticUsedFunctions.contains(fnName.getFunction())) { if (!functionCallExpr.isAnalyticFnCall()) { throw new SemanticException(fnName.getFunction() + " only used in analytic function"); } } Expr arg = functionCallExpr.getChild(0); if (arg == null) { return; } if (fnName.getFunction().equals(FunctionSet.ARRAY_AGG)) { if (fnParams.isDistinct()) { throw new SemanticException("array_agg does not support DISTINCT"); } if (arg.getType().isDecimalV3()) { throw new SemanticException("array_agg does not support DecimalV3"); } } if (fnName.getFunction().equals(FunctionSet.ARRAYS_OVERLAP)) { if (functionCallExpr.getChildren().size() != 2) { throw new SemanticException("arrays_overlap only support 2 parameters"); } } if (fnName.getFunction().equals(FunctionSet.ARRAY_FILTER)) { if (functionCallExpr.getChildren().size() != 2) { throw new SemanticException("array_filter only support 2 parameters"); } } if (fnName.getFunction().equals(FunctionSet.RETENTION)) { if (!arg.getType().isArrayType()) { throw new SemanticException("retention only support Array<BOOLEAN>"); } ArrayType type = (ArrayType) arg.getType(); if (!type.getItemType().isBoolean()) { throw new SemanticException("retention only support Array<BOOLEAN>"); } } if (fnName.getFunction().equals(FunctionSet.WINDOW_FUNNEL)) { Expr modeArg = functionCallExpr.getChild(2); if (modeArg instanceof IntLiteral) { IntLiteral modeIntLiteral = (IntLiteral) modeArg; long modeValue = modeIntLiteral.getValue(); if (modeValue < 0 || modeValue > 3) { throw new SemanticException("mode argument's range must be [0-3]"); } } else { throw new SemanticException("mode argument must be numerical type"); } Expr windowArg = functionCallExpr.getChild(0); if (windowArg instanceof IntLiteral) { IntLiteral windowIntLiteral = (IntLiteral) windowArg; long windowValue = windowIntLiteral.getValue(); if (windowValue < 0) { throw new SemanticException("window argument must >= 0"); } } else { throw new SemanticException("window 
argument must be numerical type"); } } if (fnName.getFunction().equals(FunctionSet.MAX_BY)) { if (functionCallExpr.getChildren().size() != 2 || functionCallExpr.getChildren().isEmpty()) { throw new SemanticException( "max_by requires two parameters: " + functionCallExpr.toSql()); } if (functionCallExpr.getChild(0).isConstant() || functionCallExpr.getChild(1).isConstant()) { throw new SemanticException("max_by function args must be column"); } fnParams.setIsDistinct(false); Type sortKeyType = functionCallExpr.getChild(1).getType(); if (!sortKeyType.canApplyToNumeric()) { throw new SemanticException(Type.ONLY_METRIC_TYPE_ERROR_MSG); } return; } if ((fnName.getFunction().equals(FunctionSet.SUM) || fnName.getFunction().equals(FunctionSet.AVG)) && ((!arg.getType().isNumericType() && !arg.getType().isBoolean() && !arg.getType().isStringType() && !arg.getType().isNull() && !(arg instanceof NullLiteral)) || !arg.getType().canApplyToNumeric())) { throw new SemanticException( fnName.getFunction() + " requires a numeric parameter: " + functionCallExpr.toSql()); } if (fnName.getFunction().equals(FunctionSet.SUM_DISTINCT) && ((!arg.getType().isNumericType() && !arg.getType().isNull() && !(arg instanceof NullLiteral)) || !arg.getType().canApplyToNumeric())) { throw new SemanticException( "SUM_DISTINCT requires a numeric parameter: " + functionCallExpr.toSql()); } if ((fnName.getFunction().equals(FunctionSet.MIN) || fnName.getFunction().equals(FunctionSet.MAX) || fnName.getFunction().equals(FunctionSet.NDV) || fnName.getFunction().equals(FunctionSet.APPROX_COUNT_DISTINCT)) && !arg.getType().canApplyToNumeric()) { throw new SemanticException(Type.ONLY_METRIC_TYPE_ERROR_MSG); } if ((fnName.getFunction().equals(FunctionSet.BITMAP_UNION_INT) && !arg.getType().isIntegerType())) { throw new SemanticException("BITMAP_UNION_INT params only support Integer getType()"); } if (fnName.getFunction().equals(FunctionSet.INTERSECT_COUNT)) { if (functionCallExpr.getChildren().size() <= 2) { throw 
new SemanticException("intersect_count(bitmap_column, column_to_filter, filter_values) " + "function requires at least three parameters"); } Type inputType = functionCallExpr.getChild(0).getType(); if (!inputType.isBitmapType()) { throw new SemanticException( "intersect_count function first argument should be of BITMAP getType(), but was " + inputType); } if (functionCallExpr.getChild(1).isConstant()) { throw new SemanticException("intersect_count function filter_values arg must be column"); } for (int i = 2; i < functionCallExpr.getChildren().size(); i++) { if (!functionCallExpr.getChild(i).isConstant()) { throw new SemanticException("intersect_count function filter_values arg must be constant"); } } return; } if (fnName.getFunction().equals(FunctionSet.BITMAP_COUNT) || fnName.getFunction().equals(FunctionSet.BITMAP_UNION) || fnName.getFunction().equals(FunctionSet.BITMAP_UNION_COUNT) || fnName.getFunction().equals(FunctionSet.BITMAP_INTERSECT)) { if (functionCallExpr.getChildren().size() != 1) { throw new SemanticException(fnName + " function could only have one child"); } Type inputType = functionCallExpr.getChild(0).getType(); if (!inputType.isBitmapType()) { throw new SemanticException( fnName + " function's argument should be of BITMAP getType(), but was " + inputType); } return; } if ((fnName.getFunction().equals(FunctionSet.HLL_UNION_AGG) || fnName.getFunction().equals(FunctionSet.HLL_UNION) || fnName.getFunction().equals(FunctionSet.HLL_CARDINALITY) || fnName.getFunction().equals(FunctionSet.HLL_RAW_AGG)) && !arg.getType().isHllType()) { throw new SemanticException( "HLL_UNION_AGG, HLL_RAW_AGG and HLL_CARDINALITY's params must be hll column"); } if (fnName.getFunction().equals(FunctionSet.MIN) || fnName.getFunction().equals(FunctionSet.MAX) || fnName.getFunction().equals(FunctionSet.NDV) || fnName.getFunction().equals(FunctionSet.HLL_UNION_AGG)) { fnParams.setIsDistinct(false); } if (fnName.getFunction().equals(FunctionSet.PERCENTILE_APPROX)) { if 
(functionCallExpr.getChildren().size() != 2 && functionCallExpr.getChildren().size() != 3) { throw new SemanticException("percentile_approx(expr, DOUBLE [, B]) requires two or three parameters"); } if (!functionCallExpr.getChild(1).isConstant()) { throw new SemanticException("percentile_approx requires second parameter must be a constant : " + functionCallExpr.toSql()); } if (functionCallExpr.getChildren().size() == 3) { if (!functionCallExpr.getChild(2).isConstant()) { throw new SemanticException("percentile_approx requires the third parameter must be a constant : " + functionCallExpr.toSql()); } } } }
class FunctionAnalyzer { public static void analyze(FunctionCallExpr functionCallExpr) { if (functionCallExpr.getFn() instanceof AggregateFunction) { analyzeBuiltinAggFunction(functionCallExpr); } if (functionCallExpr.getParams().isStar() && !(functionCallExpr.getFn() instanceof AggregateFunction)) { throw new SemanticException("Cannot pass '*' to scalar function."); } FunctionName fnName = functionCallExpr.getFnName(); if (fnName.getFunction().equals(FunctionSet.DATE_TRUNC)) { if (!(functionCallExpr.getChild(0) instanceof StringLiteral)) { throw new SemanticException("date_trunc requires first parameter must be a string constant"); } final StringLiteral fmtLiteral = (StringLiteral) functionCallExpr.getChild(0); if (functionCallExpr.getChild(1).getType().isDatetime()) { if (!Lists.newArrayList("year", "quarter", "month", "week", "day", "hour", "minute", "second") .contains(fmtLiteral.getStringValue())) { throw new SemanticException("date_trunc function can't support argument other than " + "year|quarter|month|week|day|hour|minute|second"); } } else if (functionCallExpr.getChild(1).getType().isDate()) { if (!Lists.newArrayList("year", "quarter", "month", "week", "day") .contains(fmtLiteral.getStringValue())) { throw new SemanticException("date_trunc function can't support argument other than " + "year|quarter|month|week|day"); } } } if (fnName.getFunction().equals(FunctionSet.ARRAY_DIFFERENCE)) { Preconditions.checkState(functionCallExpr.getChildren().size() == 1); if (!functionCallExpr.getChild(0).getType().isNull()) { Preconditions.checkState(functionCallExpr.getChild(0).getType().isArrayType()); ArrayType arrayType = (ArrayType) functionCallExpr.getChild(0).getType(); if (!arrayType.hasNumericItem() && !arrayType.isBooleanType() && !arrayType.isNullTypeItem()) { throw new SemanticException("array_difference function only support numeric array types"); } } } if (fnName.getFunction().equals(FunctionSet.ARRAY_MAP)) { 
Preconditions.checkState(functionCallExpr.getChildren().size() > 1); functionCallExpr.setType(new ArrayType(functionCallExpr.getChild(0).getChild(1).getType())); } } }
class FunctionAnalyzer { public static void analyze(FunctionCallExpr functionCallExpr) { if (functionCallExpr.getFn() instanceof AggregateFunction) { analyzeBuiltinAggFunction(functionCallExpr); } if (functionCallExpr.getParams().isStar() && !(functionCallExpr.getFn() instanceof AggregateFunction)) { throw new SemanticException("Cannot pass '*' to scalar function."); } FunctionName fnName = functionCallExpr.getFnName(); if (fnName.getFunction().equals(FunctionSet.DATE_TRUNC)) { if (!(functionCallExpr.getChild(0) instanceof StringLiteral)) { throw new SemanticException("date_trunc requires first parameter must be a string constant"); } final StringLiteral fmtLiteral = (StringLiteral) functionCallExpr.getChild(0); if (functionCallExpr.getChild(1).getType().isDatetime()) { if (!Lists.newArrayList("year", "quarter", "month", "week", "day", "hour", "minute", "second") .contains(fmtLiteral.getStringValue())) { throw new SemanticException("date_trunc function can't support argument other than " + "year|quarter|month|week|day|hour|minute|second"); } } else if (functionCallExpr.getChild(1).getType().isDate()) { if (!Lists.newArrayList("year", "quarter", "month", "week", "day") .contains(fmtLiteral.getStringValue())) { throw new SemanticException("date_trunc function can't support argument other than " + "year|quarter|month|week|day"); } } } if (fnName.getFunction().equals(FunctionSet.ARRAY_DIFFERENCE)) { Preconditions.checkState(functionCallExpr.getChildren().size() == 1); if (!functionCallExpr.getChild(0).getType().isNull()) { Preconditions.checkState(functionCallExpr.getChild(0).getType().isArrayType()); ArrayType arrayType = (ArrayType) functionCallExpr.getChild(0).getType(); if (!arrayType.hasNumericItem() && !arrayType.isBooleanType() && !arrayType.isNullTypeItem()) { throw new SemanticException("array_difference function only support numeric array types"); } } } if (fnName.getFunction().equals(FunctionSet.ARRAY_MAP)) { 
Preconditions.checkState(functionCallExpr.getChildren().size() > 1); functionCallExpr.setType(new ArrayType(functionCallExpr.getChild(0).getChild(1).getType())); } } }
```suggestion addShutdownHookAndCleanup(); // Register a shutdown hook to handle graceful shutdown of the application ```
public static void main(String[] args) throws CustomException { profilerStartTime = TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS); tempFileCleanupShutdownHook(); printHeader(); handleProfilerArguments(args); extractTheProfiler(); createTempJar(balJarName); initialize(balJarName); }
tempFileCleanupShutdownHook();
public static void main(String[] args) throws CustomException { profilerStartTime = TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS); addShutdownHookAndCleanup(); printHeader(); handleProfilerArguments(args); extractProfiler(); createTempJar(balJarName); initializeProfiling(balJarName); }
class Main { public static final String ANSI_RESET = "\u001B[0m"; public static final String ANSI_GRAY = "\033[37m"; public static final String ANSI_CYAN = "\033[1;38;2;32;182;176m"; static long profilerStartTime; static int exitCode = 0; public static final String TEMPJARFILENAME = "temp.jar"; private static String balJarArgs = null; static String balJarName = null; static String skipFunctionString = null; private static int balFunctionCount = 0; static int moduleCount = 0; static final List<String> INSTRUMENTEDPATHS = new ArrayList<>(); static final List<String> INSTRUMENTEDFILES = new ArrayList<>(); static final List<String> UTILINITPATHS = new ArrayList<>(); static final List<String> UTILPATHS = new ArrayList<>(); private static void printHeader() { String header = "%n" + ANSI_GRAY + "================================================================================" + ANSI_RESET + "%n" + ANSI_CYAN + "Ballerina Profiler" + ANSI_RESET + ": Profiling..." + "%n" + ANSI_GRAY + "================================================================================" + ANSI_RESET + "%n" + "WARNING : Ballerina Profiler is an experimental feature."; System.out.printf(header + "%n"); } private static void handleProfilerArguments(String[] args) { String invalidArgument = "Invalid CLI Argument"; if (args.length != 0) { for (int i = 0; i < args.length; i++) { switch (args[i]) { case "--file": balJarName = args[i + 1]; if (balJarName.startsWith("[") && balJarName.endsWith("]")) { balJarName = balJarName.substring(1, balJarName.length() - 1); } else { System.out.printf(invalidArgument + "\n"); } break; case "--args": balJarArgs = args[i + 1]; if (balJarArgs != null && balJarArgs.startsWith("[") && balJarArgs.endsWith("]")) { balJarArgs = balJarArgs.substring(1, balJarArgs.length() - 1); } else { System.out.printf(invalidArgument + "\n"); } break; case "--skip": skipFunctionString = args[i + 1]; if (skipFunctionString != null && skipFunctionString.matches("\\[.*\\]")) { 
skipFunctionString = skipFunctionString.substring(1, skipFunctionString.length() - 1); } else { System.out.printf(invalidArgument + "\n"); } break; default: System.out.printf(invalidArgument + "\n"); break; } } } } private static void extractTheProfiler() throws CustomException { System.out.printf(ANSI_CYAN + "[1/6] Initializing Profiler..." + ANSI_RESET + "%n"); try { new ProcessBuilder("jar", "xvf", "Profiler.jar", "io/ballerina/runtime/profiler/runtime") .start() .waitFor(); } catch (IOException | InterruptedException exception) { throw new CustomException(exception); } } public static void createTempJar(String balJarName) { try { System.out.printf(ANSI_CYAN + "[2/6] Copying Executable..." + ANSI_RESET + "%n"); Path sourcePath = Paths.get(balJarName); Path destinationPath = Paths.get(TEMPJARFILENAME); Files.copy(sourcePath, destinationPath); } catch (IOException e) { exitCode = 2; System.out.printf("Error occurred while copying the file: %s%n", e.getMessage()); } } private static void initialize(String balJarName) throws CustomException { System.out.printf(ANSI_CYAN + "[3/6] Performing Analysis..." + ANSI_RESET + "%n"); ArrayList<String> classNames = new ArrayList<>(); try { findAllClassNames(balJarName, classNames); findUtilityClasses(classNames); } catch (Exception e) { System.out.printf("(No such file or directory)" + "%n"); } System.out.printf(ANSI_CYAN + "[4/6] Instrumenting Functions..." 
+ ANSI_RESET + "%n"); try (JarFile jarFile = new JarFile(balJarName)) { String mainClassPackage = MethodWrapper.mainClassFinder( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); CustomClassLoader customClassLoader = new CustomClassLoader( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); Set<String> usedPaths = new HashSet<>(); for (String className : classNames) { if (mainClassPackage == null) { continue; } if (className.startsWith(mainClassPackage.split("/")[0]) || UTILPATHS.contains(className)) { try (InputStream inputStream = jarFile.getInputStream(jarFile.getJarEntry(className))) { byte[] code = MethodWrapper.modifyMethods(inputStream); customClassLoader.loadClass(code); usedPaths.add(className.replace(".class", "").replace("/", ".")); MethodWrapper.printCode(className, code); } } if (className.endsWith("/$_init.class")) { moduleCount++; } } System.out.printf(" ○ Instrumented Module Count: " + moduleCount + "%n"); try (PrintWriter printWriter = new PrintWriter("usedPathsList.txt", StandardCharsets.UTF_8)) { printWriter.println(String.join(", ", usedPaths)); } System.out.printf(" ○ Instrumented Function Count: " + balFunctionCount + "%n"); } catch (Throwable throwable) { throw new CustomException(throwable); } try { modifyTheJar(); } catch (Throwable throwable) { throw new CustomException(throwable); } } private static void modifyTheJar() throws InterruptedException, IOException { try { final File userDirectory = new File(System.getProperty("user.dir")); listAllFiles(userDirectory); List<String> changedDirectories = INSTRUMENTEDFILES.stream().distinct().collect(Collectors.toList()); loadDirectories(changedDirectories); } finally { for (String instrumentedFilePath : INSTRUMENTEDPATHS) { FileUtils.deleteDirectory(new File(instrumentedFilePath)); } FileUtils.deleteDirectory(new File("io/ballerina/runtime/profiler/runtime")); MethodWrapper.invokeMethods(); } } private static void loadDirectories(List<String> changedDirs) { 
try { ProcessBuilder processBuilder = new ProcessBuilder("jar", "uf", TEMPJARFILENAME); processBuilder.command().addAll(changedDirs); processBuilder.start().waitFor(); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } } public static void listAllFiles(final File userDirectory) { String absolutePath = Paths.get(TEMPJARFILENAME).toFile().getAbsolutePath(); absolutePath = absolutePath.replaceAll(TEMPJARFILENAME, ""); File[] files = userDirectory.listFiles(); if (files != null) { for (final File fileEntry : files) { if (fileEntry.isDirectory()) { listAllFiles(fileEntry); } else { String fileEntryString = String.valueOf(fileEntry); if (fileEntryString.endsWith(".class")) { fileEntryString = fileEntryString.replaceAll(absolutePath, ""); int index = fileEntryString.lastIndexOf('/'); fileEntryString = fileEntryString.substring(0, index); String[] fileEntryParts = fileEntryString.split("/"); INSTRUMENTEDPATHS.add(fileEntryParts[0]); INSTRUMENTEDFILES.add(fileEntryString); } } } } } private static void findAllClassNames(String jarPath, ArrayList<String> classNames) throws IOException { try (ZipInputStream zipInputStream = new ZipInputStream(new FileInputStream(jarPath))) { for (ZipEntry entry = zipInputStream.getNextEntry(); entry != null; entry = zipInputStream.getNextEntry()) { if (!entry.isDirectory() && entry.getName().endsWith(".class")) { classNames.add(String.valueOf(entry)); } } } } private static void findUtilityClasses(ArrayList<String> classNames) { for (String className : classNames) { if (className.endsWith("$_init.class")) { String path = className.substring(0, className.lastIndexOf('/') + 1); if (!UTILINITPATHS.contains(path)) { UTILINITPATHS.add(path); } } } for (String name : classNames) { for (String path : UTILINITPATHS) { if (name.startsWith(path)) { String subPath = name.substring(path.length()); if (subPath.indexOf('/') == -1) { UTILPATHS.add(name); } } } } } private static void 
deleteTempData() { String filePrefix = "jartmp"; File[] files = new File(System.getProperty("user.dir")).listFiles(); if (files != null) { for (File file : files) { if (file.getName().startsWith(filePrefix)) { FileUtils.deleteQuietly(file); } } } } private static void tempFileCleanupShutdownHook() { Runtime.getRuntime().addShutdownHook(new Thread(() -> { try { long profilerTotalTime = TimeUnit.MILLISECONDS.convert( System.nanoTime(), TimeUnit.NANOSECONDS) - profilerStartTime; File tempJarFile = new File(TEMPJARFILENAME); if (tempJarFile.exists()) { boolean deleted = tempJarFile.delete(); if (!deleted) { System.err.printf("Failed to delete temp jar file: " + TEMPJARFILENAME + "%n"); } } System.out.printf("%n" + ANSI_CYAN + "[6/6] Generating Output..." + ANSI_RESET + "%n"); Thread.sleep(100); initializeCPUParser(skipFunctionString); deleteFileIfExists("usedPathsList.txt"); deleteFileIfExists("CpuPre.json"); System.out.printf(" ○ Execution Time: " + profilerTotalTime / 1000 + " Seconds" + "%n"); deleteTempData(); initializeHTMLExport(); deleteFileIfExists("performance_report.json"); System.out.printf("----------------------------------------"); System.out.printf("----------------------------------------" + "%n"); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } finally { String jarPath; try { jarPath = Main.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); } catch (URISyntaxException e) { throw new RuntimeException(e); } File jarFile = new File(jarPath); if (jarFile.exists()) { boolean deleted = jarFile.delete(); if (!deleted) { System.err.printf("Failed to delete jar file: " + jarPath + "%n"); } } } })); } private static void deleteFileIfExists(String filePath) { File file = new File(filePath); if (file.exists()) { boolean deleted = file.delete(); if (!deleted) { System.err.printf("Failed to delete file: " + filePath + "%n"); } } } public static void 
incrementBalFunctionCount() { balFunctionCount++; } public static String getBalJarArgs() { return balJarArgs; } }
class Main { static long profilerStartTime; static int exitCode = 0; private static String balJarArgs = null; static String balJarName = null; static String skipFunctionString = null; private static int balFunctionCount = 0; static int moduleCount = 0; static final List<String> INSTRUMENTED_PATHS = new ArrayList<>(); static final List<String> INSTRUMENTED_FILES = new ArrayList<>(); static final List<String> UTIL_INIT_PATHS = new ArrayList<>(); static final List<String> UTIL_PATHS = new ArrayList<>(); private static void printHeader() { String header = "%n" + Constants.ANSI_GRAY + "================================================================================" + Constants.ANSI_RESET + "%n" + Constants.ANSI_CYAN + "Ballerina Profiler" + Constants.ANSI_RESET + ": Profiling..." + "%n" + Constants.ANSI_GRAY + "================================================================================" + Constants.ANSI_RESET + "%n" + "WARNING : Ballerina Profiler is an experimental feature."; OUT.printf(header + "%n"); } private static void handleProfilerArguments(String[] args) { String invalidArgument = "Invalid CLI Argument"; if (args.length != 0) { for (int i = 0; i < args.length; i++) { switch (args[i]) { case "--file": balJarName = args[i + 1]; if (balJarName.startsWith("[") && balJarName.endsWith("]")) { balJarName = balJarName.substring(1, balJarName.length() - 1); } else { OUT.printf(invalidArgument + "\n"); } break; case "--args": balJarArgs = args[i + 1]; if (balJarArgs != null && balJarArgs.startsWith("[") && balJarArgs.endsWith("]")) { balJarArgs = balJarArgs.substring(1, balJarArgs.length() - 1); } else { OUT.printf(invalidArgument + "\n"); } break; case "--skip": skipFunctionString = args[i + 1]; if (skipFunctionString != null && skipFunctionString.matches("\\[.*\\]")) { skipFunctionString = skipFunctionString.substring(1, skipFunctionString.length() - 1); } else { OUT.printf(invalidArgument + "\n"); } break; default: break; } } } } private static void 
extractProfiler() throws CustomException { OUT.printf(Constants.ANSI_CYAN + "[1/6] Initializing Profiler..." + Constants.ANSI_RESET + "%n"); try { new ProcessBuilder("jar", "xvf", "Profiler.jar", "io/ballerina/runtime/profiler/runtime") .start() .waitFor(); } catch (IOException | InterruptedException exception) { throw new CustomException(exception); } } public static void createTempJar(String balJarName) { try { OUT.printf(Constants.ANSI_CYAN + "[2/6] Copying Executable..." + Constants.ANSI_RESET + "%n"); Path sourcePath = Paths.get(balJarName); Path destinationPath = Paths.get(Constants.TEMP_JAR_FILE_NAME); Files.copy(sourcePath, destinationPath); } catch (IOException e) { exitCode = 2; OUT.printf("Error occurred while copying the file: %s%n", e.getMessage()); } } private static void initializeProfiling(String balJarName) throws CustomException { OUT.printf(Constants.ANSI_CYAN + "[3/6] Performing Analysis..." + Constants.ANSI_RESET + "%n"); ArrayList<String> classNames = new ArrayList<>(); try { findAllClassNames(balJarName, classNames); findUtilityClasses(classNames); } catch (Exception e) { OUT.printf("(No such file or directory)" + "%n"); } OUT.printf(Constants.ANSI_CYAN + "[4/6] Instrumenting Functions..." 
+ Constants.ANSI_RESET + "%n"); try (JarFile jarFile = new JarFile(balJarName)) { String mainClassPackage = MethodWrapper.mainClassFinder( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); CustomClassLoader customClassLoader = new CustomClassLoader( new URLClassLoader(new URL[]{new File(balJarName).toURI().toURL()})); Set<String> usedPaths = new HashSet<>(); for (String className : classNames) { if (mainClassPackage == null) { continue; } if (className.startsWith(mainClassPackage.split("/")[0]) || UTIL_PATHS.contains(className)) { try (InputStream inputStream = jarFile.getInputStream(jarFile.getJarEntry(className))) { byte[] code = MethodWrapper.modifyMethods(inputStream); customClassLoader.loadClass(code); usedPaths.add(className.replace(".class", "").replace("/", ".")); MethodWrapper.printCode(className, code); } } if (className.endsWith("/$_init.class")) { moduleCount++; } } OUT.printf(" ○ Instrumented Module Count: " + moduleCount + "%n"); try (PrintWriter printWriter = new PrintWriter("usedPathsList.txt", StandardCharsets.UTF_8)) { printWriter.println(String.join(", ", usedPaths)); } OUT.printf(" ○ Instrumented Function Count: " + balFunctionCount + "%n"); } catch (Throwable throwable) { throw new CustomException(throwable); } try { modifyJar(); } catch (Throwable throwable) { throw new CustomException(throwable); } } private static void modifyJar() throws InterruptedException, IOException { try { final File userDirectory = new File(System.getProperty("user.dir")); listAllFiles(userDirectory); List<String> changedDirectories = INSTRUMENTED_FILES.stream().distinct().collect(Collectors.toList()); loadDirectories(changedDirectories); } finally { for (String instrumentedFilePath : INSTRUMENTED_PATHS) { FileUtils.deleteDirectory(new File(instrumentedFilePath)); } FileUtils.deleteDirectory(new File("io/ballerina/runtime/profiler/runtime")); MethodWrapper.invokeMethods(); } } private static void loadDirectories(List<String> changedDirs) { try { 
ProcessBuilder processBuilder = new ProcessBuilder("jar", "uf", Constants.TEMP_JAR_FILE_NAME); processBuilder.command().addAll(changedDirs); processBuilder.start().waitFor(); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } } public static void listAllFiles(final File userDirectory) { String absolutePath = Paths.get(Constants.TEMP_JAR_FILE_NAME).toFile() .getAbsolutePath().replaceAll(Constants.TEMP_JAR_FILE_NAME, ""); File[] files = userDirectory.listFiles(); if (files != null) { for (final File fileEntry : files) { if (fileEntry.isDirectory()) { listAllFiles(fileEntry); } else { String fileEntryString = String.valueOf(fileEntry); if (fileEntryString.endsWith(".class")) { fileEntryString = fileEntryString.replaceAll(absolutePath, ""); int index = fileEntryString.lastIndexOf('/'); fileEntryString = fileEntryString.substring(0, index); String[] fileEntryParts = fileEntryString.split("/"); INSTRUMENTED_PATHS.add(fileEntryParts[0]); INSTRUMENTED_FILES.add(fileEntryString); } } } } } private static void findAllClassNames(String jarPath, ArrayList<String> classNames) throws IOException { try (ZipInputStream zipInputStream = new ZipInputStream(new FileInputStream(jarPath))) { for (ZipEntry entry = zipInputStream.getNextEntry(); entry != null; entry = zipInputStream.getNextEntry()) { if (!entry.isDirectory() && entry.getName().endsWith(".class")) { classNames.add(String.valueOf(entry)); } } } } private static void findUtilityClasses(ArrayList<String> classNames) { for (String className : classNames) { if (className.endsWith("$_init.class")) { String path = className.substring(0, className.lastIndexOf('/') + 1); if (!UTIL_INIT_PATHS.contains(path)) { UTIL_INIT_PATHS.add(path); } } } for (String name : classNames) { for (String path : UTIL_INIT_PATHS) { if (name.startsWith(path)) { String subPath = name.substring(path.length()); if (subPath.indexOf('/') == -1) { UTIL_PATHS.add(name); } } } } } 
private static void deleteTempData() { String filePrefix = "jartmp"; File[] files = new File(System.getProperty("user.dir")).listFiles(); if (files != null) { for (File file : files) { if (file.getName().startsWith(filePrefix)) { FileUtils.deleteQuietly(file); } } } } private static void addShutdownHookAndCleanup() { Runtime.getRuntime().addShutdownHook(new Thread(() -> { try { long profilerTotalTime = TimeUnit.MILLISECONDS.convert( System.nanoTime(), TimeUnit.NANOSECONDS) - profilerStartTime; File tempJarFile = new File(Constants.TEMP_JAR_FILE_NAME); if (tempJarFile.exists()) { boolean deleted = tempJarFile.delete(); if (!deleted) { System.err.printf("Failed to delete temp jar file: " + Constants.TEMP_JAR_FILE_NAME + "%n"); } } OUT.printf("%n" + Constants.ANSI_CYAN + "[6/6] Generating Output..." + Constants.ANSI_RESET + "%n"); Thread.sleep(100); initializeCPUParser(skipFunctionString); deleteFileIfExists("usedPathsList.txt"); deleteFileIfExists("CpuPre.json"); OUT.printf(" ○ Execution Time: " + profilerTotalTime / 1000 + " Seconds" + "%n"); deleteTempData(); initializeHTMLExport(); deleteFileIfExists("performance_report.json"); OUT.printf("----------------------------------------"); OUT.printf("----------------------------------------" + "%n"); } catch (IOException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } finally { String jarPath; try { jarPath = Main.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(); } catch (URISyntaxException e) { throw new RuntimeException(e); } File jarFile = new File(jarPath); if (jarFile.exists()) { boolean deleted = jarFile.delete(); if (!deleted) { System.err.printf("Failed to delete jar file: " + jarPath + "%n"); } } } })); } private static void deleteFileIfExists(String filePath) { File file = new File(filePath); if (file.exists()) { boolean deleted = file.delete(); if (!deleted) { System.err.printf("Failed to delete file: " + filePath + "%n"); } } 
} public static void incrementBalFunctionCount() { balFunctionCount++; } public static String getBalJarArgs() { return balJarArgs; } }
Logging a warning here might be confusion, maybe just comment that this is expected as the db already exists when running this before the 2nd test...? Though, nothing critical ....
public void prepareDatabase() throws SQLException { pipelineRead.getOptions().setStableUniqueNames(CheckEnabled.OFF); DataSource dbDs = DatabaseTestHelper.getDataSourceForContainer(getDb(dbms)); try { DatabaseTestHelper.createTable( dbDs, TABLE_NAME, Lists.newArrayList( KV.of("id", "INTEGER"), KV.of("name", "VARCHAR(50)"), KV.of("specialDate", "TIMESTAMP"))); } catch (SQLException e) { LOG.warn("Exception occurred when preparing database {}", dbms, e); return; } catch (Exception e) { throw e; } final String dbmsLocal = dbms; pipelineWrite .apply(GenerateSequence.from(0).to(NUM_ROWS)) .apply(MapElements.via(new MapRowDataFn())) .apply( JdbcIO.<RowData>write() .withTable(TABLE_NAME) .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal)))); PipelineResult res = pipelineWrite.run(); res.metrics() .allMetrics() .getDistributions() .forEach( dist -> { if (dist.getName().getName().contains("intsDistribution")) { LOG.info( "Metric: {} | Min: {} | Max: {}", dist.getName().getName(), dist.getCommitted().getMin(), dist.getCommitted().getMax()); } else if (dist.getName().getName().contains("intsDistribution")) { LOG.info( "Metric: {} | Min: {} | Max: {}", dist.getName().getName(), new DateTime(Instant.EPOCH.plus(Duration.millis(dist.getCommitted().getMin()))), new DateTime( Instant.EPOCH.plus(Duration.millis(dist.getCommitted().getMax())))); } }); res.waitUntilFinish(); }
LOG.warn("Exception occurred when preparing database {}", dbms, e);
public void prepareDatabase() throws SQLException { pipelineRead.getOptions().setStableUniqueNames(CheckEnabled.OFF); DataSource dbDs = DatabaseTestHelper.getDataSourceForContainer(getDb(dbms)); try { DatabaseTestHelper.createTable( dbDs, TABLE_NAME, Lists.newArrayList( KV.of("id", "INTEGER"), KV.of("name", "VARCHAR(50)"), KV.of("specialDate", "TIMESTAMP"))); } catch (SQLException e) { LOG.info( "Exception occurred when preparing database {}. " + "This is expected, and the test should pass.", dbms, e); return; } catch (Exception e) { throw e; } final String dbmsLocal = dbms; pipelineWrite .apply(GenerateSequence.from(0).to(NUM_ROWS)) .apply(MapElements.via(new MapRowDataFn())) .apply( JdbcIO.<RowData>write() .withTable(TABLE_NAME) .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal)))); PipelineResult res = pipelineWrite.run(); res.metrics() .allMetrics() .getDistributions() .forEach( dist -> { if (dist.getName().getName().contains("intsDistribution")) { LOG.info( "Metric: {} | Min: {} | Max: {}", dist.getName().getName(), dist.getCommitted().getMin(), dist.getCommitted().getMax()); } else if (dist.getName().getName().contains("intsDistribution")) { LOG.info( "Metric: {} | Min: {} | Max: {}", dist.getName().getName(), new DateTime(Instant.EPOCH.plus(Duration.millis(dist.getCommitted().getMin()))), new DateTime( Instant.EPOCH.plus(Duration.millis(dist.getCommitted().getMax())))); } }); res.waitUntilFinish(); }
class JdbcIOAutoPartitioningIT { private static final Logger LOG = LoggerFactory.getLogger(JdbcIOAutoPartitioningIT.class); public static final Integer NUM_ROWS = 1_000; public static final String TABLE_NAME = "baseTable"; @ClassRule public static TestPipeline pipelineWrite = TestPipeline.create(); @Rule public TestPipeline pipelineRead = TestPipeline.create(); @Parameterized.Parameters(name = "{0}") public static Iterable<String> params() { return Lists.newArrayList("mysql", "postgres"); } @Parameterized.Parameter(0) public String dbms; public static JdbcDatabaseContainer<?> getDb(String dbName) { if (dbName.equals("mysql")) { return mysql; } else { return postgres; } } @Rule public TestRule retryRule = new TestRule() { public final int maxRetries = 2; @Override public Statement apply(Statement base, Description description) { return new Statement() { @Override public void evaluate() throws Throwable { Throwable caughtThrowable = null; for (int i = 0; i < maxRetries; i++) { try { pipelineRead.apply(base, description); base.evaluate(); return; } catch (Throwable t) { caughtThrowable = t; System.err.println( description.getDisplayName() + ": run " + (i + 1) + " failed."); } } System.err.println( description.getDisplayName() + ": Giving up after " + maxRetries + " failures."); throw Objects.requireNonNull(caughtThrowable); } }; } }; @ClassRule public static JdbcDatabaseContainer<?> mysql = new MySQLContainer<>("mysql"); @ClassRule public static JdbcDatabaseContainer<?> postgres = new PostgreSQLContainer<>("postgres"); @Before @DefaultSchema(JavaFieldSchema.class) static class RowData { public final Integer id; public final String name; public final DateTime specialDate; @SchemaCreate public RowData(Integer id, String name, DateTime specialDate) { this.id = id; this.name = name; this.specialDate = specialDate; } } static class MapRowDataFn extends SimpleFunction<Long, RowData> { private static final Distribution intDist = Metrics.distribution(MapRowDataFn.class, 
"intsDistribution"); private static final Distribution millisDist = Metrics.distribution(MapRowDataFn.class, "millisDistribution"); static String randomStr(int seed) { Random rnd = new Random(seed); StringBuilder sb = new StringBuilder(rnd.nextInt(50)); for (int i = 0; i < sb.capacity(); i++) { int nextChar = rnd.nextInt(); while (!Character.isBmpCodePoint(nextChar)) { nextChar = rnd.nextInt(); } sb.append(Character.toChars(nextChar)[0]); } return sb.toString(); } @Override public RowData apply(Long input) { Random rnd = new Random(input); int millisOffset = rnd.nextInt(); millisOffset = millisOffset < 0 ? -millisOffset : millisOffset; int id = rnd.nextInt(); MapRowDataFn.intDist.update(id); MapRowDataFn.millisDist.update(millisOffset); return new RowData( id, randomStr(rnd.nextInt()), new DateTime(Instant.EPOCH.plus(Duration.millis(millisOffset)))); } } static class RowDataMapper implements RowMapper<RowData> { @Override public RowData mapRow(ResultSet resultSet) throws Exception { return new RowData( resultSet.getInt(1), resultSet.getString(2), new DateTime(resultSet.getTimestamp(3))); } } @Test public void testAutomaticDateTimePartitioning() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withLowerBound(new DateTime(0)) .withUpperBound(DateTime.now()) .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticLongPartitioning() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, Long>readWithPartitions(TypeDescriptors.longs()) 
.withPartitionColumn("id") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withLowerBound(Long.MIN_VALUE) .withUpperBound(Long.MAX_VALUE) .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test @Ignore("BEAM-13846") public void testAutomaticStringPartitioning() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, String>readWithPartitions(TypeDescriptors.strings()) .withPartitionColumn("name") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withLowerBound("") .withUpperBound("999999") .withNumPartitions(5) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticDateTimePartitioningAutomaticRangeManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticLongPartitioningAutomaticRangeManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, Long>readWithPartitions(TypeDescriptors.longs()) .withPartitionColumn("id") .withDataSourceProviderFn( voide -> 
DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test @Ignore("BEAM-13846") public void testAutomaticStringPartitioningAutomaticRangeManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, String>readWithPartitions(TypeDescriptors.strings()) .withPartitionColumn("name") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withNumPartitions(5) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticLongPartitioningAutomaticPartitionManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData>readWithPartitions() .withPartitionColumn("id") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test @Ignore("BEAM-13846") public void testAutomaticStringPartitioningAutomaticPartitionManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, String>readWithPartitions(TypeDescriptors.strings()) .withPartitionColumn("name") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowMapper(new RowDataMapper())); 
PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticDateTimePartitioningAutomaticPartitionManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticDateTimePartitioningAutomaticPartitionManagementAndBeamRows() throws SQLException { final String dbmsLocal = dbms; PCollection<Row> databaseData = pipelineRead.apply( JdbcIO.<Row, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowOutput()); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } }
class JdbcIOAutoPartitioningIT { private static final Logger LOG = LoggerFactory.getLogger(JdbcIOAutoPartitioningIT.class); public static final Integer NUM_ROWS = 1_000; public static final String TABLE_NAME = "baseTable"; @ClassRule public static TestPipeline pipelineWrite = TestPipeline.create(); @Rule public TestPipeline pipelineRead = TestPipeline.create(); @Parameterized.Parameters(name = "{0}") public static Iterable<String> params() { return Lists.newArrayList("mysql", "postgres"); } @Parameterized.Parameter(0) public String dbms; public static JdbcDatabaseContainer<?> getDb(String dbName) { if (dbName.equals("mysql")) { return mysql; } else { return postgres; } } @Rule public TestRule retryRule = new TestRule() { public final int maxRetries = 2; @Override public Statement apply(Statement base, Description description) { return new Statement() { @Override public void evaluate() throws Throwable { Throwable caughtThrowable = null; for (int i = 0; i < maxRetries; i++) { try { pipelineRead.apply(base, description); base.evaluate(); return; } catch (Throwable t) { caughtThrowable = t; System.err.println( description.getDisplayName() + ": run " + (i + 1) + " failed."); } } System.err.println( description.getDisplayName() + ": Giving up after " + maxRetries + " failures."); throw Objects.requireNonNull(caughtThrowable); } }; } }; @ClassRule public static JdbcDatabaseContainer<?> mysql = new MySQLContainer<>("mysql"); @ClassRule public static JdbcDatabaseContainer<?> postgres = new PostgreSQLContainer<>("postgres"); @Before @DefaultSchema(JavaFieldSchema.class) static class RowData { public final Integer id; public final String name; public final DateTime specialDate; @SchemaCreate public RowData(Integer id, String name, DateTime specialDate) { this.id = id; this.name = name; this.specialDate = specialDate; } } static class MapRowDataFn extends SimpleFunction<Long, RowData> { private static final Distribution intDist = Metrics.distribution(MapRowDataFn.class, 
"intsDistribution"); private static final Distribution millisDist = Metrics.distribution(MapRowDataFn.class, "millisDistribution"); static String randomStr(int seed) { Random rnd = new Random(seed); StringBuilder sb = new StringBuilder(rnd.nextInt(50)); for (int i = 0; i < sb.capacity(); i++) { int nextChar = rnd.nextInt(); while (!Character.isBmpCodePoint(nextChar)) { nextChar = rnd.nextInt(); } sb.append(Character.toChars(nextChar)[0]); } return sb.toString(); } @Override public RowData apply(Long input) { Random rnd = new Random(input); int millisOffset = rnd.nextInt(); millisOffset = millisOffset < 0 ? -millisOffset : millisOffset; int id = rnd.nextInt(); MapRowDataFn.intDist.update(id); MapRowDataFn.millisDist.update(millisOffset); return new RowData( id, randomStr(rnd.nextInt()), new DateTime(Instant.EPOCH.plus(Duration.millis(millisOffset)))); } } static class RowDataMapper implements RowMapper<RowData> { @Override public RowData mapRow(ResultSet resultSet) throws Exception { return new RowData( resultSet.getInt(1), resultSet.getString(2), new DateTime(resultSet.getTimestamp(3))); } } @Test public void testAutomaticDateTimePartitioning() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withLowerBound(new DateTime(0)) .withUpperBound(DateTime.now()) .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticLongPartitioning() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, Long>readWithPartitions(TypeDescriptors.longs()) 
.withPartitionColumn("id") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withLowerBound(Long.MIN_VALUE) .withUpperBound(Long.MAX_VALUE) .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test @Ignore("BEAM-13846") public void testAutomaticStringPartitioning() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, String>readWithPartitions(TypeDescriptors.strings()) .withPartitionColumn("name") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withLowerBound("") .withUpperBound("999999") .withNumPartitions(5) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticDateTimePartitioningAutomaticRangeManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticLongPartitioningAutomaticRangeManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, Long>readWithPartitions(TypeDescriptors.longs()) .withPartitionColumn("id") .withDataSourceProviderFn( voide -> 
DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withNumPartitions(10) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test @Ignore("BEAM-13846") public void testAutomaticStringPartitioningAutomaticRangeManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, String>readWithPartitions(TypeDescriptors.strings()) .withPartitionColumn("name") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withNumPartitions(5) .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticLongPartitioningAutomaticPartitionManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData>readWithPartitions() .withPartitionColumn("id") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test @Ignore("BEAM-13846") public void testAutomaticStringPartitioningAutomaticPartitionManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, String>readWithPartitions(TypeDescriptors.strings()) .withPartitionColumn("name") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowMapper(new RowDataMapper())); 
PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticDateTimePartitioningAutomaticPartitionManagement() throws SQLException { final String dbmsLocal = dbms; PCollection<RowData> databaseData = pipelineRead.apply( JdbcIO.<RowData, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowMapper(new RowDataMapper())); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } @Test public void testAutomaticDateTimePartitioningAutomaticPartitionManagementAndBeamRows() throws SQLException { final String dbmsLocal = dbms; PCollection<Row> databaseData = pipelineRead.apply( JdbcIO.<Row, DateTime>readWithPartitions(TypeDescriptor.of(DateTime.class)) .withPartitionColumn("specialDate") .withDataSourceProviderFn( voide -> DatabaseTestHelper.getDataSourceForContainer(getDb(dbmsLocal))) .withTable("baseTable") .withRowOutput()); PAssert.that(databaseData.apply(Count.globally())).containsInAnyOrder(NUM_ROWS.longValue()); pipelineRead.run().waitUntilFinish(); } }
The strange issue was related to HTTP/2 and was solved by using `executeBlocking`; see https://github.com/quarkusio/quarkus/issues/34912
public static Uni<HttpResponse<Buffer>> sendRequest(Vertx vertx, HttpRequest<Buffer> request, boolean blockingDnsLookup) { if (blockingDnsLookup) { return vertx.executeBlocking(new Callable<Void>() { @Override public Void call() { try { InetAddress.getByName(request.host()); } catch (UnknownHostException e) { throw new RuntimeException(e); } return null; } }).flatMap(new Function<Void, Uni<? extends HttpResponse<Buffer>>>() { @Override public Uni<? extends HttpResponse<Buffer>> apply(Void unused) { return request.send(); } }); } else { return request.send(); } }
return vertx.executeBlocking(new Callable<Void>() {
Override public Void call() { try { InetAddress.getByName(request.host()); } catch (UnknownHostException e) { throw new RuntimeException(e); } return null; }
class OidcCommonUtils { public static final Duration CONNECTION_BACKOFF_DURATION = Duration.ofSeconds(2); static final byte AMP = '&'; static final byte EQ = '='; static final String HTTP_SCHEME = "http"; private static final Logger LOG = Logger.getLogger(OidcCommonUtils.class); private OidcCommonUtils() { } public static void verifyEndpointUrl(String endpointUrl) { try { URI.create(endpointUrl).toURL(); } catch (Throwable ex) { throw new ConfigurationException( String.format("'%s' is invalid", endpointUrl), ex); } } public static void verifyCommonConfiguration(OidcCommonConfig oidcConfig, boolean clientIdOptional, boolean isServerConfig) { final String configPrefix = isServerConfig ? "quarkus.oidc." : "quarkus.oidc-client."; if (!clientIdOptional && !oidcConfig.getClientId().isPresent()) { throw new ConfigurationException( String.format("'%sclient-id' property must be configured", configPrefix)); } Credentials creds = oidcConfig.getCredentials(); if (creds.secret.isPresent() && creds.clientSecret.value.isPresent()) { throw new ConfigurationException( String.format( "'%1$scredentials.secret' and '%1$scredentials.client-secret' properties are mutually exclusive", configPrefix)); } if ((creds.secret.isPresent() || creds.clientSecret.value.isPresent()) && creds.jwt.secret.isPresent()) { throw new ConfigurationException( String.format( "Use only '%1$scredentials.secret' or '%1$scredentials.client-secret' or '%1$scredentials.jwt.secret' property", configPrefix)); } } public static String prependSlash(String path) { return !path.startsWith("/") ? 
"/" + path : path; } public static Buffer encodeForm(MultiMap form) { Buffer buffer = Buffer.buffer(); for (Map.Entry<String, String> entry : form) { if (buffer.length() != 0) { buffer.appendByte(AMP); } buffer.appendString(entry.getKey()); buffer.appendByte(EQ); buffer.appendString(urlEncode(entry.getValue())); } return buffer; } public static String urlEncode(String value) { try { return URLEncoder.encode(value, StandardCharsets.UTF_8.name()); } catch (Exception ex) { throw new RuntimeException(ex); } } public static void setHttpClientOptions(OidcCommonConfig oidcConfig, TlsConfig tlsConfig, HttpClientOptions options) { boolean trustAll = oidcConfig.tls.verification.isPresent() ? oidcConfig.tls.verification.get() == Verification.NONE : tlsConfig.trustAll; if (trustAll) { options.setTrustAll(true); options.setVerifyHost(false); } else if (oidcConfig.tls.trustStoreFile.isPresent()) { try { byte[] trustStoreData = getFileContent(oidcConfig.tls.trustStoreFile.get()); io.vertx.core.net.KeyStoreOptions trustStoreOptions = new KeyStoreOptions() .setPassword(oidcConfig.tls.getTrustStorePassword().orElse("password")) .setAlias(oidcConfig.tls.getTrustStoreCertAlias().orElse(null)) .setValue(io.vertx.core.buffer.Buffer.buffer(trustStoreData)) .setType(getKeyStoreType(oidcConfig.tls.trustStoreFileType, oidcConfig.tls.trustStoreFile.get())) .setProvider(oidcConfig.tls.trustStoreProvider.orElse(null)); options.setTrustOptions(trustStoreOptions); if (Verification.CERTIFICATE_VALIDATION == oidcConfig.tls.verification.orElse(Verification.REQUIRED)) { options.setVerifyHost(false); } } catch (IOException ex) { throw new ConfigurationException(String.format( "OIDC truststore file does not exist or can not be read", oidcConfig.tls.trustStoreFile.get().toString()), ex); } } if (oidcConfig.tls.keyStoreFile.isPresent()) { try { byte[] keyStoreData = getFileContent(oidcConfig.tls.keyStoreFile.get()); io.vertx.core.net.KeyStoreOptions keyStoreOptions = new KeyStoreOptions() 
.setAlias(oidcConfig.tls.keyStoreKeyAlias.orElse(null)) .setAliasPassword(oidcConfig.tls.keyStoreKeyPassword.orElse(null)) .setValue(io.vertx.core.buffer.Buffer.buffer(keyStoreData)) .setType(getKeyStoreType(oidcConfig.tls.keyStoreFileType, oidcConfig.tls.keyStoreFile.get())) .setProvider(oidcConfig.tls.keyStoreProvider.orElse(null)); if (oidcConfig.tls.keyStorePassword.isPresent()) { keyStoreOptions.setPassword(oidcConfig.tls.keyStorePassword.get()); } options.setKeyCertOptions(keyStoreOptions); } catch (IOException ex) { throw new ConfigurationException(String.format( "OIDC keystore file does not exist or can not be read", oidcConfig.tls.keyStoreFile.get().toString()), ex); } } Optional<ProxyOptions> proxyOpt = toProxyOptions(oidcConfig.getProxy()); if (proxyOpt.isPresent()) { options.setProxyOptions(proxyOpt.get()); } OptionalInt maxPoolSize = oidcConfig.maxPoolSize; if (maxPoolSize.isPresent()) { options.setMaxPoolSize(maxPoolSize.getAsInt()); } options.setConnectTimeout((int) oidcConfig.getConnectionTimeout().toMillis()); } public static String getKeyStoreType(Optional<String> fileType, Path storePath) { if (fileType.isPresent()) { return fileType.get().toUpperCase(); } final String pathName = storePath.toString(); if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) { return "PKCS12"; } else { return "JKS"; } } public static String getAuthServerUrl(OidcCommonConfig oidcConfig) { return removeLastPathSeparator(oidcConfig.getAuthServerUrl().get()); } private static String removeLastPathSeparator(String value) { return value.endsWith("/") ? value.substring(0, value.length() - 1) : value; } public static String getOidcEndpointUrl(String authServerUrl, Optional<String> endpointPath) { if (endpointPath != null && endpointPath.isPresent()) { return isAbsoluteUrl(endpointPath) ? 
endpointPath.get() : authServerUrl + prependSlash(endpointPath.get()); } else { return null; } } public static boolean isAbsoluteUrl(Optional<String> endpointUrl) { return endpointUrl.isPresent() && endpointUrl.get().startsWith(HTTP_SCHEME); } private static long getConnectionDelay(OidcCommonConfig oidcConfig) { return oidcConfig.getConnectionDelay().isPresent() ? oidcConfig.getConnectionDelay().get().getSeconds() : 0; } public static long getConnectionDelayInMillis(OidcCommonConfig oidcConfig) { final long connectionDelayInSecs = getConnectionDelay(oidcConfig); final long connectionRetryCount = connectionDelayInSecs > 1 ? connectionDelayInSecs / 2 : 1; if (connectionRetryCount > 1) { LOG.infof("Connecting to OpenId Connect Provider for up to %d times every 2 seconds", connectionRetryCount); } return connectionDelayInSecs * 1000; } public static Optional<ProxyOptions> toProxyOptions(OidcCommonConfig.Proxy proxyConfig) { if (!proxyConfig.host.isPresent()) { return Optional.empty(); } JsonObject jsonOptions = new JsonObject(); String host = URI.create(proxyConfig.host.get()).getHost(); if (host == null) { host = proxyConfig.host.get(); } jsonOptions.put("host", host); jsonOptions.put("port", proxyConfig.port); if (proxyConfig.username.isPresent()) { jsonOptions.put("username", proxyConfig.username.get()); } if (proxyConfig.password.isPresent()) { jsonOptions.put("password", proxyConfig.password.get()); } return Optional.of(new ProxyOptions(jsonOptions)); } public static String formatConnectionErrorMessage(String authServerUrlString) { return String.format("OIDC server is not available at the '%s' URL. " + "Please make sure it is correct. 
Note it has to end with a realm value if you work with Keycloak, for example:" + " 'https: } public static boolean isClientSecretBasicAuthRequired(Credentials creds) { return creds.secret.isPresent() || ((creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent()) && clientSecretMethod(creds) == Secret.Method.BASIC); } public static boolean isClientJwtAuthRequired(Credentials creds) { return creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent() || creds.jwt.keyFile.isPresent() || creds.jwt.keyStoreFile.isPresent(); } public static boolean isClientSecretPostAuthRequired(Credentials creds) { return (creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent()) && clientSecretMethod(creds) == Secret.Method.POST; } public static boolean isClientSecretPostJwtAuthRequired(Credentials creds) { return clientSecretMethod(creds) == Secret.Method.POST_JWT && isClientJwtAuthRequired(creds); } public static String clientSecret(Credentials creds) { return creds.secret.orElse(creds.clientSecret.value.orElseGet(fromCredentialsProvider(creds.clientSecret.provider))); } public static String jwtSecret(Credentials creds) { return creds.jwt.secret.orElseGet(fromCredentialsProvider(creds.jwt.secretProvider)); } public static Secret.Method clientSecretMethod(Credentials creds) { return creds.clientSecret.method.orElseGet(() -> Secret.Method.BASIC); } private static Supplier<? 
extends String> fromCredentialsProvider(Provider provider) { return new Supplier<String>() { @Override public String get() { if (provider.key.isPresent()) { String providerName = provider.name.orElse(null); CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(providerName); if (credentialsProvider != null) { return credentialsProvider.getCredentials(providerName).get(provider.key.get()); } } return null; } }; } public static Key clientJwtKey(Credentials creds) { if (creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent()) { return KeyUtils .createSecretKeyFromSecret(jwtSecret(creds)); } else { Key key = null; try { if (creds.jwt.getKeyFile().isPresent()) { key = KeyUtils.readSigningKey(creds.jwt.getKeyFile().get(), creds.jwt.keyId.orElse(null), getSignatureAlgorithm(creds, SignatureAlgorithm.RS256)); } else if (creds.jwt.keyStoreFile.isPresent()) { KeyStore ks = KeyStore.getInstance("JKS"); InputStream is = ResourceUtils.getResourceStream(creds.jwt.keyStoreFile.get()); if (creds.jwt.keyStorePassword.isPresent()) { ks.load(is, creds.jwt.keyStorePassword.get().toCharArray()); } else { ks.load(is, null); } if (creds.jwt.keyPassword.isPresent()) { key = ks.getKey(creds.jwt.keyId.get(), creds.jwt.keyPassword.get().toCharArray()); } else { throw new ConfigurationException( "When using a key store, the `quarkus.oidc-client.credentials.jwt.key-password` property must be set"); } } } catch (Exception ex) { throw new ConfigurationException("Key can not be loaded", ex); } if (key == null) { throw new ConfigurationException("Key is null"); } return key; } } public static String signJwtWithKey(OidcCommonConfig oidcConfig, String tokenRequestUri, Key key) { JwtSignatureBuilder builder = Jwt .claims(additionalClaims(oidcConfig.credentials.jwt.getClaims())) .issuer(oidcConfig.credentials.jwt.issuer.orElse(oidcConfig.clientId.get())) .subject(oidcConfig.credentials.jwt.subject.orElse(oidcConfig.clientId.get())) 
.audience(oidcConfig.credentials.jwt.getAudience().isPresent() ? removeLastPathSeparator(oidcConfig.credentials.jwt.getAudience().get()) : tokenRequestUri) .expiresIn(oidcConfig.credentials.jwt.lifespan) .jws(); if (oidcConfig.credentials.jwt.getTokenKeyId().isPresent()) { builder.keyId(oidcConfig.credentials.jwt.getTokenKeyId().get()); } SignatureAlgorithm signatureAlgorithm = getSignatureAlgorithm(oidcConfig.credentials, null); if (signatureAlgorithm != null) { builder.algorithm(signatureAlgorithm); } if (key instanceof SecretKey) { return builder.sign((SecretKey) key); } else { return builder.sign((PrivateKey) key); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private static Map<String, Object> additionalClaims(Map<String, String> claims) { return (Map) claims; } private static SignatureAlgorithm getSignatureAlgorithm(Credentials credentials, SignatureAlgorithm defaultAlgorithm) { if (credentials.jwt.getSignatureAlgorithm().isPresent()) { try { return SignatureAlgorithm.fromAlgorithm(credentials.jwt.getSignatureAlgorithm().get()); } catch (Exception ex) { throw new ConfigurationException("Unsupported signature algorithm"); } } else { return defaultAlgorithm; } } public static void verifyConfigurationId(String defaultId, String configKey, Optional<String> configId) { if (configKey.equals(defaultId)) { throw new ConfigurationException("configuration id '" + configKey + "' duplicates the default configuration id"); } if (configId.isPresent() && !configKey.equals(configId.get())) { throw new ConfigurationException("Configuration has 2 different id values: '" + configKey + "' and '" + configId.get() + "'"); } } public static String initClientSecretBasicAuth(OidcCommonConfig oidcConfig) { if (isClientSecretBasicAuthRequired(oidcConfig.credentials)) { return basicSchemeValue(oidcConfig.getClientId().get(), clientSecret(oidcConfig.credentials)); } return null; } public static String basicSchemeValue(String name, String secret) { return OidcConstants.BASIC_SCHEME + 
" " + Base64.getEncoder().encodeToString((name + ":" + secret).getBytes(StandardCharsets.UTF_8)); } public static Key initClientJwtKey(OidcCommonConfig oidcConfig) { if (isClientJwtAuthRequired(oidcConfig.credentials)) { return clientJwtKey(oidcConfig.credentials); } return null; } public static Predicate<? super Throwable> oidcEndpointNotAvailable() { return t -> (t instanceof ConnectException || (t instanceof OidcEndpointAccessException && ((OidcEndpointAccessException) t).getErrorStatus() == 404)); } public static Uni<JsonObject> discoverMetadata(WebClient client, Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters, String authServerUrl, long connectionDelayInMillisecs, Vertx vertx, boolean blockingDnsLookup) { final String discoveryUrl = getDiscoveryUri(authServerUrl); HttpRequest<Buffer> request = client.getAbs(discoveryUrl); if (!filters.isEmpty()) { OidcRequestContextProperties requestProps = new OidcRequestContextProperties( Map.of(OidcRequestContextProperties.DISCOVERY_ENDPOINT, discoveryUrl)); for (OidcRequestFilter filter : getMatchingOidcRequestFilters(filters, OidcEndpoint.Type.DISCOVERY)) { filter.filter(request, null, requestProps); } } return sendRequest(vertx, request, blockingDnsLookup).onItem().transform(resp -> { if (resp.statusCode() == 200) { return resp.bodyAsJsonObject(); } else { String errorMessage = resp.bodyAsString(); if (errorMessage != null && !errorMessage.isEmpty()) { LOG.warnf("Discovery request %s has failed, status code: %d, error message: %s", discoveryUrl, resp.statusCode(), errorMessage); } else { LOG.warnf("Discovery request %s has failed, status code: %d", discoveryUrl, resp.statusCode()); } throw new OidcEndpointAccessException(resp.statusCode()); } }).onFailure(oidcEndpointNotAvailable()) .retry() .withBackOff(CONNECTION_BACKOFF_DURATION, CONNECTION_BACKOFF_DURATION) .expireIn(connectionDelayInMillisecs) .onFailure().transform(t -> { LOG.warn("OIDC Server is not available:", t.getCause() != null ? 
t.getCause() : t); return new RuntimeException("OIDC Server is not available"); }); } public static String getDiscoveryUri(String authServerUrl) { return authServerUrl + OidcConstants.WELL_KNOWN_CONFIGURATION; } private static byte[] getFileContent(Path path) throws IOException { byte[] data; final InputStream resource = Thread.currentThread().getContextClassLoader() .getResourceAsStream(ClassPathUtils.toResourceName(path)); if (resource != null) { try (InputStream is = resource) { data = doRead(is); } } else { try (InputStream is = Files.newInputStream(path)) { data = doRead(is); } } return data; } private static byte[] doRead(InputStream is) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buf = new byte[1024]; int r; while ((r = is.read(buf)) > 0) { out.write(buf, 0, r); } return out.toByteArray(); } public static Map<OidcEndpoint.Type, List<OidcRequestFilter>> getOidcRequestFilters() { ArcContainer container = Arc.container(); if (container != null) { Map<OidcEndpoint.Type, List<OidcRequestFilter>> map = new HashMap<>(); for (OidcRequestFilter filter : container.listAll(OidcRequestFilter.class).stream().map(handle -> handle.get()) .collect(Collectors.toList())) { OidcEndpoint endpoint = ClientProxy.unwrap(filter).getClass().getAnnotation(OidcEndpoint.class); if (endpoint != null) { for (OidcEndpoint.Type type : endpoint.value()) { map.computeIfAbsent(type, k -> new ArrayList<OidcRequestFilter>()).add(filter); } } else { map.computeIfAbsent(OidcEndpoint.Type.ALL, k -> new ArrayList<OidcRequestFilter>()).add(filter); } } return map; } return Map.of(); } public static List<OidcRequestFilter> getMatchingOidcRequestFilters(Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters, OidcEndpoint.Type type) { List<OidcRequestFilter> typeSpecific = filters.get(type); List<OidcRequestFilter> all = filters.get(OidcEndpoint.Type.ALL); if (typeSpecific == null && all == null) { return List.of(); } if (typeSpecific != null && all == null) { 
return typeSpecific; } else if (typeSpecific == null && all != null) { return all; } else { List<OidcRequestFilter> combined = new ArrayList<>(typeSpecific.size() + all.size()); combined.addAll(typeSpecific); combined.addAll(all); return combined; } } public static Uni<HttpResponse<Buffer>> sendRequest(io.vertx.core.Vertx vertx, HttpRequest<Buffer> request, boolean blockingDnsLookup) { if (blockingDnsLookup) { return sendRequest(new Vertx(vertx), request, true); } else { return request.send(); } } public static Uni<HttpResponse<Buffer>> sendRequest(Vertx vertx, HttpRequest<Buffer> request, boolean blockingDnsLookup) { if (blockingDnsLookup) { return vertx.executeBlocking(new Callable<Void>() { @ }).flatMap(new Function<Void, Uni<? extends HttpResponse<Buffer>>>() { @Override public Uni<? extends HttpResponse<Buffer>> apply(Void unused) { return request.send(); } }); } else { return request.send(); } } }
class OidcCommonUtils { public static final Duration CONNECTION_BACKOFF_DURATION = Duration.ofSeconds(2); static final byte AMP = '&'; static final byte EQ = '='; static final String HTTP_SCHEME = "http"; private static final Logger LOG = Logger.getLogger(OidcCommonUtils.class); private OidcCommonUtils() { } public static void verifyEndpointUrl(String endpointUrl) { try { URI.create(endpointUrl).toURL(); } catch (Throwable ex) { throw new ConfigurationException( String.format("'%s' is invalid", endpointUrl), ex); } } public static void verifyCommonConfiguration(OidcCommonConfig oidcConfig, boolean clientIdOptional, boolean isServerConfig) { final String configPrefix = isServerConfig ? "quarkus.oidc." : "quarkus.oidc-client."; if (!clientIdOptional && !oidcConfig.getClientId().isPresent()) { throw new ConfigurationException( String.format("'%sclient-id' property must be configured", configPrefix)); } Credentials creds = oidcConfig.getCredentials(); if (creds.secret.isPresent() && creds.clientSecret.value.isPresent()) { throw new ConfigurationException( String.format( "'%1$scredentials.secret' and '%1$scredentials.client-secret' properties are mutually exclusive", configPrefix)); } if ((creds.secret.isPresent() || creds.clientSecret.value.isPresent()) && creds.jwt.secret.isPresent()) { throw new ConfigurationException( String.format( "Use only '%1$scredentials.secret' or '%1$scredentials.client-secret' or '%1$scredentials.jwt.secret' property", configPrefix)); } } public static String prependSlash(String path) { return !path.startsWith("/") ? 
"/" + path : path; } public static Buffer encodeForm(MultiMap form) { Buffer buffer = Buffer.buffer(); for (Map.Entry<String, String> entry : form) { if (buffer.length() != 0) { buffer.appendByte(AMP); } buffer.appendString(entry.getKey()); buffer.appendByte(EQ); buffer.appendString(urlEncode(entry.getValue())); } return buffer; } public static String urlEncode(String value) { try { return URLEncoder.encode(value, StandardCharsets.UTF_8.name()); } catch (Exception ex) { throw new RuntimeException(ex); } } public static void setHttpClientOptions(OidcCommonConfig oidcConfig, TlsConfig tlsConfig, HttpClientOptions options) { boolean trustAll = oidcConfig.tls.verification.isPresent() ? oidcConfig.tls.verification.get() == Verification.NONE : tlsConfig.trustAll; if (trustAll) { options.setTrustAll(true); options.setVerifyHost(false); } else if (oidcConfig.tls.trustStoreFile.isPresent()) { try { byte[] trustStoreData = getFileContent(oidcConfig.tls.trustStoreFile.get()); io.vertx.core.net.KeyStoreOptions trustStoreOptions = new KeyStoreOptions() .setPassword(oidcConfig.tls.getTrustStorePassword().orElse("password")) .setAlias(oidcConfig.tls.getTrustStoreCertAlias().orElse(null)) .setValue(io.vertx.core.buffer.Buffer.buffer(trustStoreData)) .setType(getKeyStoreType(oidcConfig.tls.trustStoreFileType, oidcConfig.tls.trustStoreFile.get())) .setProvider(oidcConfig.tls.trustStoreProvider.orElse(null)); options.setTrustOptions(trustStoreOptions); if (Verification.CERTIFICATE_VALIDATION == oidcConfig.tls.verification.orElse(Verification.REQUIRED)) { options.setVerifyHost(false); } } catch (IOException ex) { throw new ConfigurationException(String.format( "OIDC truststore file does not exist or can not be read", oidcConfig.tls.trustStoreFile.get().toString()), ex); } } if (oidcConfig.tls.keyStoreFile.isPresent()) { try { byte[] keyStoreData = getFileContent(oidcConfig.tls.keyStoreFile.get()); io.vertx.core.net.KeyStoreOptions keyStoreOptions = new KeyStoreOptions() 
.setAlias(oidcConfig.tls.keyStoreKeyAlias.orElse(null)) .setAliasPassword(oidcConfig.tls.keyStoreKeyPassword.orElse(null)) .setValue(io.vertx.core.buffer.Buffer.buffer(keyStoreData)) .setType(getKeyStoreType(oidcConfig.tls.keyStoreFileType, oidcConfig.tls.keyStoreFile.get())) .setProvider(oidcConfig.tls.keyStoreProvider.orElse(null)); if (oidcConfig.tls.keyStorePassword.isPresent()) { keyStoreOptions.setPassword(oidcConfig.tls.keyStorePassword.get()); } options.setKeyCertOptions(keyStoreOptions); } catch (IOException ex) { throw new ConfigurationException(String.format( "OIDC keystore file does not exist or can not be read", oidcConfig.tls.keyStoreFile.get().toString()), ex); } } Optional<ProxyOptions> proxyOpt = toProxyOptions(oidcConfig.getProxy()); if (proxyOpt.isPresent()) { options.setProxyOptions(proxyOpt.get()); } OptionalInt maxPoolSize = oidcConfig.maxPoolSize; if (maxPoolSize.isPresent()) { options.setMaxPoolSize(maxPoolSize.getAsInt()); } options.setConnectTimeout((int) oidcConfig.getConnectionTimeout().toMillis()); } public static String getKeyStoreType(Optional<String> fileType, Path storePath) { if (fileType.isPresent()) { return fileType.get().toUpperCase(); } final String pathName = storePath.toString(); if (pathName.endsWith(".p12") || pathName.endsWith(".pkcs12") || pathName.endsWith(".pfx")) { return "PKCS12"; } else { return "JKS"; } } public static String getAuthServerUrl(OidcCommonConfig oidcConfig) { return removeLastPathSeparator(oidcConfig.getAuthServerUrl().get()); } private static String removeLastPathSeparator(String value) { return value.endsWith("/") ? value.substring(0, value.length() - 1) : value; } public static String getOidcEndpointUrl(String authServerUrl, Optional<String> endpointPath) { if (endpointPath != null && endpointPath.isPresent()) { return isAbsoluteUrl(endpointPath) ? 
endpointPath.get() : authServerUrl + prependSlash(endpointPath.get()); } else { return null; } } public static boolean isAbsoluteUrl(Optional<String> endpointUrl) { return endpointUrl.isPresent() && endpointUrl.get().startsWith(HTTP_SCHEME); } private static long getConnectionDelay(OidcCommonConfig oidcConfig) { return oidcConfig.getConnectionDelay().isPresent() ? oidcConfig.getConnectionDelay().get().getSeconds() : 0; } public static long getConnectionDelayInMillis(OidcCommonConfig oidcConfig) { final long connectionDelayInSecs = getConnectionDelay(oidcConfig); final long connectionRetryCount = connectionDelayInSecs > 1 ? connectionDelayInSecs / 2 : 1; if (connectionRetryCount > 1) { LOG.infof("Connecting to OpenId Connect Provider for up to %d times every 2 seconds", connectionRetryCount); } return connectionDelayInSecs * 1000; } public static Optional<ProxyOptions> toProxyOptions(OidcCommonConfig.Proxy proxyConfig) { if (!proxyConfig.host.isPresent()) { return Optional.empty(); } JsonObject jsonOptions = new JsonObject(); String host = URI.create(proxyConfig.host.get()).getHost(); if (host == null) { host = proxyConfig.host.get(); } jsonOptions.put("host", host); jsonOptions.put("port", proxyConfig.port); if (proxyConfig.username.isPresent()) { jsonOptions.put("username", proxyConfig.username.get()); } if (proxyConfig.password.isPresent()) { jsonOptions.put("password", proxyConfig.password.get()); } return Optional.of(new ProxyOptions(jsonOptions)); } public static String formatConnectionErrorMessage(String authServerUrlString) { return String.format("OIDC server is not available at the '%s' URL. " + "Please make sure it is correct. 
Note it has to end with a realm value if you work with Keycloak, for example:" + " 'https: } public static boolean isClientSecretBasicAuthRequired(Credentials creds) { return creds.secret.isPresent() || ((creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent()) && clientSecretMethod(creds) == Secret.Method.BASIC); } public static boolean isClientJwtAuthRequired(Credentials creds) { return creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent() || creds.jwt.keyFile.isPresent() || creds.jwt.keyStoreFile.isPresent(); } public static boolean isClientSecretPostAuthRequired(Credentials creds) { return (creds.clientSecret.value.isPresent() || creds.clientSecret.provider.key.isPresent()) && clientSecretMethod(creds) == Secret.Method.POST; } public static boolean isClientSecretPostJwtAuthRequired(Credentials creds) { return clientSecretMethod(creds) == Secret.Method.POST_JWT && isClientJwtAuthRequired(creds); } public static String clientSecret(Credentials creds) { return creds.secret.orElse(creds.clientSecret.value.orElseGet(fromCredentialsProvider(creds.clientSecret.provider))); } public static String jwtSecret(Credentials creds) { return creds.jwt.secret.orElseGet(fromCredentialsProvider(creds.jwt.secretProvider)); } public static Secret.Method clientSecretMethod(Credentials creds) { return creds.clientSecret.method.orElseGet(() -> Secret.Method.BASIC); } private static Supplier<? 
extends String> fromCredentialsProvider(Provider provider) { return new Supplier<String>() { @Override public String get() { if (provider.key.isPresent()) { String providerName = provider.name.orElse(null); CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(providerName); if (credentialsProvider != null) { return credentialsProvider.getCredentials(providerName).get(provider.key.get()); } } return null; } }; } public static Key clientJwtKey(Credentials creds) { if (creds.jwt.secret.isPresent() || creds.jwt.secretProvider.key.isPresent()) { return KeyUtils .createSecretKeyFromSecret(jwtSecret(creds)); } else { Key key = null; try { if (creds.jwt.getKeyFile().isPresent()) { key = KeyUtils.readSigningKey(creds.jwt.getKeyFile().get(), creds.jwt.keyId.orElse(null), getSignatureAlgorithm(creds, SignatureAlgorithm.RS256)); } else if (creds.jwt.keyStoreFile.isPresent()) { KeyStore ks = KeyStore.getInstance("JKS"); InputStream is = ResourceUtils.getResourceStream(creds.jwt.keyStoreFile.get()); if (creds.jwt.keyStorePassword.isPresent()) { ks.load(is, creds.jwt.keyStorePassword.get().toCharArray()); } else { ks.load(is, null); } if (creds.jwt.keyPassword.isPresent()) { key = ks.getKey(creds.jwt.keyId.get(), creds.jwt.keyPassword.get().toCharArray()); } else { throw new ConfigurationException( "When using a key store, the `quarkus.oidc-client.credentials.jwt.key-password` property must be set"); } } } catch (Exception ex) { throw new ConfigurationException("Key can not be loaded", ex); } if (key == null) { throw new ConfigurationException("Key is null"); } return key; } } public static String signJwtWithKey(OidcCommonConfig oidcConfig, String tokenRequestUri, Key key) { JwtSignatureBuilder builder = Jwt .claims(additionalClaims(oidcConfig.credentials.jwt.getClaims())) .issuer(oidcConfig.credentials.jwt.issuer.orElse(oidcConfig.clientId.get())) .subject(oidcConfig.credentials.jwt.subject.orElse(oidcConfig.clientId.get())) 
.audience(oidcConfig.credentials.jwt.getAudience().isPresent() ? removeLastPathSeparator(oidcConfig.credentials.jwt.getAudience().get()) : tokenRequestUri) .expiresIn(oidcConfig.credentials.jwt.lifespan) .jws(); if (oidcConfig.credentials.jwt.getTokenKeyId().isPresent()) { builder.keyId(oidcConfig.credentials.jwt.getTokenKeyId().get()); } SignatureAlgorithm signatureAlgorithm = getSignatureAlgorithm(oidcConfig.credentials, null); if (signatureAlgorithm != null) { builder.algorithm(signatureAlgorithm); } if (key instanceof SecretKey) { return builder.sign((SecretKey) key); } else { return builder.sign((PrivateKey) key); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private static Map<String, Object> additionalClaims(Map<String, String> claims) { return (Map) claims; } private static SignatureAlgorithm getSignatureAlgorithm(Credentials credentials, SignatureAlgorithm defaultAlgorithm) { if (credentials.jwt.getSignatureAlgorithm().isPresent()) { try { return SignatureAlgorithm.fromAlgorithm(credentials.jwt.getSignatureAlgorithm().get()); } catch (Exception ex) { throw new ConfigurationException("Unsupported signature algorithm"); } } else { return defaultAlgorithm; } } public static void verifyConfigurationId(String defaultId, String configKey, Optional<String> configId) { if (configKey.equals(defaultId)) { throw new ConfigurationException("configuration id '" + configKey + "' duplicates the default configuration id"); } if (configId.isPresent() && !configKey.equals(configId.get())) { throw new ConfigurationException("Configuration has 2 different id values: '" + configKey + "' and '" + configId.get() + "'"); } } public static String initClientSecretBasicAuth(OidcCommonConfig oidcConfig) { if (isClientSecretBasicAuthRequired(oidcConfig.credentials)) { return basicSchemeValue(oidcConfig.getClientId().get(), clientSecret(oidcConfig.credentials)); } return null; } public static String basicSchemeValue(String name, String secret) { return OidcConstants.BASIC_SCHEME + 
" " + Base64.getEncoder().encodeToString((name + ":" + secret).getBytes(StandardCharsets.UTF_8)); } public static Key initClientJwtKey(OidcCommonConfig oidcConfig) { if (isClientJwtAuthRequired(oidcConfig.credentials)) { return clientJwtKey(oidcConfig.credentials); } return null; } public static Predicate<? super Throwable> oidcEndpointNotAvailable() { return t -> (t instanceof ConnectException || (t instanceof OidcEndpointAccessException && ((OidcEndpointAccessException) t).getErrorStatus() == 404)); } public static Uni<JsonObject> discoverMetadata(WebClient client, Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters, String authServerUrl, long connectionDelayInMillisecs, Vertx vertx, boolean blockingDnsLookup) { final String discoveryUrl = getDiscoveryUri(authServerUrl); HttpRequest<Buffer> request = client.getAbs(discoveryUrl); if (!filters.isEmpty()) { OidcRequestContextProperties requestProps = new OidcRequestContextProperties( Map.of(OidcRequestContextProperties.DISCOVERY_ENDPOINT, discoveryUrl)); for (OidcRequestFilter filter : getMatchingOidcRequestFilters(filters, OidcEndpoint.Type.DISCOVERY)) { filter.filter(request, null, requestProps); } } return sendRequest(vertx, request, blockingDnsLookup).onItem().transform(resp -> { if (resp.statusCode() == 200) { return resp.bodyAsJsonObject(); } else { String errorMessage = resp.bodyAsString(); if (errorMessage != null && !errorMessage.isEmpty()) { LOG.warnf("Discovery request %s has failed, status code: %d, error message: %s", discoveryUrl, resp.statusCode(), errorMessage); } else { LOG.warnf("Discovery request %s has failed, status code: %d", discoveryUrl, resp.statusCode()); } throw new OidcEndpointAccessException(resp.statusCode()); } }).onFailure(oidcEndpointNotAvailable()) .retry() .withBackOff(CONNECTION_BACKOFF_DURATION, CONNECTION_BACKOFF_DURATION) .expireIn(connectionDelayInMillisecs) .onFailure().transform(t -> { LOG.warn("OIDC Server is not available:", t.getCause() != null ? 
t.getCause() : t); return new RuntimeException("OIDC Server is not available"); }); } public static String getDiscoveryUri(String authServerUrl) { return authServerUrl + OidcConstants.WELL_KNOWN_CONFIGURATION; } private static byte[] getFileContent(Path path) throws IOException { byte[] data; final InputStream resource = Thread.currentThread().getContextClassLoader() .getResourceAsStream(ClassPathUtils.toResourceName(path)); if (resource != null) { try (InputStream is = resource) { data = doRead(is); } } else { try (InputStream is = Files.newInputStream(path)) { data = doRead(is); } } return data; } private static byte[] doRead(InputStream is) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buf = new byte[1024]; int r; while ((r = is.read(buf)) > 0) { out.write(buf, 0, r); } return out.toByteArray(); } public static Map<OidcEndpoint.Type, List<OidcRequestFilter>> getOidcRequestFilters() { ArcContainer container = Arc.container(); if (container != null) { Map<OidcEndpoint.Type, List<OidcRequestFilter>> map = new HashMap<>(); for (OidcRequestFilter filter : container.listAll(OidcRequestFilter.class).stream().map(handle -> handle.get()) .collect(Collectors.toList())) { OidcEndpoint endpoint = ClientProxy.unwrap(filter).getClass().getAnnotation(OidcEndpoint.class); if (endpoint != null) { for (OidcEndpoint.Type type : endpoint.value()) { map.computeIfAbsent(type, k -> new ArrayList<OidcRequestFilter>()).add(filter); } } else { map.computeIfAbsent(OidcEndpoint.Type.ALL, k -> new ArrayList<OidcRequestFilter>()).add(filter); } } return map; } return Map.of(); } public static List<OidcRequestFilter> getMatchingOidcRequestFilters(Map<OidcEndpoint.Type, List<OidcRequestFilter>> filters, OidcEndpoint.Type type) { List<OidcRequestFilter> typeSpecific = filters.get(type); List<OidcRequestFilter> all = filters.get(OidcEndpoint.Type.ALL); if (typeSpecific == null && all == null) { return List.of(); } if (typeSpecific != null && all == null) { 
return typeSpecific; } else if (typeSpecific == null && all != null) { return all; } else { List<OidcRequestFilter> combined = new ArrayList<>(typeSpecific.size() + all.size()); combined.addAll(typeSpecific); combined.addAll(all); return combined; } } public static Uni<HttpResponse<Buffer>> sendRequest(io.vertx.core.Vertx vertx, HttpRequest<Buffer> request, boolean blockingDnsLookup) { if (blockingDnsLookup) { return sendRequest(new Vertx(vertx), request, true); } else { return request.send(); } } public static Uni<HttpResponse<Buffer>> sendRequest(Vertx vertx, HttpRequest<Buffer> request, boolean blockingDnsLookup) { if (blockingDnsLookup) { return vertx.executeBlocking(new Callable<Void>() { @ }).flatMap(new Function<Void, Uni<? extends HttpResponse<Buffer>>>() { @Override public Uni<? extends HttpResponse<Buffer>> apply(Void unused) { return request.send(); } }); } else { return request.send(); } } }
I think we can remove the DEFAULT_CLUSTER also. The feature is useless but only has compatibility.
public TGetDBPrivsResult getDBPrivs(TGetDBPrivsParams params) throws TException { LOG.debug("get database privileges request: {}", params); TGetDBPrivsResult result = new TGetDBPrivsResult(); List<TDBPrivDesc> tDBPrivs = Lists.newArrayList(); result.setDb_privs(tDBPrivs); UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident); List<DbPrivEntry> dbPrivEntries = GlobalStateMgr.getCurrentState().getAuth().getDBPrivEntries(currentUser); for (DbPrivEntry entry : dbPrivEntries) { PrivBitSet savedPrivs = entry.getPrivSet(); String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER; String userIdentStr = currentUser.toString().replace(clusterPrefix, ""); String dbName = entry.getOrigDb(); boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT); List<TDBPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map( priv -> { TDBPrivDesc privDesc = new TDBPrivDesc(); privDesc.setDb_name(dbName); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv.getUpperNameForMysql()); return privDesc; } ).collect(Collectors.toList()); if (savedPrivs.satisfy(PrivPredicate.LOAD)) { tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> { TDBPrivDesc privDesc = new TDBPrivDesc(); privDesc.setDb_name(dbName); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv); return privDesc; }).collect(Collectors.toList())); } tDBPrivs.addAll(tPrivs); } return result; }
String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER;
public TGetDBPrivsResult getDBPrivs(TGetDBPrivsParams params) throws TException { LOG.debug("get database privileges request: {}", params); TGetDBPrivsResult result = new TGetDBPrivsResult(); List<TDBPrivDesc> tDBPrivs = Lists.newArrayList(); result.setDb_privs(tDBPrivs); UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident); List<DbPrivEntry> dbPrivEntries = GlobalStateMgr.getCurrentState().getAuth().getDBPrivEntries(currentUser); for (DbPrivEntry entry : dbPrivEntries) { PrivBitSet savedPrivs = entry.getPrivSet(); String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER; String userIdentStr = currentUser.toString().replace(clusterPrefix, ""); String dbName = entry.getOrigDb(); boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT); List<TDBPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map( priv -> { TDBPrivDesc privDesc = new TDBPrivDesc(); privDesc.setDb_name(dbName); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv.getUpperNameForMysql()); return privDesc; } ).collect(Collectors.toList()); if (savedPrivs.satisfy(PrivPredicate.LOAD)) { tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> { TDBPrivDesc privDesc = new TDBPrivDesc(); privDesc.setDb_name(dbName); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv); return privDesc; }).collect(Collectors.toList())); } tDBPrivs.addAll(tPrivs); } return result; }
class FrontendServiceImpl implements FrontendService.Iface { private static final Logger LOG = LogManager.getLogger(LeaderImpl.class); private LeaderImpl leaderImpl; private ExecuteEnv exeEnv; public FrontendServiceImpl(ExecuteEnv exeEnv) { leaderImpl = new LeaderImpl(); this.exeEnv = exeEnv; } @Override public TGetDbsResult getDbNames(TGetDbsParams params) throws TException { LOG.debug("get db request: {}", params); TGetDbsResult result = new TGetDbsResult(); List<String> dbs = Lists.newArrayList(); PatternMatcher matcher = null; if (params.isSetPattern()) { try { matcher = PatternMatcher.createMysqlPattern(params.getPattern(), CaseSensibility.DATABASE.getCaseSensibility()); } catch (AnalysisException e) { throw new TException("Pattern is in bad format: " + params.getPattern()); } } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); List<String> dbNames = globalStateMgr.getDbNames(); LOG.debug("get db names: {}", dbNames); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } for (String fullName : dbNames) { if (!globalStateMgr.getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) { continue; } final String db = ClusterNamespace.getNameFromFullName(fullName); if (matcher != null && !matcher.match(db)) { continue; } dbs.add(fullName); } result.setDbs(dbs); return result; } @Override public TGetTablesResult getTableNames(TGetTablesParams params) throws TException { LOG.debug("get table name request: {}", params); TGetTablesResult result = new TGetTablesResult(); List<String> tablesResult = Lists.newArrayList(); result.setTables(tablesResult); PatternMatcher matcher = null; if (params.isSetPattern()) { try { matcher = PatternMatcher.createMysqlPattern(params.getPattern(), CaseSensibility.TABLE.getCaseSensibility()); } catch (AnalysisException e) { throw new 
TException("Pattern is in bad format: " + params.getPattern()); } } Database db = GlobalStateMgr.getCurrentState().getDb(params.db); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } if (db != null) { for (String tableName : db.getTableNamesWithLock()) { LOG.debug("get table: {}, wait to check", tableName); if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db, tableName, PrivPredicate.SHOW)) { continue; } if (matcher != null && !matcher.match(tableName)) { continue; } tablesResult.add(tableName); } } return result; } @Override public TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException { LOG.debug("get list table request: {}", params); TListTableStatusResult result = new TListTableStatusResult(); List<TTableStatus> tablesResult = Lists.newArrayList(); result.setTables(tablesResult); PatternMatcher matcher = null; if (params.isSetPattern()) { try { matcher = PatternMatcher.createMysqlPattern(params.getPattern(), CaseSensibility.TABLE.getCaseSensibility()); } catch (AnalysisException e) { throw new TException("Pattern is in bad format " + params.getPattern()); } } Database db = GlobalStateMgr.getCurrentState().getDb(params.db); long limit = params.isSetLimit() ? 
params.getLimit() : -1; UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } if (params.isSetType() && TTableType.MATERIALIZED_VIEW.equals(params.getType())) { listMaterializedViewStatus(tablesResult, limit, matcher, currentUser, params.db); return result; } if (db != null) { db.readLock(); try { boolean listingViews = params.isSetType() && TTableType.VIEW.equals(params.getType()); List<Table> tables = listingViews ? db.getViews() : db.getTables(); for (Table table : tables) { if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db, table.getName(), PrivPredicate.SHOW)) { continue; } if (matcher != null && !matcher.match(table.getName())) { continue; } TTableStatus status = new TTableStatus(); status.setName(table.getName()); status.setType(table.getMysqlType()); status.setEngine(table.getEngine()); status.setComment(table.getComment()); status.setCreate_time(table.getCreateTime()); status.setLast_check_time(table.getLastCheckTime()); if (listingViews) { View view = (View) table; String ddlSql = view.getInlineViewDef(); List<TableRef> tblRefs = new ArrayList<>(); view.getQueryStmt().collectTableRefs(tblRefs); for (TableRef tblRef : tblRefs) { if (!GlobalStateMgr.getCurrentState().getAuth() .checkTblPriv(currentUser, tblRef.getName().getDb(), tblRef.getName().getTbl(), PrivPredicate.SHOW)) { ddlSql = ""; break; } } status.setDdl_sql(ddlSql); } tablesResult.add(status); if (limit > 0 && tablesResult.size() >= limit) { break; } } } finally { db.readUnlock(); } } return result; } public void listMaterializedViewStatus(List<TTableStatus> tablesResult, long limit, PatternMatcher matcher, UserIdentity currentUser, String dbName) { Database db = GlobalStateMgr.getCurrentState().getDb(dbName); if (db == null) { LOG.warn("database not exists: {}", dbName); return; } 
db.readLock(); try { for (Table materializedView : db.getMaterializedViews()) { if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, dbName, materializedView.getName(), PrivPredicate.SHOW)) { continue; } if (matcher != null && !matcher.match(materializedView.getName())) { continue; } MaterializedView mvTable = (MaterializedView) materializedView; List<String> createTableStmt = Lists.newArrayList(); GlobalStateMgr.getDdlStmt(mvTable, createTableStmt, null, null, false, true); String ddlSql = createTableStmt.get(0); TTableStatus status = new TTableStatus(); status.setId(String.valueOf(mvTable.getId())); status.setName(mvTable.getName()); status.setDdl_sql(ddlSql); status.setRows(String.valueOf(mvTable.getRowCount())); status.setType(mvTable.getMysqlType()); status.setComment(mvTable.getComment()); tablesResult.add(status); if (limit > 0 && tablesResult.size() >= limit) { return; } } for (Table table : db.getTables()) { if (table.getType() == Table.TableType.OLAP) { OlapTable olapTable = (OlapTable) table; List<MaterializedIndex> visibleMaterializedViews = olapTable.getVisibleIndex(); long baseIdx = olapTable.getBaseIndexId(); for (MaterializedIndex mvIdx : visibleMaterializedViews) { if (baseIdx == mvIdx.getId()) { continue; } if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) { continue; } MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId()); TTableStatus status = new TTableStatus(); status.setId(String.valueOf(mvIdx.getId())); status.setName(olapTable.getIndexNameById(mvIdx.getId())); if (mvMeta.getOriginStmt() == null) { StringBuilder originStmtBuilder = new StringBuilder( "create materialized view " + olapTable.getIndexNameById(mvIdx.getId()) + " as select "); String groupByString = ""; for (Column column : mvMeta.getSchema()) { if (column.isKey()) { groupByString += column.getName() + ","; } } originStmtBuilder.append(groupByString); for (Column column : mvMeta.getSchema()) { if 
(!column.isKey()) { originStmtBuilder.append(column.getAggregationType().toString()).append("(") .append(column.getName()).append(")").append(","); } } originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length()); originStmtBuilder.append(" from ").append(olapTable.getName()).append(" group by ") .append(groupByString); originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length()); status.setDdl_sql(originStmtBuilder.toString()); } else { status.setDdl_sql(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "") .replaceAll("[ ]+", " ")); } status.setRows(String.valueOf(mvIdx.getRowCount())); status.setType(""); status.setComment(""); tablesResult.add(status); if (limit > 0 && tablesResult.size() >= limit) { return; } } } } } finally { db.readUnlock(); } } @Override public TGetTaskInfoResult getTasks(TGetTasksParams params) throws TException { LOG.debug("get show task request: {}", params); TGetTaskInfoResult result = new TGetTaskInfoResult(); List<TTaskInfo> tasksResult = Lists.newArrayList(); result.setTasks(tasksResult); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); TaskManager taskManager = globalStateMgr.getTaskManager(); List<Task> taskList = taskManager.showTasks(null); for (Task task : taskList) { if (!globalStateMgr.getAuth().checkDbPriv(currentUser, task.getDbName(), PrivPredicate.SHOW)) { continue; } TTaskInfo info = new TTaskInfo(); info.setTask_name(task.getName()); info.setCreate_time(task.getCreateTime() / 1000); String scheduleStr = task.getType().name(); if (task.getType() == Constants.TaskType.PERIODICAL) { scheduleStr += task.getSchedule(); } info.setSchedule(scheduleStr); info.setDatabase(ClusterNamespace.getNameFromFullName(task.getDbName())); info.setDefinition(task.getDefinition()); info.setExpire_time(task.getExpireTime() / 1000); 
tasksResult.add(info); } return result; } @Override public TGetTaskRunInfoResult getTaskRuns(TGetTasksParams params) throws TException { LOG.debug("get show task run request: {}", params); TGetTaskRunInfoResult result = new TGetTaskRunInfoResult(); List<TTaskRunInfo> tasksResult = Lists.newArrayList(); result.setTask_runs(tasksResult); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); TaskManager taskManager = globalStateMgr.getTaskManager(); List<TaskRunStatus> taskRunList = taskManager.showTaskRunStatus(null); for (TaskRunStatus status : taskRunList) { if (!globalStateMgr.getAuth().checkDbPriv(currentUser, status.getDbName(), PrivPredicate.SHOW)) { continue; } TTaskRunInfo info = new TTaskRunInfo(); info.setQuery_id(status.getQueryId()); info.setTask_name(status.getTaskName()); info.setCreate_time(status.getCreateTime() / 1000); info.setFinish_time(status.getFinishTime() / 1000); info.setState(status.getState().toString()); info.setDatabase(ClusterNamespace.getNameFromFullName(status.getDbName())); info.setDefinition(status.getDefinition()); info.setError_code(status.getErrorCode()); info.setError_message(status.getErrorMessage()); info.setExpire_time(status.getExpireTime() / 1000); tasksResult.add(info); } return result; } @Override @Override public TGetTablePrivsResult getTablePrivs(TGetTablePrivsParams params) throws TException { LOG.debug("get table privileges request: {}", params); TGetTablePrivsResult result = new TGetTablePrivsResult(); List<TTablePrivDesc> tTablePrivs = Lists.newArrayList(); result.setTable_privs(tTablePrivs); UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident); List<TablePrivEntry> tablePrivEntries = GlobalStateMgr.getCurrentState().getAuth().getTablePrivEntries(currentUser); for (TablePrivEntry entry : tablePrivEntries) { PrivBitSet savedPrivs = 
entry.getPrivSet(); String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER; String userIdentStr = currentUser.toString().replace(clusterPrefix, ""); String dbName = entry.getOrigDb(); boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT); List<TTablePrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map( priv -> { TTablePrivDesc privDesc = new TTablePrivDesc(); privDesc.setDb_name(dbName); privDesc.setTable_name(entry.getOrigTbl()); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv.getUpperNameForMysql()); return privDesc; } ).collect(Collectors.toList()); if (savedPrivs.satisfy(PrivPredicate.LOAD)) { tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> { TTablePrivDesc privDesc = new TTablePrivDesc(); privDesc.setDb_name(dbName); privDesc.setTable_name(entry.getOrigTbl()); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv); return privDesc; }).collect(Collectors.toList())); } tTablePrivs.addAll(tPrivs); } return result; } @Override public TGetUserPrivsResult getUserPrivs(TGetUserPrivsParams params) throws TException { LOG.debug("get user privileges request: {}", params); TGetUserPrivsResult result = new TGetUserPrivsResult(); List<TUserPrivDesc> tUserPrivs = Lists.newArrayList(); result.setUser_privs(tUserPrivs); UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident); Auth currAuth = GlobalStateMgr.getCurrentState().getAuth(); UserPrivTable userPrivTable = currAuth.getUserPrivTable(); List<UserIdentity> userIdents = Lists.newArrayList(); userIdents.add(currentUser); for (UserIdentity userIdent : userIdents) { PrivBitSet savedPrivs = new PrivBitSet(); userPrivTable.getPrivs(userIdent, savedPrivs); String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER; String userIdentStr = 
currentUser.toString().replace(clusterPrefix, ""); List<TUserPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map( priv -> { boolean isGrantable = Privilege.NODE_PRIV != priv && userPrivTable.hasPriv(userIdent, PrivPredicate.GRANT); TUserPrivDesc privDesc = new TUserPrivDesc(); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv.getUpperNameForMysql()); return privDesc; } ).collect(Collectors.toList()); tUserPrivs.addAll(tPrivs); } return result; } @Override public TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException { TStatus status = new TStatus(TStatusCode.OK); TFeResult result = new TFeResult(FrontendServiceVersion.V1, status); return result; } @Override public TDescribeTableResult describeTable(TDescribeTableParams params) throws TException { LOG.debug("get desc table request: {}", params); TDescribeTableResult result = new TDescribeTableResult(); List<TColumnDef> columns = Lists.newArrayList(); result.setColumns(columns); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } long limit = params.isSetLimit() ? 
params.getLimit() : -1; if (!params.isSetDb() && StringUtils.isBlank(params.getTable_name())) { describeWithoutDbAndTable(currentUser, columns, limit); return result; } if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db, params.getTable_name(), PrivPredicate.SHOW)) { return result; } Database db = GlobalStateMgr.getCurrentState().getDb(params.db); if (db != null) { db.readLock(); try { Table table = db.getTable(params.getTable_name()); setColumnDesc(columns, table, limit, false, params.db, params.getTable_name()); } finally { db.readUnlock(); } } return result; } private void describeWithoutDbAndTable(UserIdentity currentUser, List<TColumnDef> columns, long limit) { GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); List<String> dbNames = globalStateMgr.getDbNames(); boolean reachLimit; for (String fullName : dbNames) { if (!GlobalStateMgr.getCurrentState().getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) { continue; } Database db = GlobalStateMgr.getCurrentState().getDb(fullName); if (db != null) { for (String tableName : db.getTableNamesWithLock()) { LOG.debug("get table: {}, wait to check", tableName); if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, fullName, tableName, PrivPredicate.SHOW)) { continue; } db.readLock(); try { Table table = db.getTable(tableName); reachLimit = setColumnDesc(columns, table, limit, true, fullName, tableName); } finally { db.readUnlock(); } if (reachLimit) { return; } } } } } private boolean setColumnDesc(List<TColumnDef> columns, Table table, long limit, boolean needSetDbAndTable, String db, String tbl) { if (table != null) { String tableKeysType = ""; if (TableType.OLAP.equals(table.getType())) { OlapTable olapTable = (OlapTable) table; tableKeysType = olapTable.getKeysType().name().substring(0, 3).toUpperCase(); } for (Column column : table.getBaseSchema()) { final TColumnDesc desc = new TColumnDesc(column.getName(), 
column.getPrimitiveType().toThrift()); final Integer precision = column.getType().getPrecision(); if (precision != null) { desc.setColumnPrecision(precision); } final Integer columnLength = column.getType().getColumnSize(); if (columnLength != null) { desc.setColumnLength(columnLength); } final Integer decimalDigits = column.getType().getDecimalDigits(); if (decimalDigits != null) { desc.setColumnScale(decimalDigits); } if (column.isKey()) { desc.setColumnKey(tableKeysType); } else { desc.setColumnKey(""); } final TColumnDef colDef = new TColumnDef(desc); final String comment = column.getComment(); if (comment != null) { colDef.setComment(comment); } columns.add(colDef); if (needSetDbAndTable) { columns.get(columns.size() - 1).columnDesc.setDbName(db); columns.get(columns.size() - 1).columnDesc.setTableName(tbl); } if (limit > 0 && columns.size() >= limit) { return true; } } } return false; } @Override public TShowVariableResult showVariables(TShowVariableRequest params) throws TException { TShowVariableResult result = new TShowVariableResult(); Map<String, String> map = Maps.newHashMap(); result.setVariables(map); ConnectContext ctx = exeEnv.getScheduler().getContext(params.getThreadId()); if (ctx == null) { return result; } List<List<String>> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()), ctx.getSessionVariable(), null); for (List<String> row : rows) { map.put(row.get(0), row.get(1)); } return result; } @Override public TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException { return QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr()); } @Override public TMasterResult finishTask(TFinishTaskRequest request) throws TException { return leaderImpl.finishTask(request); } @Override public TMasterResult report(TReportRequest request) throws TException { return leaderImpl.report(request); } @Override public TFetchResourceResult fetchResource() throws TException { throw new TException("not supported"); } 
@Override public TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException { TStatus status = new TStatus(TStatusCode.OK); TFeResult result = new TFeResult(FrontendServiceVersion.V1, status); switch (request.getFunction_name()) { case "STREAMING_MINI_LOAD": break; default: status.setStatus_code(NOT_IMPLEMENTED_ERROR); break; } return result; } @Override public TMasterOpResult forward(TMasterOpRequest params) throws TException { TNetworkAddress clientAddr = getClientAddr(); if (clientAddr != null) { Frontend fe = GlobalStateMgr.getCurrentState().getFeByHost(clientAddr.getHostname()); if (fe == null) { LOG.warn("reject request from invalid host. client: {}", clientAddr); throw new TException("request from invalid host was rejected."); } } LOG.info("receive forwarded stmt {} from FE: {}", params.getStmt_id(), clientAddr.getHostname()); ConnectContext context = new ConnectContext(null); ConnectProcessor processor = new ConnectProcessor(context); TMasterOpResult result = processor.proxyExecute(params); ConnectContext.remove(); return result; } private void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl, String clientIp, PrivPredicate predicate) throws AuthenticationException { final String fullUserName = ClusterNamespace.getFullName(user); List<UserIdentity> currentUser = Lists.newArrayList(); if (!GlobalStateMgr.getCurrentState().getAuth() .checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) { throw new AuthenticationException("Access denied for " + fullUserName + "@" + clientIp); } Preconditions.checkState(currentUser.size() == 1); if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser.get(0), db, tbl, predicate)) { throw new AuthenticationException( "Access denied; you need (at least one of) the LOAD privilege(s) for this operation"); } } @Override public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException { String clientAddr = 
getClientAddrAsString(); LOG.info("receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}", request.getDb(), request.getTbl(), request.getLabel(), clientAddr); LOG.debug("txn begin request: {}", request); TLoadTxnBeginResult result = new TLoadTxnBeginResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { result.setTxnId(loadTxnBeginImpl(request, clientAddr)); } catch (DuplicatedRequestException e) { LOG.info("duplicate request for stream load. request id: {}, txn_id: {}", e.getDuplicatedRequestId(), e.getTxnId()); result.setTxnId(e.getTxnId()); } catch (LabelAlreadyUsedException e) { status.setStatus_code(TStatusCode.LABEL_ALREADY_EXISTS); status.addToError_msgs(e.getMessage()); result.setJob_status(e.getJobStatus()); } catch (UserException e) { LOG.warn("failed to begin: {}", e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); if (Strings.isNullOrEmpty(request.getLabel())) { throw new UserException("empty label in begin request"); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if 
(db == null) { throw new UserException("unknown database, database=" + dbName); } Table table = db.getTable(request.getTbl()); if (table == null) { throw new UserException("unknown table \"" + request.getDb() + "." + request.getTbl() + "\""); } long timeoutSecond = request.isSetTimeout() ? request.getTimeout() : Config.stream_load_default_timeout_second; MetricRepo.COUNTER_LOAD_ADD.increase(1L); return GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction( db.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequest_id(), new TxnCoordinator(TxnSourceType.BE, clientIp), TransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond); } @Override public TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException { String clientAddr = getClientAddrAsString(); LOG.info("receive txn commit request. db: {}, tbl: {}, txn_id: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), clientAddr); LOG.debug("txn commit request: {}", request); TLoadTxnCommitResult result = new TLoadTxnCommitResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { if (!loadTxnCommitImpl(request)) { status.setStatus_code(TStatusCode.PUBLISH_TIMEOUT); status.addToError_msgs("Publish timeout. 
The data will be visible after a while"); } } catch (UserException e) { LOG.warn("failed to commit txn_id: {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private boolean loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } if (request.isSetAuth_code()) { } else { checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if (db == null) { throw new UserException("unknown database, database=" + dbName); } TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment); long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? 
request.getThrift_rpc_timeout_ms() : 5000; timeoutMs = timeoutMs * 3 / 4; boolean ret = GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction( db, request.getTxnId(), TabletCommitInfo.fromThrift(request.getCommitInfos()), timeoutMs, attachment); if (!ret) { return ret; } MetricRepo.COUNTER_LOAD_FINISHED.increase(1L); if (null == attachment) { return ret; } Table tbl = db.getTable(request.getTbl()); if (null == tbl) { return ret; } TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tbl.getId()); switch (request.txnCommitAttachment.getLoadType()) { case ROUTINE_LOAD: if (!(attachment instanceof RLTaskTxnCommitAttachment)) { break; } RLTaskTxnCommitAttachment routineAttachment = (RLTaskTxnCommitAttachment) attachment; entity.counterRoutineLoadFinishedTotal.increase(1L); entity.counterRoutineLoadBytesTotal.increase(routineAttachment.getReceivedBytes()); entity.counterRoutineLoadRowsTotal.increase(routineAttachment.getLoadedRows()); break; case MANUAL_LOAD: if (!(attachment instanceof ManualLoadTxnCommitAttachment)) { break; } ManualLoadTxnCommitAttachment streamAttachment = (ManualLoadTxnCommitAttachment) attachment; entity.counterStreamLoadFinishedTotal.increase(1L); entity.counterStreamLoadBytesTotal.increase(streamAttachment.getReceivedBytes()); entity.counterStreamLoadRowsTotal.increase(streamAttachment.getLoadedRows()); break; default: break; } return ret; } @Override public TLoadTxnCommitResult loadTxnPrepare(TLoadTxnCommitRequest request) throws TException { String clientAddr = getClientAddrAsString(); LOG.info("receive txn prepare request. 
db: {}, tbl: {}, txn_id: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), clientAddr); LOG.debug("txn prepare request: {}", request); TLoadTxnCommitResult result = new TLoadTxnCommitResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { loadTxnPrepareImpl(request); } catch (UserException e) { LOG.warn("failed to prepare txn_id: {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private void loadTxnPrepareImpl(TLoadTxnCommitRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } if (request.isSetAuth_code()) { } else { checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if (db == null) { throw new UserException("unknown database, database=" + dbName); } TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment); GlobalStateMgr.getCurrentGlobalTransactionMgr().prepareTransaction( db.getId(), request.getTxnId(), TabletCommitInfo.fromThrift(request.getCommitInfos()), attachment); } @Override public TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException { String clientAddr = 
getClientAddrAsString(); LOG.info("receive txn rollback request. db: {}, tbl: {}, txn_id: {}, reason: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), request.getReason(), clientAddr); LOG.debug("txn rollback request: {}", request); TLoadTxnRollbackResult result = new TLoadTxnRollbackResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { loadTxnRollbackImpl(request); } catch (TransactionNotFoundException e) { LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.TXN_NOT_EXISTS); status.addToError_msgs(e.getMessage()); } catch (UserException e) { LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private void loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } if (request.isSetAuth_code()) { } else { checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); } String dbName = request.getDb(); Database db = GlobalStateMgr.getCurrentState().getDb(dbName); if (db == null) { throw new MetaNotFoundException("db " + dbName + " does not exist"); } long dbId = db.getId(); GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(), request.isSetReason() ? 
request.getReason() : "system cancel", TxnCommitAttachment.fromThrift(request.getTxnCommitAttachment())); } @Override public TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) { String clientAddr = getClientAddrAsString(); LOG.info("receive stream load put request. db:{}, tbl: {}, txn_id: {}, load id: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), DebugUtil.printId(request.getLoadId()), clientAddr); LOG.debug("stream load put request: {}", request); TStreamLoadPutResult result = new TStreamLoadPutResult(); TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { result.setParams(streamLoadPutImpl(request)); } catch (UserException e) { LOG.warn("failed to get stream load plan: {}", e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if (db == null) { throw new UserException("unknown database, database=" + dbName); } long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? 
request.getThrift_rpc_timeout_ms() : 5000; if (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) { throw new UserException("get database read lock timeout, database=" + dbName); } try { Table table = db.getTable(request.getTbl()); if (table == null) { throw new UserException("unknown table, table=" + request.getTbl()); } if (!(table instanceof OlapTable)) { throw new UserException("load table type is not OlapTable, type=" + table.getClass()); } if (table instanceof MaterializedView) { throw new UserException(String.format( "The data of '%s' cannot be inserted because '%s' is a materialized view," + "and the data of materialized view must be consistent with the base table.", table.getName(), table.getName())); } StreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db); StreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask); TExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId()); TransactionState txnState = GlobalStateMgr.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId()); if (txnState == null) { throw new UserException("txn does not exist: " + request.getTxnId()); } txnState.addTableIndexes((OlapTable) table); return plan; } finally { db.readUnlock(); } } @Override public TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException { if (GlobalStateMgr.getCurrentState().getBackupHandler().report(request.getTask_type(), request.getJob_id(), request.getTask_id(), request.getFinished_num(), request.getTotal_num())) { return new TStatus(TStatusCode.OK); } return new TStatus(TStatusCode.CANCELLED); } @Override public TRefreshTableResponse refreshTable(TRefreshTableRequest request) throws TException { try { if (request.getCatalog_name() == null) { request.setCatalog_name(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME); } GlobalStateMgr.getCurrentState().refreshExternalTable(new TableName(request.getCatalog_name(), request.getDb_name(), 
request.getTable_name()), request.getPartitions()); return new TRefreshTableResponse(new TStatus(TStatusCode.OK)); } catch (DdlException e) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList(e.getMessage())); return new TRefreshTableResponse(status); } } private TNetworkAddress getClientAddr() { ThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext(); if (connectionContext != null) { return connectionContext.getClient(); } return null; } private String getClientAddrAsString() { TNetworkAddress addr = getClientAddr(); return addr == null ? "unknown" : addr.hostname; } @Override public TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) throws TException { return leaderImpl.getTableMeta(request); } @Override public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException { return leaderImpl.beginRemoteTxn(request); } @Override public TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException { return leaderImpl.commitRemoteTxn(request); } @Override public TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException { return leaderImpl.abortRemoteTxn(request); } @Override public TSetConfigResponse setConfig(TSetConfigRequest request) throws TException { try { Preconditions.checkState(request.getKeys().size() == request.getValues().size()); Map<String, String> configs = new HashMap<>(); for (int i = 0; i < request.getKeys().size(); i++) { configs.put(request.getKeys().get(i), request.getValues().get(i)); } GlobalStateMgr.getCurrentState().setFrontendConfig(configs); return new TSetConfigResponse(new TStatus(TStatusCode.OK)); } catch (DdlException e) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList(e.getMessage())); return new TSetConfigResponse(status); } } }
class FrontendServiceImpl implements FrontendService.Iface { private static final Logger LOG = LogManager.getLogger(LeaderImpl.class); private LeaderImpl leaderImpl; private ExecuteEnv exeEnv; public FrontendServiceImpl(ExecuteEnv exeEnv) { leaderImpl = new LeaderImpl(); this.exeEnv = exeEnv; } @Override public TGetDbsResult getDbNames(TGetDbsParams params) throws TException { LOG.debug("get db request: {}", params); TGetDbsResult result = new TGetDbsResult(); List<String> dbs = Lists.newArrayList(); PatternMatcher matcher = null; if (params.isSetPattern()) { try { matcher = PatternMatcher.createMysqlPattern(params.getPattern(), CaseSensibility.DATABASE.getCaseSensibility()); } catch (AnalysisException e) { throw new TException("Pattern is in bad format: " + params.getPattern()); } } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); List<String> dbNames = globalStateMgr.getDbNames(); LOG.debug("get db names: {}", dbNames); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } for (String fullName : dbNames) { if (!globalStateMgr.getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) { continue; } final String db = ClusterNamespace.getNameFromFullName(fullName); if (matcher != null && !matcher.match(db)) { continue; } dbs.add(fullName); } result.setDbs(dbs); return result; } @Override public TGetTablesResult getTableNames(TGetTablesParams params) throws TException { LOG.debug("get table name request: {}", params); TGetTablesResult result = new TGetTablesResult(); List<String> tablesResult = Lists.newArrayList(); result.setTables(tablesResult); PatternMatcher matcher = null; if (params.isSetPattern()) { try { matcher = PatternMatcher.createMysqlPattern(params.getPattern(), CaseSensibility.TABLE.getCaseSensibility()); } catch (AnalysisException e) { throw new 
TException("Pattern is in bad format: " + params.getPattern()); } } Database db = GlobalStateMgr.getCurrentState().getDb(params.db); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } if (db != null) { for (String tableName : db.getTableNamesWithLock()) { LOG.debug("get table: {}, wait to check", tableName); if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db, tableName, PrivPredicate.SHOW)) { continue; } if (matcher != null && !matcher.match(tableName)) { continue; } tablesResult.add(tableName); } } return result; } @Override public TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException { LOG.debug("get list table request: {}", params); TListTableStatusResult result = new TListTableStatusResult(); List<TTableStatus> tablesResult = Lists.newArrayList(); result.setTables(tablesResult); PatternMatcher matcher = null; if (params.isSetPattern()) { try { matcher = PatternMatcher.createMysqlPattern(params.getPattern(), CaseSensibility.TABLE.getCaseSensibility()); } catch (AnalysisException e) { throw new TException("Pattern is in bad format " + params.getPattern()); } } Database db = GlobalStateMgr.getCurrentState().getDb(params.db); long limit = params.isSetLimit() ? 
params.getLimit() : -1; UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } if (params.isSetType() && TTableType.MATERIALIZED_VIEW.equals(params.getType())) { listMaterializedViewStatus(tablesResult, limit, matcher, currentUser, params.db); return result; } if (db != null) { db.readLock(); try { boolean listingViews = params.isSetType() && TTableType.VIEW.equals(params.getType()); List<Table> tables = listingViews ? db.getViews() : db.getTables(); for (Table table : tables) { if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db, table.getName(), PrivPredicate.SHOW)) { continue; } if (matcher != null && !matcher.match(table.getName())) { continue; } TTableStatus status = new TTableStatus(); status.setName(table.getName()); status.setType(table.getMysqlType()); status.setEngine(table.getEngine()); status.setComment(table.getComment()); status.setCreate_time(table.getCreateTime()); status.setLast_check_time(table.getLastCheckTime()); if (listingViews) { View view = (View) table; String ddlSql = view.getInlineViewDef(); List<TableRef> tblRefs = new ArrayList<>(); view.getQueryStmt().collectTableRefs(tblRefs); for (TableRef tblRef : tblRefs) { if (!GlobalStateMgr.getCurrentState().getAuth() .checkTblPriv(currentUser, tblRef.getName().getDb(), tblRef.getName().getTbl(), PrivPredicate.SHOW)) { ddlSql = ""; break; } } status.setDdl_sql(ddlSql); } tablesResult.add(status); if (limit > 0 && tablesResult.size() >= limit) { break; } } } finally { db.readUnlock(); } } return result; } public void listMaterializedViewStatus(List<TTableStatus> tablesResult, long limit, PatternMatcher matcher, UserIdentity currentUser, String dbName) { Database db = GlobalStateMgr.getCurrentState().getDb(dbName); if (db == null) { LOG.warn("database not exists: {}", dbName); return; } 
db.readLock(); try { for (Table materializedView : db.getMaterializedViews()) { if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, dbName, materializedView.getName(), PrivPredicate.SHOW)) { continue; } if (matcher != null && !matcher.match(materializedView.getName())) { continue; } MaterializedView mvTable = (MaterializedView) materializedView; List<String> createTableStmt = Lists.newArrayList(); GlobalStateMgr.getDdlStmt(mvTable, createTableStmt, null, null, false, true); String ddlSql = createTableStmt.get(0); TTableStatus status = new TTableStatus(); status.setId(String.valueOf(mvTable.getId())); status.setName(mvTable.getName()); status.setDdl_sql(ddlSql); status.setRows(String.valueOf(mvTable.getRowCount())); status.setType(mvTable.getMysqlType()); status.setComment(mvTable.getComment()); tablesResult.add(status); if (limit > 0 && tablesResult.size() >= limit) { return; } } for (Table table : db.getTables()) { if (table.getType() == Table.TableType.OLAP) { OlapTable olapTable = (OlapTable) table; List<MaterializedIndex> visibleMaterializedViews = olapTable.getVisibleIndex(); long baseIdx = olapTable.getBaseIndexId(); for (MaterializedIndex mvIdx : visibleMaterializedViews) { if (baseIdx == mvIdx.getId()) { continue; } if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) { continue; } MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId()); TTableStatus status = new TTableStatus(); status.setId(String.valueOf(mvIdx.getId())); status.setName(olapTable.getIndexNameById(mvIdx.getId())); if (mvMeta.getOriginStmt() == null) { StringBuilder originStmtBuilder = new StringBuilder( "create materialized view " + olapTable.getIndexNameById(mvIdx.getId()) + " as select "); String groupByString = ""; for (Column column : mvMeta.getSchema()) { if (column.isKey()) { groupByString += column.getName() + ","; } } originStmtBuilder.append(groupByString); for (Column column : mvMeta.getSchema()) { if 
(!column.isKey()) { originStmtBuilder.append(column.getAggregationType().toString()).append("(") .append(column.getName()).append(")").append(","); } } originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length()); originStmtBuilder.append(" from ").append(olapTable.getName()).append(" group by ") .append(groupByString); originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length()); status.setDdl_sql(originStmtBuilder.toString()); } else { status.setDdl_sql(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "") .replaceAll("[ ]+", " ")); } status.setRows(String.valueOf(mvIdx.getRowCount())); status.setType(""); status.setComment(""); tablesResult.add(status); if (limit > 0 && tablesResult.size() >= limit) { return; } } } } } finally { db.readUnlock(); } } @Override public TGetTaskInfoResult getTasks(TGetTasksParams params) throws TException { LOG.debug("get show task request: {}", params); TGetTaskInfoResult result = new TGetTaskInfoResult(); List<TTaskInfo> tasksResult = Lists.newArrayList(); result.setTasks(tasksResult); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); TaskManager taskManager = globalStateMgr.getTaskManager(); List<Task> taskList = taskManager.showTasks(null); for (Task task : taskList) { if (!globalStateMgr.getAuth().checkDbPriv(currentUser, task.getDbName(), PrivPredicate.SHOW)) { continue; } TTaskInfo info = new TTaskInfo(); info.setTask_name(task.getName()); info.setCreate_time(task.getCreateTime() / 1000); String scheduleStr = task.getType().name(); if (task.getType() == Constants.TaskType.PERIODICAL) { scheduleStr += task.getSchedule(); } info.setSchedule(scheduleStr); info.setDatabase(ClusterNamespace.getNameFromFullName(task.getDbName())); info.setDefinition(task.getDefinition()); info.setExpire_time(task.getExpireTime() / 1000); 
tasksResult.add(info); } return result; } @Override public TGetTaskRunInfoResult getTaskRuns(TGetTasksParams params) throws TException { LOG.debug("get show task run request: {}", params); TGetTaskRunInfoResult result = new TGetTaskRunInfoResult(); List<TTaskRunInfo> tasksResult = Lists.newArrayList(); result.setTask_runs(tasksResult); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); TaskManager taskManager = globalStateMgr.getTaskManager(); List<TaskRunStatus> taskRunList = taskManager.showTaskRunStatus(null); for (TaskRunStatus status : taskRunList) { if (!globalStateMgr.getAuth().checkDbPriv(currentUser, status.getDbName(), PrivPredicate.SHOW)) { continue; } TTaskRunInfo info = new TTaskRunInfo(); info.setQuery_id(status.getQueryId()); info.setTask_name(status.getTaskName()); info.setCreate_time(status.getCreateTime() / 1000); info.setFinish_time(status.getFinishTime() / 1000); info.setState(status.getState().toString()); info.setDatabase(ClusterNamespace.getNameFromFullName(status.getDbName())); info.setDefinition(status.getDefinition()); info.setError_code(status.getErrorCode()); info.setError_message(status.getErrorMessage()); info.setExpire_time(status.getExpireTime() / 1000); tasksResult.add(info); } return result; } @Override @Override public TGetTablePrivsResult getTablePrivs(TGetTablePrivsParams params) throws TException { LOG.debug("get table privileges request: {}", params); TGetTablePrivsResult result = new TGetTablePrivsResult(); List<TTablePrivDesc> tTablePrivs = Lists.newArrayList(); result.setTable_privs(tTablePrivs); UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident); List<TablePrivEntry> tablePrivEntries = GlobalStateMgr.getCurrentState().getAuth().getTablePrivEntries(currentUser); for (TablePrivEntry entry : tablePrivEntries) { PrivBitSet savedPrivs = 
entry.getPrivSet(); String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER; String userIdentStr = currentUser.toString().replace(clusterPrefix, ""); String dbName = entry.getOrigDb(); boolean isGrantable = savedPrivs.satisfy(PrivPredicate.GRANT); List<TTablePrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map( priv -> { TTablePrivDesc privDesc = new TTablePrivDesc(); privDesc.setDb_name(dbName); privDesc.setTable_name(entry.getOrigTbl()); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv.getUpperNameForMysql()); return privDesc; } ).collect(Collectors.toList()); if (savedPrivs.satisfy(PrivPredicate.LOAD)) { tPrivs.addAll(Lists.newArrayList("INSERT", "UPDATE", "DELETE").stream().map(priv -> { TTablePrivDesc privDesc = new TTablePrivDesc(); privDesc.setDb_name(dbName); privDesc.setTable_name(entry.getOrigTbl()); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv); return privDesc; }).collect(Collectors.toList())); } tTablePrivs.addAll(tPrivs); } return result; } @Override public TGetUserPrivsResult getUserPrivs(TGetUserPrivsParams params) throws TException { LOG.debug("get user privileges request: {}", params); TGetUserPrivsResult result = new TGetUserPrivsResult(); List<TUserPrivDesc> tUserPrivs = Lists.newArrayList(); result.setUser_privs(tUserPrivs); UserIdentity currentUser = UserIdentity.fromThrift(params.current_user_ident); Auth currAuth = GlobalStateMgr.getCurrentState().getAuth(); UserPrivTable userPrivTable = currAuth.getUserPrivTable(); List<UserIdentity> userIdents = Lists.newArrayList(); userIdents.add(currentUser); for (UserIdentity userIdent : userIdents) { PrivBitSet savedPrivs = new PrivBitSet(); userPrivTable.getPrivs(userIdent, savedPrivs); String clusterPrefix = SystemInfoService.DEFAULT_CLUSTER + ClusterNamespace.CLUSTER_DELIMITER; String userIdentStr = 
currentUser.toString().replace(clusterPrefix, ""); List<TUserPrivDesc> tPrivs = savedPrivs.toPrivilegeList().stream().map( priv -> { boolean isGrantable = Privilege.NODE_PRIV != priv && userPrivTable.hasPriv(userIdent, PrivPredicate.GRANT); TUserPrivDesc privDesc = new TUserPrivDesc(); privDesc.setIs_grantable(isGrantable); privDesc.setUser_ident_str(userIdentStr); privDesc.setPriv(priv.getUpperNameForMysql()); return privDesc; } ).collect(Collectors.toList()); tUserPrivs.addAll(tPrivs); } return result; } @Override public TFeResult updateExportTaskStatus(TUpdateExportTaskStatusRequest request) throws TException { TStatus status = new TStatus(TStatusCode.OK); TFeResult result = new TFeResult(FrontendServiceVersion.V1, status); return result; } @Override public TDescribeTableResult describeTable(TDescribeTableParams params) throws TException { LOG.debug("get desc table request: {}", params); TDescribeTableResult result = new TDescribeTableResult(); List<TColumnDef> columns = Lists.newArrayList(); result.setColumns(columns); UserIdentity currentUser = null; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } long limit = params.isSetLimit() ? 
params.getLimit() : -1; if (!params.isSetDb() && StringUtils.isBlank(params.getTable_name())) { describeWithoutDbAndTable(currentUser, columns, limit); return result; } if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, params.db, params.getTable_name(), PrivPredicate.SHOW)) { return result; } Database db = GlobalStateMgr.getCurrentState().getDb(params.db); if (db != null) { db.readLock(); try { Table table = db.getTable(params.getTable_name()); setColumnDesc(columns, table, limit, false, params.db, params.getTable_name()); } finally { db.readUnlock(); } } return result; } private void describeWithoutDbAndTable(UserIdentity currentUser, List<TColumnDef> columns, long limit) { GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); List<String> dbNames = globalStateMgr.getDbNames(); boolean reachLimit; for (String fullName : dbNames) { if (!GlobalStateMgr.getCurrentState().getAuth().checkDbPriv(currentUser, fullName, PrivPredicate.SHOW)) { continue; } Database db = GlobalStateMgr.getCurrentState().getDb(fullName); if (db != null) { for (String tableName : db.getTableNamesWithLock()) { LOG.debug("get table: {}, wait to check", tableName); if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser, fullName, tableName, PrivPredicate.SHOW)) { continue; } db.readLock(); try { Table table = db.getTable(tableName); reachLimit = setColumnDesc(columns, table, limit, true, fullName, tableName); } finally { db.readUnlock(); } if (reachLimit) { return; } } } } } private boolean setColumnDesc(List<TColumnDef> columns, Table table, long limit, boolean needSetDbAndTable, String db, String tbl) { if (table != null) { String tableKeysType = ""; if (TableType.OLAP.equals(table.getType())) { OlapTable olapTable = (OlapTable) table; tableKeysType = olapTable.getKeysType().name().substring(0, 3).toUpperCase(); } for (Column column : table.getBaseSchema()) { final TColumnDesc desc = new TColumnDesc(column.getName(), 
column.getPrimitiveType().toThrift()); final Integer precision = column.getType().getPrecision(); if (precision != null) { desc.setColumnPrecision(precision); } final Integer columnLength = column.getType().getColumnSize(); if (columnLength != null) { desc.setColumnLength(columnLength); } final Integer decimalDigits = column.getType().getDecimalDigits(); if (decimalDigits != null) { desc.setColumnScale(decimalDigits); } if (column.isKey()) { desc.setColumnKey(tableKeysType); } else { desc.setColumnKey(""); } final TColumnDef colDef = new TColumnDef(desc); final String comment = column.getComment(); if (comment != null) { colDef.setComment(comment); } columns.add(colDef); if (needSetDbAndTable) { columns.get(columns.size() - 1).columnDesc.setDbName(db); columns.get(columns.size() - 1).columnDesc.setTableName(tbl); } if (limit > 0 && columns.size() >= limit) { return true; } } } return false; } @Override public TShowVariableResult showVariables(TShowVariableRequest params) throws TException { TShowVariableResult result = new TShowVariableResult(); Map<String, String> map = Maps.newHashMap(); result.setVariables(map); ConnectContext ctx = exeEnv.getScheduler().getContext(params.getThreadId()); if (ctx == null) { return result; } List<List<String>> rows = VariableMgr.dump(SetType.fromThrift(params.getVarType()), ctx.getSessionVariable(), null); for (List<String> row : rows) { map.put(row.get(0), row.get(1)); } return result; } @Override public TReportExecStatusResult reportExecStatus(TReportExecStatusParams params) throws TException { return QeProcessorImpl.INSTANCE.reportExecStatus(params, getClientAddr()); } @Override public TMasterResult finishTask(TFinishTaskRequest request) throws TException { return leaderImpl.finishTask(request); } @Override public TMasterResult report(TReportRequest request) throws TException { return leaderImpl.report(request); } @Override public TFetchResourceResult fetchResource() throws TException { throw new TException("not supported"); } 
@Override public TFeResult isMethodSupported(TIsMethodSupportedRequest request) throws TException { TStatus status = new TStatus(TStatusCode.OK); TFeResult result = new TFeResult(FrontendServiceVersion.V1, status); switch (request.getFunction_name()) { case "STREAMING_MINI_LOAD": break; default: status.setStatus_code(NOT_IMPLEMENTED_ERROR); break; } return result; } @Override public TMasterOpResult forward(TMasterOpRequest params) throws TException { TNetworkAddress clientAddr = getClientAddr(); if (clientAddr != null) { Frontend fe = GlobalStateMgr.getCurrentState().getFeByHost(clientAddr.getHostname()); if (fe == null) { LOG.warn("reject request from invalid host. client: {}", clientAddr); throw new TException("request from invalid host was rejected."); } } LOG.info("receive forwarded stmt {} from FE: {}", params.getStmt_id(), clientAddr.getHostname()); ConnectContext context = new ConnectContext(null); ConnectProcessor processor = new ConnectProcessor(context); TMasterOpResult result = processor.proxyExecute(params); ConnectContext.remove(); return result; } private void checkPasswordAndPrivs(String cluster, String user, String passwd, String db, String tbl, String clientIp, PrivPredicate predicate) throws AuthenticationException { final String fullUserName = ClusterNamespace.getFullName(user); List<UserIdentity> currentUser = Lists.newArrayList(); if (!GlobalStateMgr.getCurrentState().getAuth() .checkPlainPassword(fullUserName, clientIp, passwd, currentUser)) { throw new AuthenticationException("Access denied for " + fullUserName + "@" + clientIp); } Preconditions.checkState(currentUser.size() == 1); if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(currentUser.get(0), db, tbl, predicate)) { throw new AuthenticationException( "Access denied; you need (at least one of) the LOAD privilege(s) for this operation"); } } @Override public TLoadTxnBeginResult loadTxnBegin(TLoadTxnBeginRequest request) throws TException { String clientAddr = 
getClientAddrAsString(); LOG.info("receive txn begin request, db: {}, tbl: {}, label: {}, backend: {}", request.getDb(), request.getTbl(), request.getLabel(), clientAddr); LOG.debug("txn begin request: {}", request); TLoadTxnBeginResult result = new TLoadTxnBeginResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { result.setTxnId(loadTxnBeginImpl(request, clientAddr)); } catch (DuplicatedRequestException e) { LOG.info("duplicate request for stream load. request id: {}, txn_id: {}", e.getDuplicatedRequestId(), e.getTxnId()); result.setTxnId(e.getTxnId()); } catch (LabelAlreadyUsedException e) { status.setStatus_code(TStatusCode.LABEL_ALREADY_EXISTS); status.addToError_msgs(e.getMessage()); result.setJob_status(e.getJobStatus()); } catch (UserException e) { LOG.warn("failed to begin: {}", e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private long loadTxnBeginImpl(TLoadTxnBeginRequest request, String clientIp) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); if (Strings.isNullOrEmpty(request.getLabel())) { throw new UserException("empty label in begin request"); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if 
(db == null) { throw new UserException("unknown database, database=" + dbName); } Table table = db.getTable(request.getTbl()); if (table == null) { throw new UserException("unknown table \"" + request.getDb() + "." + request.getTbl() + "\""); } long timeoutSecond = request.isSetTimeout() ? request.getTimeout() : Config.stream_load_default_timeout_second; MetricRepo.COUNTER_LOAD_ADD.increase(1L); return GlobalStateMgr.getCurrentGlobalTransactionMgr().beginTransaction( db.getId(), Lists.newArrayList(table.getId()), request.getLabel(), request.getRequest_id(), new TxnCoordinator(TxnSourceType.BE, clientIp), TransactionState.LoadJobSourceType.BACKEND_STREAMING, -1, timeoutSecond); } @Override public TLoadTxnCommitResult loadTxnCommit(TLoadTxnCommitRequest request) throws TException { String clientAddr = getClientAddrAsString(); LOG.info("receive txn commit request. db: {}, tbl: {}, txn_id: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), clientAddr); LOG.debug("txn commit request: {}", request); TLoadTxnCommitResult result = new TLoadTxnCommitResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { if (!loadTxnCommitImpl(request)) { status.setStatus_code(TStatusCode.PUBLISH_TIMEOUT); status.addToError_msgs("Publish timeout. 
The data will be visible after a while"); } } catch (UserException e) { LOG.warn("failed to commit txn_id: {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private boolean loadTxnCommitImpl(TLoadTxnCommitRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } if (request.isSetAuth_code()) { } else { checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if (db == null) { throw new UserException("unknown database, database=" + dbName); } TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment); long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? 
request.getThrift_rpc_timeout_ms() : 5000; timeoutMs = timeoutMs * 3 / 4; boolean ret = GlobalStateMgr.getCurrentGlobalTransactionMgr().commitAndPublishTransaction( db, request.getTxnId(), TabletCommitInfo.fromThrift(request.getCommitInfos()), timeoutMs, attachment); if (!ret) { return ret; } MetricRepo.COUNTER_LOAD_FINISHED.increase(1L); if (null == attachment) { return ret; } Table tbl = db.getTable(request.getTbl()); if (null == tbl) { return ret; } TableMetricsEntity entity = TableMetricsRegistry.getInstance().getMetricsEntity(tbl.getId()); switch (request.txnCommitAttachment.getLoadType()) { case ROUTINE_LOAD: if (!(attachment instanceof RLTaskTxnCommitAttachment)) { break; } RLTaskTxnCommitAttachment routineAttachment = (RLTaskTxnCommitAttachment) attachment; entity.counterRoutineLoadFinishedTotal.increase(1L); entity.counterRoutineLoadBytesTotal.increase(routineAttachment.getReceivedBytes()); entity.counterRoutineLoadRowsTotal.increase(routineAttachment.getLoadedRows()); break; case MANUAL_LOAD: if (!(attachment instanceof ManualLoadTxnCommitAttachment)) { break; } ManualLoadTxnCommitAttachment streamAttachment = (ManualLoadTxnCommitAttachment) attachment; entity.counterStreamLoadFinishedTotal.increase(1L); entity.counterStreamLoadBytesTotal.increase(streamAttachment.getReceivedBytes()); entity.counterStreamLoadRowsTotal.increase(streamAttachment.getLoadedRows()); break; default: break; } return ret; } @Override public TLoadTxnCommitResult loadTxnPrepare(TLoadTxnCommitRequest request) throws TException { String clientAddr = getClientAddrAsString(); LOG.info("receive txn prepare request. 
db: {}, tbl: {}, txn_id: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), clientAddr); LOG.debug("txn prepare request: {}", request); TLoadTxnCommitResult result = new TLoadTxnCommitResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { loadTxnPrepareImpl(request); } catch (UserException e) { LOG.warn("failed to prepare txn_id: {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private void loadTxnPrepareImpl(TLoadTxnCommitRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } if (request.isSetAuth_code()) { } else { checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if (db == null) { throw new UserException("unknown database, database=" + dbName); } TxnCommitAttachment attachment = TxnCommitAttachment.fromThrift(request.txnCommitAttachment); GlobalStateMgr.getCurrentGlobalTransactionMgr().prepareTransaction( db.getId(), request.getTxnId(), TabletCommitInfo.fromThrift(request.getCommitInfos()), attachment); } @Override public TLoadTxnRollbackResult loadTxnRollback(TLoadTxnRollbackRequest request) throws TException { String clientAddr = 
getClientAddrAsString(); LOG.info("receive txn rollback request. db: {}, tbl: {}, txn_id: {}, reason: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), request.getReason(), clientAddr); LOG.debug("txn rollback request: {}", request); TLoadTxnRollbackResult result = new TLoadTxnRollbackResult(); if (!GlobalStateMgr.getCurrentState().isLeader()) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList("current fe is not master")); result.setStatus(status); return result; } TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { loadTxnRollbackImpl(request); } catch (TransactionNotFoundException e) { LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.TXN_NOT_EXISTS); status.addToError_msgs(e.getMessage()); } catch (UserException e) { LOG.warn("failed to rollback txn {}: {}", request.getTxnId(), e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private void loadTxnRollbackImpl(TLoadTxnRollbackRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } if (request.isSetAuth_code()) { } else { checkPasswordAndPrivs(cluster, request.getUser(), request.getPasswd(), request.getDb(), request.getTbl(), request.getUser_ip(), PrivPredicate.LOAD); } String dbName = request.getDb(); Database db = GlobalStateMgr.getCurrentState().getDb(dbName); if (db == null) { throw new MetaNotFoundException("db " + dbName + " does not exist"); } long dbId = db.getId(); GlobalStateMgr.getCurrentGlobalTransactionMgr().abortTransaction(dbId, request.getTxnId(), request.isSetReason() ? 
request.getReason() : "system cancel", TxnCommitAttachment.fromThrift(request.getTxnCommitAttachment())); } @Override public TStreamLoadPutResult streamLoadPut(TStreamLoadPutRequest request) { String clientAddr = getClientAddrAsString(); LOG.info("receive stream load put request. db:{}, tbl: {}, txn_id: {}, load id: {}, backend: {}", request.getDb(), request.getTbl(), request.getTxnId(), DebugUtil.printId(request.getLoadId()), clientAddr); LOG.debug("stream load put request: {}", request); TStreamLoadPutResult result = new TStreamLoadPutResult(); TStatus status = new TStatus(TStatusCode.OK); result.setStatus(status); try { result.setParams(streamLoadPutImpl(request)); } catch (UserException e) { LOG.warn("failed to get stream load plan: {}", e.getMessage()); status.setStatus_code(TStatusCode.ANALYSIS_ERROR); status.addToError_msgs(e.getMessage()); } catch (Throwable e) { LOG.warn("catch unknown result.", e); status.setStatus_code(TStatusCode.INTERNAL_ERROR); status.addToError_msgs(Strings.nullToEmpty(e.getMessage())); return result; } return result; } private TExecPlanFragmentParams streamLoadPutImpl(TStreamLoadPutRequest request) throws UserException { String cluster = request.getCluster(); if (Strings.isNullOrEmpty(cluster)) { cluster = SystemInfoService.DEFAULT_CLUSTER; } GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState(); String dbName = request.getDb(); Database db = globalStateMgr.getDb(dbName); if (db == null) { throw new UserException("unknown database, database=" + dbName); } long timeoutMs = request.isSetThrift_rpc_timeout_ms() ? 
request.getThrift_rpc_timeout_ms() : 5000; if (!db.tryReadLock(timeoutMs, TimeUnit.MILLISECONDS)) { throw new UserException("get database read lock timeout, database=" + dbName); } try { Table table = db.getTable(request.getTbl()); if (table == null) { throw new UserException("unknown table, table=" + request.getTbl()); } if (!(table instanceof OlapTable)) { throw new UserException("load table type is not OlapTable, type=" + table.getClass()); } if (table instanceof MaterializedView) { throw new UserException(String.format( "The data of '%s' cannot be inserted because '%s' is a materialized view," + "and the data of materialized view must be consistent with the base table.", table.getName(), table.getName())); } StreamLoadTask streamLoadTask = StreamLoadTask.fromTStreamLoadPutRequest(request, db); StreamLoadPlanner planner = new StreamLoadPlanner(db, (OlapTable) table, streamLoadTask); TExecPlanFragmentParams plan = planner.plan(streamLoadTask.getId()); TransactionState txnState = GlobalStateMgr.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), request.getTxnId()); if (txnState == null) { throw new UserException("txn does not exist: " + request.getTxnId()); } txnState.addTableIndexes((OlapTable) table); return plan; } finally { db.readUnlock(); } } @Override public TStatus snapshotLoaderReport(TSnapshotLoaderReportRequest request) throws TException { if (GlobalStateMgr.getCurrentState().getBackupHandler().report(request.getTask_type(), request.getJob_id(), request.getTask_id(), request.getFinished_num(), request.getTotal_num())) { return new TStatus(TStatusCode.OK); } return new TStatus(TStatusCode.CANCELLED); } @Override public TRefreshTableResponse refreshTable(TRefreshTableRequest request) throws TException { try { if (request.getCatalog_name() == null) { request.setCatalog_name(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME); } GlobalStateMgr.getCurrentState().refreshExternalTable(new TableName(request.getCatalog_name(), request.getDb_name(), 
request.getTable_name()), request.getPartitions()); return new TRefreshTableResponse(new TStatus(TStatusCode.OK)); } catch (DdlException e) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList(e.getMessage())); return new TRefreshTableResponse(status); } } private TNetworkAddress getClientAddr() { ThriftServerContext connectionContext = ThriftServerEventProcessor.getConnectionContext(); if (connectionContext != null) { return connectionContext.getClient(); } return null; } private String getClientAddrAsString() { TNetworkAddress addr = getClientAddr(); return addr == null ? "unknown" : addr.hostname; } @Override public TGetTableMetaResponse getTableMeta(TGetTableMetaRequest request) throws TException { return leaderImpl.getTableMeta(request); } @Override public TBeginRemoteTxnResponse beginRemoteTxn(TBeginRemoteTxnRequest request) throws TException { return leaderImpl.beginRemoteTxn(request); } @Override public TCommitRemoteTxnResponse commitRemoteTxn(TCommitRemoteTxnRequest request) throws TException { return leaderImpl.commitRemoteTxn(request); } @Override public TAbortRemoteTxnResponse abortRemoteTxn(TAbortRemoteTxnRequest request) throws TException { return leaderImpl.abortRemoteTxn(request); } @Override public TSetConfigResponse setConfig(TSetConfigRequest request) throws TException { try { Preconditions.checkState(request.getKeys().size() == request.getValues().size()); Map<String, String> configs = new HashMap<>(); for (int i = 0; i < request.getKeys().size(); i++) { configs.put(request.getKeys().get(i), request.getValues().get(i)); } GlobalStateMgr.getCurrentState().setFrontendConfig(configs); return new TSetConfigResponse(new TStatus(TStatusCode.OK)); } catch (DdlException e) { TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR); status.setError_msgs(Lists.newArrayList(e.getMessage())); return new TSetConfigResponse(status); } } }
I try to use ImmutableEquivalenceSet, i found that `ImmutableEquivalenceSet` may be not suit the scene such as I want to make a relation mapping as `RelationId#1 -> RelationId#2` which should keep the directivity。 after call` ImmutableEquivalenceSet.addEqualPair` then `tryToMap`,i found get the result is > RelationId#2 -> RelationId#1
public static List<RelationMapping> generate(List<CatalogRelation> sources, List<CatalogRelation> targets) { HashMultimap<Long, MappedRelation> sourceTableRelationIdMap = HashMultimap.create(); for (CatalogRelation relation : sources) { sourceTableRelationIdMap.put(getTableQualifier(relation.getTable()), MappedRelation.of(relation.getRelationId(), relation)); } HashMultimap<Long, MappedRelation> targetTableRelationIdMap = HashMultimap.create(); for (CatalogRelation relation : targets) { targetTableRelationIdMap.put(getTableQualifier(relation.getTable()), MappedRelation.of(relation.getRelationId(), relation)); } Set<Long> sourceTableKeySet = sourceTableRelationIdMap.keySet(); List<List<RelationMapping>> mappedRelations = new ArrayList<>(); for (Long sourceTableQualifier : sourceTableKeySet) { Set<MappedRelation> sourceMappedRelations = sourceTableRelationIdMap.get(sourceTableQualifier); Set<MappedRelation> targetMappedRelations = targetTableRelationIdMap.get(sourceTableQualifier); if (targetMappedRelations.isEmpty()) { continue; } if (targetMappedRelations.size() == 1 && sourceMappedRelations.size() == 1) { ImmutableBiMap.Builder<MappedRelation, MappedRelation> biMapBuilder = ImmutableBiMap.builder(); mappedRelations.add(ImmutableList.of( RelationMapping.of(biMapBuilder.put(sourceMappedRelations.iterator().next(), targetMappedRelations.iterator().next()).build()))); continue; } ImmutableList<Pair<MappedRelation, MappedRelation>> relationMapping = Sets.cartesianProduct( sourceMappedRelations, targetMappedRelations) .stream() .map(listPair -> Pair.of(listPair.get(0), listPair.get(1))) .collect(ImmutableList.toImmutableList()); List<RelationMapping> relationMappingPowerList = new ArrayList<>(); int relationMappingSize = relationMapping.size(); int relationMappingMinSize = Math.min(sourceMappedRelations.size(), targetMappedRelations.size()); for (int i = 0; i < relationMappingSize; i++) { HashBiMap<MappedRelation, MappedRelation> relationBiMap = HashBiMap.create(); 
relationBiMap.put(relationMapping.get(i).key(), relationMapping.get(i).value()); for (int j = i + 1; j < relationMappingSize; j++) { if (!relationBiMap.containsKey(relationMapping.get(j).key()) && !relationBiMap.containsValue(relationMapping.get(j).value())) { relationBiMap.put(relationMapping.get(j).key(), relationMapping.get(j).value()); } } if (relationBiMap.size() >= relationMappingMinSize) { relationMappingPowerList.add(RelationMapping.of(ImmutableBiMap.copyOf(relationBiMap))); } } mappedRelations.add(relationMappingPowerList); } return Lists.cartesianProduct(mappedRelations).stream() .map(RelationMapping::merge) .collect(ImmutableList.toImmutableList()); }
List<List<RelationMapping>> mappedRelations = new ArrayList<>();
public static List<RelationMapping> generate(List<CatalogRelation> sources, List<CatalogRelation> targets) { HashMultimap<Long, MappedRelation> sourceTableRelationIdMap = HashMultimap.create(); for (CatalogRelation relation : sources) { sourceTableRelationIdMap.put(getTableQualifier(relation.getTable()), MappedRelation.of(relation.getRelationId(), relation)); } HashMultimap<Long, MappedRelation> targetTableRelationIdMap = HashMultimap.create(); for (CatalogRelation relation : targets) { targetTableRelationIdMap.put(getTableQualifier(relation.getTable()), MappedRelation.of(relation.getRelationId(), relation)); } Set<Long> sourceTableKeySet = sourceTableRelationIdMap.keySet(); List<List<BiMap<MappedRelation, MappedRelation>>> mappedRelations = new ArrayList<>(); for (Long sourceTableId : sourceTableKeySet) { Set<MappedRelation> sourceMappedRelations = sourceTableRelationIdMap.get(sourceTableId); Set<MappedRelation> targetMappedRelations = targetTableRelationIdMap.get(sourceTableId); if (targetMappedRelations.isEmpty()) { continue; } if (targetMappedRelations.size() == 1 && sourceMappedRelations.size() == 1) { ImmutableBiMap.Builder<MappedRelation, MappedRelation> biMapBuilder = ImmutableBiMap.builder(); mappedRelations.add(ImmutableList.of( biMapBuilder.put(sourceMappedRelations.iterator().next(), targetMappedRelations.iterator().next()).build())); continue; } ImmutableList<Pair<MappedRelation, MappedRelation>> relationMapping = Sets.cartesianProduct( sourceMappedRelations, targetMappedRelations) .stream() .map(listPair -> Pair.of(listPair.get(0), listPair.get(1))) .collect(ImmutableList.toImmutableList()); List<BiMap<MappedRelation, MappedRelation>> relationMappingPowerList = new ArrayList<>(); int relationMappingSize = relationMapping.size(); int relationMappingMinSize = Math.min(sourceMappedRelations.size(), targetMappedRelations.size()); for (int i = 0; i < relationMappingSize; i++) { HashBiMap<MappedRelation, MappedRelation> relationBiMap = HashBiMap.create(); 
relationBiMap.put(relationMapping.get(i).key(), relationMapping.get(i).value()); for (int j = i + 1; j < relationMappingSize; j++) { if (!relationBiMap.containsKey(relationMapping.get(j).key()) && !relationBiMap.containsValue(relationMapping.get(j).value())) { relationBiMap.put(relationMapping.get(j).key(), relationMapping.get(j).value()); } } if (relationBiMap.size() >= relationMappingMinSize) { relationMappingPowerList.add(relationBiMap); } } mappedRelations.add(relationMappingPowerList); } return Lists.cartesianProduct(mappedRelations).stream() .map(RelationMapping::merge) .collect(ImmutableList.toImmutableList()); }
class RelationMapping extends Mapping { private final ImmutableBiMap<MappedRelation, MappedRelation> mappedRelationMap; public RelationMapping(ImmutableBiMap<MappedRelation, MappedRelation> mappedRelationMap) { this.mappedRelationMap = mappedRelationMap; } public BiMap<MappedRelation, MappedRelation> getMappedRelationMap() { return mappedRelationMap; } public static RelationMapping of(ImmutableBiMap<MappedRelation, MappedRelation> mappedRelationMap) { return new RelationMapping(mappedRelationMap); } /** * Generate mapping according to source and target relation */ public static RelationMapping merge(List<RelationMapping> relationMappings) { Builder<MappedRelation, MappedRelation> mappingBuilder = ImmutableBiMap.builder(); for (RelationMapping relationMapping : relationMappings) { relationMapping.getMappedRelationMap().forEach(mappingBuilder::put); } return RelationMapping.of(mappingBuilder.build()); } private static Long getTableQualifier(TableIf tableIf) { return tableIf.getId(); } }
class RelationMapping extends Mapping { private final ImmutableBiMap<MappedRelation, MappedRelation> mappedRelationMap; public RelationMapping(ImmutableBiMap<MappedRelation, MappedRelation> mappedRelationMap) { this.mappedRelationMap = mappedRelationMap; } public BiMap<MappedRelation, MappedRelation> getMappedRelationMap() { return mappedRelationMap; } public static RelationMapping of(ImmutableBiMap<MappedRelation, MappedRelation> mappedRelationMap) { return new RelationMapping(mappedRelationMap); } /** * Generate mapping according to source and target relation */ public static RelationMapping merge(List<BiMap<MappedRelation, MappedRelation>> relationMappings) { Builder<MappedRelation, MappedRelation> mappingBuilder = ImmutableBiMap.builder(); for (BiMap<MappedRelation, MappedRelation> relationMapping : relationMappings) { relationMapping.forEach(mappingBuilder::put); } return RelationMapping.of(mappingBuilder.build()); } private static Long getTableQualifier(TableIf tableIf) { return tableIf.getId(); } }
I am not fully convinced this is the proper error handling logic to extract the http or grpc status code here. DatastoreException has an int getCode() this should return the http code which serviceCallMetri.call() will convert to a grpc status code. So lets's just change the logic to } catch (DatastoreException exception) { serviceCallMetric.call(exception.getCode()); .... and we can remove getErrorInfo
private void flushBatch() throws DatastoreException, IOException, InterruptedException { LOG.debug("Writing batch of {} mutations", mutations.size()); Sleeper sleeper = Sleeper.DEFAULT; BackOff backoff = BUNDLE_WRITE_BACKOFF.backoff(); while (true) { CommitRequest.Builder commitRequest = CommitRequest.newBuilder(); commitRequest.addAllMutations(mutations); commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL); long startTime = System.currentTimeMillis(), endTime; if (adaptiveThrottler.throttleRequest(startTime)) { LOG.info("Delaying request due to previous failures"); throttlingMsecs.inc(WriteBatcherImpl.DATASTORE_BATCH_TARGET_LATENCY_MS); sleeper.sleep(WriteBatcherImpl.DATASTORE_BATCH_TARGET_LATENCY_MS); continue; } HashMap<String, String> baseLabels = new HashMap<>(); baseLabels.put(MonitoringInfoConstants.Labels.PTRANSFORM, ""); baseLabels.put(MonitoringInfoConstants.Labels.SERVICE, "Datastore"); baseLabels.put(MonitoringInfoConstants.Labels.METHOD, "flushBatch"); baseLabels.put(MonitoringInfoConstants.Labels.RESOURCE, ""); baseLabels.put(MonitoringInfoConstants.Labels.DATASTORE_PROJECT, projectId.get()); baseLabels.put( MonitoringInfoConstants.Labels.DATASTORE_NAMESPACE, getNameSpace(projectId.get(), "")); ServiceCallMetric serviceCallMetric = new ServiceCallMetric(MonitoringInfoConstants.Urns.API_REQUEST_COUNT, baseLabels); try { datastore.commit(commitRequest.build()); endTime = System.currentTimeMillis(); serviceCallMetric.call("ok"); writeBatcher.addRequestLatency(endTime, endTime - startTime, mutations.size()); adaptiveThrottler.successfulRequest(startTime); latencyMsPerMutation.update((endTime - startTime) / mutations.size()); rpcSuccesses.inc(); entitiesMutated.inc(mutations.size()); break; } catch (DatastoreException exception) { GoogleJsonError.ErrorInfo errorInfo = getErrorInfo(exception); if (errorInfo == null) { serviceCallMetric.call(ServiceCallMetric.CANONICAL_STATUS_UNKNOWN); } else { serviceCallMetric.call(errorInfo.getReason()); } if 
(exception.getCode() == Code.DEADLINE_EXCEEDED) { /* Most errors are not related to request size, and should not change our expectation of * the latency of successful requests. DEADLINE_EXCEEDED can be taken into * consideration, though. */ endTime = System.currentTimeMillis(); writeBatcher.addRequestLatency(endTime, endTime - startTime, mutations.size()); latencyMsPerMutation.update((endTime - startTime) / mutations.size()); } LOG.error( "Error writing batch of {} mutations to Datastore ({}): {}", mutations.size(), exception.getCode(), exception.getMessage()); rpcErrors.inc(); if (NON_RETRYABLE_ERRORS.contains(exception.getCode())) { throw exception; } if (!BackOffUtils.next(sleeper, backoff)) { LOG.error("Aborting after {} retries.", MAX_RETRIES); throw exception; } } } LOG.debug("Successfully wrote {} mutations", mutations.size()); mutations.clear(); mutationsSize = 0; }
serviceCallMetric.call(errorInfo.getReason());
private void flushBatch() throws DatastoreException, IOException, InterruptedException { LOG.debug("Writing batch of {} mutations", mutations.size()); Sleeper sleeper = Sleeper.DEFAULT; BackOff backoff = BUNDLE_WRITE_BACKOFF.backoff(); while (true) { CommitRequest.Builder commitRequest = CommitRequest.newBuilder(); commitRequest.addAllMutations(mutations); commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL); long startTime = System.currentTimeMillis(), endTime; if (adaptiveThrottler.throttleRequest(startTime)) { LOG.info("Delaying request due to previous failures"); throttlingMsecs.inc(WriteBatcherImpl.DATASTORE_BATCH_TARGET_LATENCY_MS); sleeper.sleep(WriteBatcherImpl.DATASTORE_BATCH_TARGET_LATENCY_MS); continue; } HashMap<String, String> baseLabels = new HashMap<>(); baseLabels.put(MonitoringInfoConstants.Labels.PTRANSFORM, ""); baseLabels.put(MonitoringInfoConstants.Labels.SERVICE, "Datastore"); baseLabels.put(MonitoringInfoConstants.Labels.METHOD, "BatchDatastoreWrite"); baseLabels.put( MonitoringInfoConstants.Labels.RESOURCE, GcpResourceIdentifiers.datastoreResource(projectId.get(), "")); baseLabels.put(MonitoringInfoConstants.Labels.DATASTORE_PROJECT, projectId.get()); baseLabels.put(MonitoringInfoConstants.Labels.DATASTORE_NAMESPACE, ""); ServiceCallMetric serviceCallMetric = new ServiceCallMetric(MonitoringInfoConstants.Urns.API_REQUEST_COUNT, baseLabels); try { datastore.commit(commitRequest.build()); endTime = System.currentTimeMillis(); serviceCallMetric.call("ok"); writeBatcher.addRequestLatency(endTime, endTime - startTime, mutations.size()); adaptiveThrottler.successfulRequest(startTime); latencyMsPerMutation.update((endTime - startTime) / mutations.size()); rpcSuccesses.inc(); entitiesMutated.inc(mutations.size()); break; } catch (DatastoreException exception) { serviceCallMetric.call(exception.getCode().getNumber()); if (exception.getCode() == Code.DEADLINE_EXCEEDED) { /* Most errors are not related to request size, and should not change our 
expectation of * the latency of successful requests. DEADLINE_EXCEEDED can be taken into * consideration, though. */ endTime = System.currentTimeMillis(); writeBatcher.addRequestLatency(endTime, endTime - startTime, mutations.size()); latencyMsPerMutation.update((endTime - startTime) / mutations.size()); } LOG.error( "Error writing batch of {} mutations to Datastore ({}): {}", mutations.size(), exception.getCode(), exception.getMessage()); rpcErrors.inc(); if (NON_RETRYABLE_ERRORS.contains(exception.getCode())) { throw exception; } if (!BackOffUtils.next(sleeper, backoff)) { LOG.error("Aborting after {} retries.", MAX_RETRIES); throw exception; } } } LOG.debug("Successfully wrote {} mutations", mutations.size()); mutations.clear(); mutationsSize = 0; }
class DatastoreWriterFn extends DoFn<Mutation, Void> { private static final Logger LOG = LoggerFactory.getLogger(DatastoreWriterFn.class); private final ValueProvider<String> projectId; private final @Nullable String localhost; private transient Datastore datastore; private final V1DatastoreFactory datastoreFactory; private final List<Mutation> mutations = new ArrayList<>(); private int mutationsSize = 0; private WriteBatcher writeBatcher; private transient AdaptiveThrottler adaptiveThrottler; private final Counter throttlingMsecs = Metrics.counter(DatastoreWriterFn.class, "throttling-msecs"); private final Counter rpcErrors = Metrics.counter(DatastoreWriterFn.class, "datastoreRpcErrors"); private final Counter rpcSuccesses = Metrics.counter(DatastoreWriterFn.class, "datastoreRpcSuccesses"); private final Counter entitiesMutated = Metrics.counter(DatastoreWriterFn.class, "datastoreEntitiesMutated"); private final Distribution latencyMsPerMutation = Metrics.distribution(DatastoreWriterFn.class, "datastoreLatencyMsPerMutation"); private static final int MAX_RETRIES = 5; private static final FluentBackoff BUNDLE_WRITE_BACKOFF = FluentBackoff.DEFAULT .withMaxRetries(MAX_RETRIES) .withInitialBackoff(Duration.standardSeconds(5)); DatastoreWriterFn(String projectId, @Nullable String localhost) { this( StaticValueProvider.of(projectId), localhost, new V1DatastoreFactory(), new WriteBatcherImpl()); } DatastoreWriterFn(ValueProvider<String> projectId, @Nullable String localhost) { this(projectId, localhost, new V1DatastoreFactory(), new WriteBatcherImpl()); } @VisibleForTesting DatastoreWriterFn( ValueProvider<String> projectId, @Nullable String localhost, V1DatastoreFactory datastoreFactory, WriteBatcher writeBatcher) { this.projectId = checkNotNull(projectId, "projectId"); this.localhost = localhost; this.datastoreFactory = datastoreFactory; this.writeBatcher = writeBatcher; } @StartBundle public void startBundle(StartBundleContext c) { datastore = 
datastoreFactory.getDatastore(c.getPipelineOptions(), projectId.get(), localhost); writeBatcher.start(); if (adaptiveThrottler == null) { adaptiveThrottler = new AdaptiveThrottler(120000, 10000, 1.25); } } @ProcessElement public void processElement(ProcessContext c) throws Exception { Mutation write = c.element(); int size = write.getSerializedSize(); if (mutations.size() > 0 && mutationsSize + size >= DatastoreV1.DATASTORE_BATCH_UPDATE_BYTES_LIMIT) { flushBatch(); } mutations.add(c.element()); mutationsSize += size; if (mutations.size() >= writeBatcher.nextBatchSize(System.currentTimeMillis())) { flushBatch(); } } @FinishBundle public void finishBundle() throws Exception { if (!mutations.isEmpty()) { flushBatch(); } } /** * Writes a batch of mutations to Cloud Datastore. * * <p>If a commit fails, it will be retried up to {@link * the batch will be committed again, even if the commit was partially successful. If the retry * limit is exceeded, the last exception from Cloud Datastore will be thrown. * * @throws DatastoreException if the commit fails or IOException or InterruptedException if * backing off between retries fails. */ @Override public void populateDisplayData(Builder builder) { super.populateDisplayData(builder); builder.addIfNotNull(DisplayData.item("projectId", projectId).withLabel("Output Project")); } }
class DatastoreWriterFn extends DoFn<Mutation, Void> { private static final Logger LOG = LoggerFactory.getLogger(DatastoreWriterFn.class); private final ValueProvider<String> projectId; private final @Nullable String localhost; private transient Datastore datastore; private final V1DatastoreFactory datastoreFactory; private final List<Mutation> mutations = new ArrayList<>(); private int mutationsSize = 0; private WriteBatcher writeBatcher; private transient AdaptiveThrottler adaptiveThrottler; private final Counter throttlingMsecs = Metrics.counter(DatastoreWriterFn.class, "throttling-msecs"); private final Counter rpcErrors = Metrics.counter(DatastoreWriterFn.class, "datastoreRpcErrors"); private final Counter rpcSuccesses = Metrics.counter(DatastoreWriterFn.class, "datastoreRpcSuccesses"); private final Counter entitiesMutated = Metrics.counter(DatastoreWriterFn.class, "datastoreEntitiesMutated"); private final Distribution latencyMsPerMutation = Metrics.distribution(DatastoreWriterFn.class, "datastoreLatencyMsPerMutation"); private static final int MAX_RETRIES = 5; private static final FluentBackoff BUNDLE_WRITE_BACKOFF = FluentBackoff.DEFAULT .withMaxRetries(MAX_RETRIES) .withInitialBackoff(Duration.standardSeconds(5)); DatastoreWriterFn(String projectId, @Nullable String localhost) { this( StaticValueProvider.of(projectId), localhost, new V1DatastoreFactory(), new WriteBatcherImpl()); } DatastoreWriterFn(ValueProvider<String> projectId, @Nullable String localhost) { this(projectId, localhost, new V1DatastoreFactory(), new WriteBatcherImpl()); } @VisibleForTesting DatastoreWriterFn( ValueProvider<String> projectId, @Nullable String localhost, V1DatastoreFactory datastoreFactory, WriteBatcher writeBatcher) { this.projectId = checkNotNull(projectId, "projectId"); this.localhost = localhost; this.datastoreFactory = datastoreFactory; this.writeBatcher = writeBatcher; } @StartBundle public void startBundle(StartBundleContext c) { datastore = 
datastoreFactory.getDatastore(c.getPipelineOptions(), projectId.get(), localhost); writeBatcher.start(); if (adaptiveThrottler == null) { adaptiveThrottler = new AdaptiveThrottler(120000, 10000, 1.25); } } @ProcessElement public void processElement(ProcessContext c) throws Exception { Mutation write = c.element(); int size = write.getSerializedSize(); if (mutations.size() > 0 && mutationsSize + size >= DatastoreV1.DATASTORE_BATCH_UPDATE_BYTES_LIMIT) { flushBatch(); } mutations.add(c.element()); mutationsSize += size; if (mutations.size() >= writeBatcher.nextBatchSize(System.currentTimeMillis())) { flushBatch(); } } @FinishBundle public void finishBundle() throws Exception { if (!mutations.isEmpty()) { flushBatch(); } } /** * Writes a batch of mutations to Cloud Datastore. * * <p>If a commit fails, it will be retried up to {@link * the batch will be committed again, even if the commit was partially successful. If the retry * limit is exceeded, the last exception from Cloud Datastore will be thrown. * * @throws DatastoreException if the commit fails or IOException or InterruptedException if * backing off between retries fails. */ @Override public void populateDisplayData(Builder builder) { super.populateDisplayData(builder); builder.addIfNotNull(DisplayData.item("projectId", projectId).withLabel("Output Project")); } }
Let's also log other shutdown attempts as DEBUG.
private CompletableFuture<Void> close(Throwable cause) { final CompletableFuture<Void> shutdownFuture = new CompletableFuture<>(); if (connectionShutdownFuture.compareAndSet(null, shutdownFuture) && failureCause.compareAndSet(null, cause)) { channel.close().addListener(finished -> { stats.reportInactiveConnection(); for (long requestId : pendingRequests.keySet()) { TimestampedCompletableFuture pending = pendingRequests.remove(requestId); if (pending != null && pending.completeExceptionally(cause)) { stats.reportFailedRequest(); } } if (finished.isSuccess()) { shutdownFuture.complete(null); } else { shutdownFuture.completeExceptionally(finished.cause()); } }); } return connectionShutdownFuture.get(); }
if (connectionShutdownFuture.compareAndSet(null, shutdownFuture) &&
private CompletableFuture<Void> close(Throwable cause) { CompletableFuture<Void> future = new CompletableFuture<>(); if (connectionShutdownFuture.compareAndSet(null, future)) { synchronized (connectLock) { if (failureCause == null) { failureCause = cause; } if (established != null) { established.close().whenComplete((result, throwable) -> { if (throwable != null) { future.completeExceptionally(throwable); } else { future.complete(null); } }); } else { PendingRequest pending; while ((pending = queuedRequests.poll()) != null) { pending.completeExceptionally(cause); } future.complete(null); } } } return connectionShutdownFuture.get(); }
class PendingConnection implements ChannelFutureListener { /** Lock to guard the connect call, channel hand in, etc. */ private final Object connectLock = new Object(); /** Address of the server we are connecting to. */ private final InetSocketAddress serverAddress; private final MessageSerializer<REQ, RESP> serializer; /** Queue of requests while connecting. */ private final ArrayDeque<PendingRequest> queuedRequests = new ArrayDeque<>(); /** The established connection after the connect succeeds. */ private EstablishedConnection established; /** Atomic shut down future. */ private final AtomicReference<CompletableFuture<Void>> connectionShutdownFuture = new AtomicReference<>(null); /** Closed flag. */ private boolean closed; /** Failure cause if something goes wrong. */ private Throwable failureCause; /** * Creates a pending connection to the given server. * * @param serverAddress Address of the server to connect to. */ private PendingConnection( final InetSocketAddress serverAddress, final MessageSerializer<REQ, RESP> serializer) { this.serverAddress = serverAddress; this.serializer = serializer; } @Override public void operationComplete(ChannelFuture future) throws Exception { if (future.isSuccess()) { handInChannel(future.channel()); } else { close(future.cause()); } } /** * Returns a future holding the serialized request result. * * <p>If the channel has been established, forward the call to the * established channel, otherwise queue it for when the channel is * handed in. * * @param request the request to be sent. 
* @return Future holding the serialized result */ public CompletableFuture<RESP> sendRequest(REQ request) { synchronized (connectLock) { if (failureCause != null) { return FutureUtils.getFailedFuture(failureCause); } else if (closed) { return FutureUtils.getFailedFuture(new ClosedChannelException()); } else { if (established != null) { return established.sendRequest(request); } else { final PendingRequest pending = new PendingRequest(request); queuedRequests.add(pending); return pending; } } } } /** * Hands in a channel after a successful connection. * * @param channel Channel to hand in */ private void handInChannel(Channel channel) { synchronized (connectLock) { if (closed || failureCause != null) { channel.close(); } else { established = new EstablishedConnection(serverAddress, serializer, channel); while (!queuedRequests.isEmpty()) { final PendingRequest pending = queuedRequests.poll(); established.sendRequest(pending.request).whenComplete( (response, throwable) -> { if (throwable != null) { pending.completeExceptionally(throwable); } else { pending.complete(response); } }); } establishedConnections.put(serverAddress, established); pendingConnections.remove(serverAddress); if (!clientShutdownFuture.compareAndSet(null, null)) { if (establishedConnections.remove(serverAddress, established)) { established.close(); } } } } } /** * Close the connecting channel with a ClosedChannelException. */ private CompletableFuture<Void> close() { return close(new ClosedChannelException()); } /** * Close the connecting channel with an Exception (can be {@code null}) * or forward to the established channel. */ @Override public String toString() { synchronized (connectLock) { return "PendingConnection{" + "serverAddress=" + serverAddress + ", queuedRequests=" + queuedRequests.size() + ", established=" + (established != null) + ", closed=" + closed + '}'; } } /** * A pending request queued while the channel is connecting. 
*/ private final class PendingRequest extends CompletableFuture<RESP> { private final REQ request; private PendingRequest(REQ request) { this.request = request; } } }
class PendingConnection implements ChannelFutureListener { /** Lock to guard the connect call, channel hand in, etc. */ private final Object connectLock = new Object(); /** Address of the server we are connecting to. */ private final InetSocketAddress serverAddress; private final MessageSerializer<REQ, RESP> serializer; /** Queue of requests while connecting. */ private final ArrayDeque<PendingRequest> queuedRequests = new ArrayDeque<>(); /** The established connection after the connect succeeds. */ private EstablishedConnection established; /** Atomic shut down future. */ private final AtomicReference<CompletableFuture<Void>> connectionShutdownFuture = new AtomicReference<>(null); /** Failure cause if something goes wrong. */ private Throwable failureCause; /** * Creates a pending connection to the given server. * * @param serverAddress Address of the server to connect to. */ private PendingConnection( final InetSocketAddress serverAddress, final MessageSerializer<REQ, RESP> serializer) { this.serverAddress = serverAddress; this.serializer = serializer; } @Override public void operationComplete(ChannelFuture future) throws Exception { if (future.isSuccess()) { handInChannel(future.channel()); } else { close(future.cause()); } } /** * Returns a future holding the serialized request result. * * <p>If the channel has been established, forward the call to the * established channel, otherwise queue it for when the channel is * handed in. * * @param request the request to be sent. 
* @return Future holding the serialized result */ public CompletableFuture<RESP> sendRequest(REQ request) { synchronized (connectLock) { if (failureCause != null) { return FutureUtils.getFailedFuture(failureCause); } else if (connectionShutdownFuture.get() != null) { return FutureUtils.getFailedFuture(new ClosedChannelException()); } else { if (established != null) { return established.sendRequest(request); } else { final PendingRequest pending = new PendingRequest(request); queuedRequests.add(pending); return pending; } } } } /** * Hands in a channel after a successful connection. * * @param channel Channel to hand in */ private void handInChannel(Channel channel) { synchronized (connectLock) { if (connectionShutdownFuture.get() != null || failureCause != null) { channel.close(); } else { established = new EstablishedConnection(serverAddress, serializer, channel); while (!queuedRequests.isEmpty()) { final PendingRequest pending = queuedRequests.poll(); established.sendRequest(pending.request).whenComplete( (response, throwable) -> { if (throwable != null) { pending.completeExceptionally(throwable); } else { pending.complete(response); } }); } establishedConnections.put(serverAddress, established); pendingConnections.remove(serverAddress); if (clientShutdownFuture.get() != null) { if (establishedConnections.remove(serverAddress, established)) { established.close(); } } } } } /** * Close the connecting channel with a ClosedChannelException. */ private CompletableFuture<Void> close() { return close(new ClosedChannelException()); } /** * Close the connecting channel with an Exception (can be {@code null}) * or forward to the established channel. 
*/ @Override public String toString() { synchronized (connectLock) { return "PendingConnection{" + "serverAddress=" + serverAddress + ", queuedRequests=" + queuedRequests.size() + ", established=" + (established != null) + ", closed=" + (connectionShutdownFuture.get() != null) + '}'; } } /** * A pending request queued while the channel is connecting. */ private final class PendingRequest extends CompletableFuture<RESP> { private final REQ request; private PendingRequest(REQ request) { this.request = request; } } }
A question here: why have to change from `BufferOrEvent` to `Optional<BufferOrEvent>`, to avoid null check?
public Optional<BufferOrEvent> pollNext() throws Exception { while (true) { Optional<BufferOrEvent> next; if (currentBuffered == null) { next = inputGate.pollNext(); } else { next = Optional.ofNullable(currentBuffered.getNext()); if (!next.isPresent()) { completeBufferedSequence(); return pollNext(); } } if (!next.isPresent()) { return handleEmptyBuffer(); } BufferOrEvent bufferOrEvent = next.get(); if (isBlocked(bufferOrEvent.getChannelIndex())) { bufferBlocker.add(bufferOrEvent); checkSizeLimit(); } else if (bufferOrEvent.isBuffer()) { return next; } else if (bufferOrEvent.getEvent().getClass() == CheckpointBarrier.class) { if (!endOfStream) { processBarrier((CheckpointBarrier) bufferOrEvent.getEvent(), bufferOrEvent.getChannelIndex()); } } else if (bufferOrEvent.getEvent().getClass() == CancelCheckpointMarker.class) { processCancellationBarrier((CancelCheckpointMarker) bufferOrEvent.getEvent()); } else { if (bufferOrEvent.getEvent().getClass() == EndOfPartitionEvent.class) { processEndOfPartition(); } return next; } } }
if (currentBuffered == null) {
public Optional<BufferOrEvent> pollNext() throws Exception { while (true) { Optional<BufferOrEvent> next; if (currentBuffered == null) { next = inputGate.pollNext(); } else { next = Optional.ofNullable(currentBuffered.getNext()); if (!next.isPresent()) { completeBufferedSequence(); return pollNext(); } } if (!next.isPresent()) { return handleEmptyBuffer(); } BufferOrEvent bufferOrEvent = next.get(); if (isBlocked(bufferOrEvent.getChannelIndex())) { bufferBlocker.add(bufferOrEvent); checkSizeLimit(); } else if (bufferOrEvent.isBuffer()) { return next; } else if (bufferOrEvent.getEvent().getClass() == CheckpointBarrier.class) { if (!endOfStream) { processBarrier((CheckpointBarrier) bufferOrEvent.getEvent(), bufferOrEvent.getChannelIndex()); } } else if (bufferOrEvent.getEvent().getClass() == CancelCheckpointMarker.class) { processCancellationBarrier((CancelCheckpointMarker) bufferOrEvent.getEvent()); } else { if (bufferOrEvent.getEvent().getClass() == EndOfPartitionEvent.class) { processEndOfPartition(); } return next; } } }
class BarrierBuffer implements CheckpointBarrierHandler { private static final Logger LOG = LoggerFactory.getLogger(BarrierBuffer.class); /** The gate that the buffer draws its input from. */ private final InputGate inputGate; /** Flags that indicate whether a channel is currently blocked/buffered. */ private final boolean[] blockedChannels; /** The total number of channels that this buffer handles data from. */ private final int totalNumberOfInputChannels; /** To utility to write blocked data to a file channel. */ private final BufferBlocker bufferBlocker; /** * The pending blocked buffer/event sequences. Must be consumed before requesting further data * from the input gate. */ private final ArrayDeque<BufferOrEventSequence> queuedBuffered; /** * The maximum number of bytes that may be buffered before an alignment is broken. -1 means * unlimited. */ private final long maxBufferedBytes; /** * The sequence of buffers/events that has been unblocked and must now be consumed before * requesting further data from the input gate. */ private BufferOrEventSequence currentBuffered; /** Handler that receives the checkpoint notifications. */ private AbstractInvokable toNotifyOnCheckpoint; /** The ID of the checkpoint for which we expect barriers. */ private long currentCheckpointId = -1L; /** * The number of received barriers (= number of blocked/buffered channels) IMPORTANT: A canceled * checkpoint must always have 0 barriers. */ private int numBarriersReceived; /** The number of already closed channels. */ private int numClosedChannels; /** The number of bytes in the queued spilled sequences. */ private long numQueuedBytes; /** The timestamp as in {@link System private long startOfAlignmentTimestamp; /** The time (in nanoseconds) that the latest alignment took. */ private long latestAlignmentDurationNanos; /** Flag to indicate whether we have drawn all available input. */ private boolean endOfStream; private boolean isFinished; /** * Creates a new checkpoint stream aligner. 
* * <p>There is no limit to how much data may be buffered during an alignment. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. * * @throws IOException Thrown, when the spilling to temp files cannot be initialized. */ public BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker) throws IOException { this (inputGate, bufferBlocker, -1); } /** * Creates a new checkpoint stream aligner. * * <p>The aligner will allow only alignments that buffer up to the given number of bytes. * When that number is exceeded, it will stop the alignment and notify the task that the * checkpoint has been cancelled. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. * @param maxBufferedBytes The maximum bytes to be buffered before the checkpoint aborts. * * @throws IOException Thrown, when the spilling to temp files cannot be initialized. 
*/ public BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker, long maxBufferedBytes) throws IOException { checkArgument(maxBufferedBytes == -1 || maxBufferedBytes > 0); this.inputGate = inputGate; this.maxBufferedBytes = maxBufferedBytes; this.totalNumberOfInputChannels = inputGate.getNumberOfInputChannels(); this.blockedChannels = new boolean[this.totalNumberOfInputChannels]; this.bufferBlocker = checkNotNull(bufferBlocker); this.queuedBuffered = new ArrayDeque<BufferOrEventSequence>(); } @Override public CompletableFuture<?> isAvailable() { if (currentBuffered == null) { return inputGate.isAvailable(); } return AVAILABLE; } @Override private Optional<BufferOrEvent> handleEmptyBuffer() throws Exception { if (!inputGate.isFinished()) { return Optional.empty(); } if (endOfStream) { isFinished = true; return Optional.empty(); } else { endOfStream = true; releaseBlocksAndResetBarriers(); return pollNext(); } } private void completeBufferedSequence() throws IOException { LOG.debug("{}: Finished feeding back buffered data.", inputGate.getOwningTaskName()); currentBuffered.cleanup(); currentBuffered = queuedBuffered.pollFirst(); if (currentBuffered != null) { currentBuffered.open(); numQueuedBytes -= currentBuffered.size(); } } private void processBarrier(CheckpointBarrier receivedBarrier, int channelIndex) throws Exception { final long barrierId = receivedBarrier.getId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyCheckpoint(receivedBarrier); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { onBarrier(channelIndex); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received checkpoint barrier for checkpoint {} before completing current checkpoint {}. 
" + "Skipping current checkpoint.", inputGate.getOwningTaskName(), barrierId, currentCheckpointId); notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); releaseBlocksAndResetBarriers(); beginNewAlignment(barrierId, channelIndex); } else { return; } } else if (barrierId > currentCheckpointId) { beginNewAlignment(barrierId, channelIndex); } else { return; } if (numBarriersReceived + numClosedChannels == totalNumberOfInputChannels) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Received all barriers, triggering checkpoint {} at {}.", inputGate.getOwningTaskName(), receivedBarrier.getId(), receivedBarrier.getTimestamp()); } releaseBlocksAndResetBarriers(); notifyCheckpoint(receivedBarrier); } } private void processCancellationBarrier(CancelCheckpointMarker cancelBarrier) throws Exception { final long barrierId = cancelBarrier.getCheckpointId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyAbortOnCancellationBarrier(barrierId); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, aborting alignment.", inputGate.getOwningTaskName(), barrierId); } releaseBlocksAndResetBarriers(); notifyAbortOnCancellationBarrier(barrierId); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received cancellation barrier for checkpoint {} before completing current checkpoint {}. 
" + "Skipping current checkpoint.", inputGate.getOwningTaskName(), barrierId, currentCheckpointId); releaseBlocksAndResetBarriers(); currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); notifyAbortOnCancellationBarrier(barrierId); } } else if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, skipping alignment.", inputGate.getOwningTaskName(), barrierId); } notifyAbortOnCancellationBarrier(barrierId); } } private void processEndOfPartition() throws Exception { numClosedChannels++; if (numBarriersReceived > 0) { notifyAbort(currentCheckpointId, new InputEndOfStreamException()); releaseBlocksAndResetBarriers(); } } private void notifyCheckpoint(CheckpointBarrier checkpointBarrier) throws Exception { if (toNotifyOnCheckpoint != null) { CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointBarrier.getId(), checkpointBarrier.getTimestamp()); long bytesBuffered = currentBuffered != null ? 
currentBuffered.size() : 0L; CheckpointMetrics checkpointMetrics = new CheckpointMetrics() .setBytesBufferedInAlignment(bytesBuffered) .setAlignmentDurationNanos(latestAlignmentDurationNanos); toNotifyOnCheckpoint.triggerCheckpointOnBarrier( checkpointMetaData, checkpointBarrier.getCheckpointOptions(), checkpointMetrics); } } private void notifyAbortOnCancellationBarrier(long checkpointId) throws Exception { notifyAbort(checkpointId, new CheckpointDeclineOnCancellationBarrierException()); } private void notifyAbort(long checkpointId, CheckpointDeclineException cause) throws Exception { if (toNotifyOnCheckpoint != null) { toNotifyOnCheckpoint.abortCheckpointOnBarrier(checkpointId, cause); } } private void checkSizeLimit() throws Exception { if (maxBufferedBytes > 0 && (numQueuedBytes + bufferBlocker.getBytesBlocked()) > maxBufferedBytes) { LOG.info("{}: Checkpoint {} aborted because alignment volume limit ({} bytes) exceeded.", inputGate.getOwningTaskName(), currentCheckpointId, maxBufferedBytes); releaseBlocksAndResetBarriers(); notifyAbort(currentCheckpointId, new AlignmentLimitExceededException(maxBufferedBytes)); } } @Override public void registerCheckpointEventHandler(AbstractInvokable toNotifyOnCheckpoint) { if (this.toNotifyOnCheckpoint == null) { this.toNotifyOnCheckpoint = toNotifyOnCheckpoint; } else { throw new IllegalStateException("BarrierBuffer already has a registered checkpoint notifyee"); } } @Override public boolean isEmpty() { return currentBuffered == null; } @Override public boolean isFinished() { return isFinished; } @Override public void cleanup() throws IOException { bufferBlocker.close(); if (currentBuffered != null) { currentBuffered.cleanup(); } for (BufferOrEventSequence seq : queuedBuffered) { seq.cleanup(); } queuedBuffered.clear(); numQueuedBytes = 0L; } private void beginNewAlignment(long checkpointId, int channelIndex) throws IOException { currentCheckpointId = checkpointId; onBarrier(channelIndex); startOfAlignmentTimestamp = 
System.nanoTime(); if (LOG.isDebugEnabled()) { LOG.debug("{}: Starting stream alignment for checkpoint {}.", inputGate.getOwningTaskName(), checkpointId); } } /** * Checks whether the channel with the given index is blocked. * * @param channelIndex The channel index to check. * @return True if the channel is blocked, false if not. */ private boolean isBlocked(int channelIndex) { return blockedChannels[channelIndex]; } /** * Blocks the given channel index, from which a barrier has been received. * * @param channelIndex The channel index to block. */ private void onBarrier(int channelIndex) throws IOException { if (!blockedChannels[channelIndex]) { blockedChannels[channelIndex] = true; numBarriersReceived++; if (LOG.isDebugEnabled()) { LOG.debug("{}: Received barrier from channel {}.", inputGate.getOwningTaskName(), channelIndex); } } else { throw new IOException("Stream corrupt: Repeated barrier for same checkpoint on input " + channelIndex); } } /** * Releases the blocks on all channels and resets the barrier count. * Makes sure the just written data is the next to be consumed. 
*/ private void releaseBlocksAndResetBarriers() throws IOException { LOG.debug("{}: End of stream alignment, feeding buffered data back.", inputGate.getOwningTaskName()); for (int i = 0; i < blockedChannels.length; i++) { blockedChannels[i] = false; } if (currentBuffered == null) { currentBuffered = bufferBlocker.rollOverReusingResources(); if (currentBuffered != null) { currentBuffered.open(); } } else { LOG.debug("{}: Checkpoint skipped via buffered data:" + "Pushing back current alignment buffers and feeding back new alignment data first.", inputGate.getOwningTaskName()); BufferOrEventSequence bufferedNow = bufferBlocker.rollOverWithoutReusingResources(); if (bufferedNow != null) { bufferedNow.open(); queuedBuffered.addFirst(currentBuffered); numQueuedBytes += currentBuffered.size(); currentBuffered = bufferedNow; } } if (LOG.isDebugEnabled()) { LOG.debug("{}: Size of buffered data: {} bytes", inputGate.getOwningTaskName(), currentBuffered == null ? 0L : currentBuffered.size()); } numBarriersReceived = 0; if (startOfAlignmentTimestamp > 0) { latestAlignmentDurationNanos = System.nanoTime() - startOfAlignmentTimestamp; startOfAlignmentTimestamp = 0; } } /** * Gets the ID defining the current pending, or just completed, checkpoint. * * @return The ID of the pending of completed checkpoint. */ public long getCurrentCheckpointId() { return this.currentCheckpointId; } @Override public long getAlignmentDurationNanos() { long start = this.startOfAlignmentTimestamp; if (start <= 0) { return latestAlignmentDurationNanos; } else { return System.nanoTime() - start; } } @Override public String toString() { return String.format("%s: last checkpoint: %d, current barriers: %d, closed channels: %d", inputGate.getOwningTaskName(), currentCheckpointId, numBarriersReceived, numClosedChannels); } }
class BarrierBuffer implements CheckpointBarrierHandler { private static final Logger LOG = LoggerFactory.getLogger(BarrierBuffer.class); /** The gate that the buffer draws its input from. */ private final InputGate inputGate; /** Flags that indicate whether a channel is currently blocked/buffered. */ private final boolean[] blockedChannels; /** The total number of channels that this buffer handles data from. */ private final int totalNumberOfInputChannels; /** To utility to write blocked data to a file channel. */ private final BufferBlocker bufferBlocker; /** * The pending blocked buffer/event sequences. Must be consumed before requesting further data * from the input gate. */ private final ArrayDeque<BufferOrEventSequence> queuedBuffered; /** * The maximum number of bytes that may be buffered before an alignment is broken. -1 means * unlimited. */ private final long maxBufferedBytes; private final String taskName; /** * The sequence of buffers/events that has been unblocked and must now be consumed before * requesting further data from the input gate. */ private BufferOrEventSequence currentBuffered; /** Handler that receives the checkpoint notifications. */ private AbstractInvokable toNotifyOnCheckpoint; /** The ID of the checkpoint for which we expect barriers. */ private long currentCheckpointId = -1L; /** * The number of received barriers (= number of blocked/buffered channels) IMPORTANT: A canceled * checkpoint must always have 0 barriers. */ private int numBarriersReceived; /** The number of already closed channels. */ private int numClosedChannels; /** The number of bytes in the queued spilled sequences. */ private long numQueuedBytes; /** The timestamp as in {@link System private long startOfAlignmentTimestamp; /** The time (in nanoseconds) that the latest alignment took. */ private long latestAlignmentDurationNanos; /** Flag to indicate whether we have drawn all available input. */ private boolean endOfStream; /** Indicate end of the input. 
Set to true after encountering {@link * {@link private boolean isFinished; /** * Creates a new checkpoint stream aligner. * * <p>There is no limit to how much data may be buffered during an alignment. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. */ @VisibleForTesting BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker) { this (inputGate, bufferBlocker, -1, "Testing: No task associated"); } /** * Creates a new checkpoint stream aligner. * * <p>The aligner will allow only alignments that buffer up to the given number of bytes. * When that number is exceeded, it will stop the alignment and notify the task that the * checkpoint has been cancelled. * * @param inputGate The input gate to draw the buffers and events from. * @param bufferBlocker The buffer blocker to hold the buffers and events for channels with barrier. * @param maxBufferedBytes The maximum bytes to be buffered before the checkpoint aborts. * @param taskName The task name for logging. 
*/ BarrierBuffer(InputGate inputGate, BufferBlocker bufferBlocker, long maxBufferedBytes, String taskName) { checkArgument(maxBufferedBytes == -1 || maxBufferedBytes > 0); this.inputGate = inputGate; this.maxBufferedBytes = maxBufferedBytes; this.totalNumberOfInputChannels = inputGate.getNumberOfInputChannels(); this.blockedChannels = new boolean[this.totalNumberOfInputChannels]; this.bufferBlocker = checkNotNull(bufferBlocker); this.queuedBuffered = new ArrayDeque<BufferOrEventSequence>(); this.taskName = taskName; } @Override public CompletableFuture<?> isAvailable() { if (currentBuffered == null) { return inputGate.isAvailable(); } return AVAILABLE; } @Override private Optional<BufferOrEvent> handleEmptyBuffer() throws Exception { if (!inputGate.isFinished()) { return Optional.empty(); } if (endOfStream) { isFinished = true; return Optional.empty(); } else { endOfStream = true; releaseBlocksAndResetBarriers(); return pollNext(); } } private void completeBufferedSequence() throws IOException { LOG.debug("{}: Finished feeding back buffered data.", taskName); currentBuffered.cleanup(); currentBuffered = queuedBuffered.pollFirst(); if (currentBuffered != null) { currentBuffered.open(); numQueuedBytes -= currentBuffered.size(); } } private void processBarrier(CheckpointBarrier receivedBarrier, int channelIndex) throws Exception { final long barrierId = receivedBarrier.getId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyCheckpoint(receivedBarrier); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { onBarrier(channelIndex); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received checkpoint barrier for checkpoint {} before completing current checkpoint {}. 
" + "Skipping current checkpoint.", taskName, barrierId, currentCheckpointId); notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); releaseBlocksAndResetBarriers(); beginNewAlignment(barrierId, channelIndex); } else { return; } } else if (barrierId > currentCheckpointId) { beginNewAlignment(barrierId, channelIndex); } else { return; } if (numBarriersReceived + numClosedChannels == totalNumberOfInputChannels) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Received all barriers, triggering checkpoint {} at {}.", taskName, receivedBarrier.getId(), receivedBarrier.getTimestamp()); } releaseBlocksAndResetBarriers(); notifyCheckpoint(receivedBarrier); } } private void processCancellationBarrier(CancelCheckpointMarker cancelBarrier) throws Exception { final long barrierId = cancelBarrier.getCheckpointId(); if (totalNumberOfInputChannels == 1) { if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; notifyAbortOnCancellationBarrier(barrierId); } return; } if (numBarriersReceived > 0) { if (barrierId == currentCheckpointId) { if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, aborting alignment.", taskName, barrierId); } releaseBlocksAndResetBarriers(); notifyAbortOnCancellationBarrier(barrierId); } else if (barrierId > currentCheckpointId) { LOG.warn("{}: Received cancellation barrier for checkpoint {} before completing current checkpoint {}. 
" + "Skipping current checkpoint.", taskName, barrierId, currentCheckpointId); releaseBlocksAndResetBarriers(); currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; notifyAbort(currentCheckpointId, new CheckpointDeclineSubsumedException(barrierId)); notifyAbortOnCancellationBarrier(barrierId); } } else if (barrierId > currentCheckpointId) { currentCheckpointId = barrierId; startOfAlignmentTimestamp = 0L; latestAlignmentDurationNanos = 0L; if (LOG.isDebugEnabled()) { LOG.debug("{}: Checkpoint {} canceled, skipping alignment.", taskName, barrierId); } notifyAbortOnCancellationBarrier(barrierId); } } private void processEndOfPartition() throws Exception { numClosedChannels++; if (numBarriersReceived > 0) { notifyAbort(currentCheckpointId, new InputEndOfStreamException()); releaseBlocksAndResetBarriers(); } } private void notifyCheckpoint(CheckpointBarrier checkpointBarrier) throws Exception { if (toNotifyOnCheckpoint != null) { CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointBarrier.getId(), checkpointBarrier.getTimestamp()); long bytesBuffered = currentBuffered != null ? 
currentBuffered.size() : 0L; CheckpointMetrics checkpointMetrics = new CheckpointMetrics() .setBytesBufferedInAlignment(bytesBuffered) .setAlignmentDurationNanos(latestAlignmentDurationNanos); toNotifyOnCheckpoint.triggerCheckpointOnBarrier( checkpointMetaData, checkpointBarrier.getCheckpointOptions(), checkpointMetrics); } } private void notifyAbortOnCancellationBarrier(long checkpointId) throws Exception { notifyAbort(checkpointId, new CheckpointDeclineOnCancellationBarrierException()); } private void notifyAbort(long checkpointId, CheckpointDeclineException cause) throws Exception { if (toNotifyOnCheckpoint != null) { toNotifyOnCheckpoint.abortCheckpointOnBarrier(checkpointId, cause); } } private void checkSizeLimit() throws Exception { if (maxBufferedBytes > 0 && (numQueuedBytes + bufferBlocker.getBytesBlocked()) > maxBufferedBytes) { LOG.info("{}: Checkpoint {} aborted because alignment volume limit ({} bytes) exceeded.", taskName, currentCheckpointId, maxBufferedBytes); releaseBlocksAndResetBarriers(); notifyAbort(currentCheckpointId, new AlignmentLimitExceededException(maxBufferedBytes)); } } @Override public void registerCheckpointEventHandler(AbstractInvokable toNotifyOnCheckpoint) { if (this.toNotifyOnCheckpoint == null) { this.toNotifyOnCheckpoint = toNotifyOnCheckpoint; } else { throw new IllegalStateException("BarrierBuffer already has a registered checkpoint notifyee"); } } @Override public boolean isEmpty() { return currentBuffered == null; } @Override public boolean isFinished() { return isFinished; } @Override public void cleanup() throws IOException { bufferBlocker.close(); if (currentBuffered != null) { currentBuffered.cleanup(); } for (BufferOrEventSequence seq : queuedBuffered) { seq.cleanup(); } queuedBuffered.clear(); numQueuedBytes = 0L; } private void beginNewAlignment(long checkpointId, int channelIndex) throws IOException { currentCheckpointId = checkpointId; onBarrier(channelIndex); startOfAlignmentTimestamp = System.nanoTime(); if 
(LOG.isDebugEnabled()) { LOG.debug("{}: Starting stream alignment for checkpoint {}.", taskName, checkpointId); } } /** * Checks whether the channel with the given index is blocked. * * @param channelIndex The channel index to check. * @return True if the channel is blocked, false if not. */ private boolean isBlocked(int channelIndex) { return blockedChannels[channelIndex]; } /** * Blocks the given channel index, from which a barrier has been received. * * @param channelIndex The channel index to block. */ private void onBarrier(int channelIndex) throws IOException { if (!blockedChannels[channelIndex]) { blockedChannels[channelIndex] = true; numBarriersReceived++; if (LOG.isDebugEnabled()) { LOG.debug("{}: Received barrier from channel {}.", taskName, channelIndex); } } else { throw new IOException("Stream corrupt: Repeated barrier for same checkpoint on input " + channelIndex); } } /** * Releases the blocks on all channels and resets the barrier count. * Makes sure the just written data is the next to be consumed. */ private void releaseBlocksAndResetBarriers() throws IOException { LOG.debug("{}: End of stream alignment, feeding buffered data back.", taskName); for (int i = 0; i < blockedChannels.length; i++) { blockedChannels[i] = false; } if (currentBuffered == null) { currentBuffered = bufferBlocker.rollOverReusingResources(); if (currentBuffered != null) { currentBuffered.open(); } } else { LOG.debug("{}: Checkpoint skipped via buffered data:" + "Pushing back current alignment buffers and feeding back new alignment data first.", taskName); BufferOrEventSequence bufferedNow = bufferBlocker.rollOverWithoutReusingResources(); if (bufferedNow != null) { bufferedNow.open(); queuedBuffered.addFirst(currentBuffered); numQueuedBytes += currentBuffered.size(); currentBuffered = bufferedNow; } } if (LOG.isDebugEnabled()) { LOG.debug("{}: Size of buffered data: {} bytes", taskName, currentBuffered == null ? 
0L : currentBuffered.size()); } numBarriersReceived = 0; if (startOfAlignmentTimestamp > 0) { latestAlignmentDurationNanos = System.nanoTime() - startOfAlignmentTimestamp; startOfAlignmentTimestamp = 0; } } /** * Gets the ID defining the current pending, or just completed, checkpoint. * * @return The ID of the pending of completed checkpoint. */ public long getCurrentCheckpointId() { return this.currentCheckpointId; } @Override public long getAlignmentDurationNanos() { long start = this.startOfAlignmentTimestamp; if (start <= 0) { return latestAlignmentDurationNanos; } else { return System.nanoTime() - start; } } @Override public String toString() { return String.format("%s: last checkpoint: %d, current barriers: %d, closed channels: %d", taskName, currentCheckpointId, numBarriersReceived, numClosedChannels); } }
fileSystem.listLocatedStatus is an api to list all statuses and block locations of the files in the given path in one operation. The performance is better than getting status and block location one by one.
private List<HdfsFileDesc> getHdfsFileDescs(String dirPath) throws Exception { URI uri = new URI(dirPath); FileSystem fileSystem = getFileSystem(uri); RemoteIterator<LocatedFileStatus> blockIterator = fileSystem.listLocatedStatus(new Path(uri.getPath())); List<HdfsFileDesc> fileDescs = Lists.newArrayList(); while (blockIterator.hasNext()) { LocatedFileStatus locatedFileStatus = blockIterator.next(); if (!isValidDataFile(locatedFileStatus)) { continue; } String fileName = Utils.getSuffixName(dirPath, locatedFileStatus.getPath().toString()); BlockLocation[] blockLocations = locatedFileStatus.getBlockLocations(); List<HdfsFileBlockDesc> fileBlockDescs = getHdfsFileBlockDescs(blockLocations); fileDescs.add(new HdfsFileDesc(fileName, "", locatedFileStatus.getLen(), ImmutableList.copyOf(fileBlockDescs))); } return fileDescs; }
private List<HdfsFileDesc> getHdfsFileDescs(String dirPath) throws Exception { URI uri = new URI(dirPath); FileSystem fileSystem = getFileSystem(uri); RemoteIterator<LocatedFileStatus> blockIterator = fileSystem.listLocatedStatus(new Path(uri.getPath())); List<HdfsFileDesc> fileDescs = Lists.newArrayList(); while (blockIterator.hasNext()) { LocatedFileStatus locatedFileStatus = blockIterator.next(); if (!isValidDataFile(locatedFileStatus)) { continue; } String fileName = Utils.getSuffixName(dirPath, locatedFileStatus.getPath().toString()); BlockLocation[] blockLocations = locatedFileStatus.getBlockLocations(); List<HdfsFileBlockDesc> fileBlockDescs = getHdfsFileBlockDescs(blockLocations); fileDescs.add(new HdfsFileDesc(fileName, "", locatedFileStatus.getLen(), ImmutableList.copyOf(fileBlockDescs))); } return fileDescs; }
class relies on opening if (Thread.currentThread().getContextClassLoader() == null) { Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader()); }
class relies on opening if (Thread.currentThread().getContextClassLoader() == null) { Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader()); }
This happens because the `stringValue()` method of the `io.ballerina.runtime.internal.values.FPValue` adds a `function` prefix, in https://github.com/ballerina-platform/ballerina-lang/blob/865850e24ff2882178085c7b0ed3948248e5df8c/bvm/ballerina-runtime/src/main/java/io/ballerina/runtime/internal/values/FPValue.java#L86 I had a call with Waruna and he will try removing the prefix, which would solve the issue.
public void variableReferenceEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, NIL_VAR, "()", "nil"); debugTestRunner.assertExpression(context, BOOLEAN_VAR, "true", "boolean"); debugTestRunner.assertExpression(context, INT_VAR, "20", "int"); debugTestRunner.assertExpression(context, FLOAT_VAR, "-10.0", "float"); debugTestRunner.assertExpression(context, DECIMAL_VAR, "3.5", "decimal"); debugTestRunner.assertExpression(context, STRING_VAR, "foo", "string"); debugTestRunner.assertExpression(context, XML_VAR, "<person " + "gender=\"male\"><firstname>Praveen</firstname><lastname>Nada</lastname></person>", "xml"); debugTestRunner.assertExpression(context, ARRAY_VAR, "any[4]", "array"); debugTestRunner.assertExpression(context, TUPLE_VAR, "tuple[int,string]", "tuple"); debugTestRunner.assertExpression(context, MAP_VAR, "map", "map"); debugTestRunner.assertExpression(context, RECORD_VAR, " /:@[`{~π_123_ƮέŞŢ_Student", "record"); debugTestRunner.assertExpression(context, ANON_RECORD_VAR, "record {| string city; string country; |}", "record"); debugTestRunner.assertExpression(context, ERROR_VAR, "SimpleErrorType", "error"); debugTestRunner.assertExpression(context, ANON_FUNCTION_VAR, "function isolated function (string,string) " + "returns (string)", "function"); debugTestRunner.assertExpression(context, FUTURE_VAR, "future", "future"); debugTestRunner.assertExpression(context, OBJECT_VAR, "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertExpression(context, TYPEDESC_VAR, "int", "typedesc"); debugTestRunner.assertExpression(context, UNION_VAR, "foo", "string"); debugTestRunner.assertExpression(context, OPTIONAL_VAR, "foo", "string"); debugTestRunner.assertExpression(context, ANY_VAR, "15.0", "float"); debugTestRunner.assertExpression(context, ANYDATA_VAR, "619", "int"); debugTestRunner.assertExpression(context, BYTE_VAR, "128", "int"); debugTestRunner.assertExpression(context, TABLE_VAR, "table<Employee>[3]", "table"); 
debugTestRunner.assertExpression(context, STREAM_VAR, "stream<int>", "stream"); debugTestRunner.assertExpression(context, NEVER_VAR, "", "xml"); debugTestRunner.assertExpression(context, JSON_VAR, "map<json>", "json"); debugTestRunner.assertExpression(context, ANON_OBJECT_VAR, "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertExpression(context, GLOBAL_VAR_03, "map", "map"); debugTestRunner.assertExpression(context, GLOBAL_VAR_04, "()", "nil"); debugTestRunner.assertExpression(context, GLOBAL_VAR_05, "()", "nil"); debugTestRunner.assertExpression(context, GLOBAL_VAR_06, "Ballerina", "string"); debugTestRunner.assertExpression(context, GLOBAL_VAR_07, "100.0", "decimal"); debugTestRunner.assertExpression(context, GLOBAL_VAR_08, "2", "int"); debugTestRunner.assertExpression(context, GLOBAL_VAR_09, "2.0", "float"); debugTestRunner.assertExpression(context, GLOBAL_VAR_10, "map<json>", "json"); debugTestRunner.assertExpression(context, GLOBAL_VAR_11, "IL with global var", "string"); } @Override @Test public void fieldAccessEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, OBJECT_VAR + ".address", "No 20, Palm grove", "string"); debugTestRunner.assertExpression(context, RECORD_VAR + ".'Ȧɢέ_\\ \\/\\:\\@\\[\\`\\{\\~π", "20", "int"); debugTestRunner.assertExpression(context, JSON_VAR + ".name", "John", "string"); debugTestRunner.assertExpression(context, RECORD_VAR + ".grades.maths", "80", "int"); debugTestRunner.assertExpression(context, RECORD_VAR + "?.undefined", "()", "nil"); } @Override @Test public void xmlAttributeAccessEvaluationTest() throws BallerinaTestException { } @Override @Test public void annotationAccessEvaluationTest() throws BallerinaTestException { } @Override @Test public void memberAccessEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, STRING_VAR + "[0]", "f", "string"); debugTestRunner.assertExpression(context, ARRAY_VAR + "[0]", "1", "int"); } @Override 
@Test public void functionCallEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, " calculate ( 5 , 6 , 7 ) ;", "38", "int"); debugTestRunner.assertExpression(context, "calculate ( 5 , c = 7 , b = 6 );", "38", "int"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500)", "[2500, 20, 0.02]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, annualIncrement = 100)", "[2500, 100, 0.02]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, 100);", "[2500, 100, 0.02]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, bonusRate = 0.1);", "[2500, 20, 0.1]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, 20, 0.1);", "[2500, 20, 0.1]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, annualIncrement = 100, bonusRate = 0.1);", "[2500, 100, 0.1]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(annualIncrement = 100, baseSalary = 2500, bonusRate = 0.1);", "[2500, 100, 0.1]", "string"); } @Override @Test public void methodCallEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, OBJECT_VAR + ".getSum(34,56)", "90", "int"); debugTestRunner.assertExpression(context, ARRAY_VAR + ".length()", "4", "int"); debugTestRunner.assertExpression(context, ARRAY_VAR + ".slice(1,3)", "any[2]", "array"); debugTestRunner.assertExpression(context, DECIMAL_VAR + ".round()", "4", "decimal"); debugTestRunner.assertExpression(context, DECIMAL_VAR + ".abs()", "3.5", "decimal"); debugTestRunner.assertExpression(context, ERROR_VAR + ".message()", "SimpleErrorType", "string"); debugTestRunner.assertExpression(context, FLOAT_VAR + ".sin()", "0.5440211108893698", "float"); debugTestRunner.assertExpression(context, FLOAT_VAR + ".pow(3.0)", "-1000.0", "float"); debugTestRunner.assertExpression(context, FUTURE_VAR + ".cancel()", "()", "nil"); 
debugTestRunner.assertExpression(context, INT_VAR + ".abs()", "20", "int"); debugTestRunner.assertExpression(context, MAP_VAR + ".get(\"country\")", "Sri Lanka", "string"); debugTestRunner.assertExpression(context, STRING_VAR + ".getCodePoint(1)", "111", "int"); debugTestRunner.assertExpression(context, STRING_VAR + ".substring(1,3)", "oo", "string"); debugTestRunner.assertExpression(context, TYPEDESC_VAR + ".toBalString()", "typedesc int", "string"); debugTestRunner.assertExpression(context, XML_VAR + ".getName()", "person", "string"); debugTestRunner.assertExpression(context, XML_VAR + ".children()", "<firstname>Praveen</firstname><lastname>Nada</lastname>", "xml"); } @Override @Test public void errorConstructorEvaluationTest() throws BallerinaTestException { } @Override @Test public void anonymousFunctionEvaluationTest() throws BallerinaTestException { } @Override @Test public void letExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void typeCastEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("<float>%s", ANYDATA_VAR), "619.0", "float"); debugTestRunner.assertExpression(context, String.format("<float|boolean>%s", ANYDATA_VAR), "619.0", "float"); } @Override @Test public void typeOfExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("typeof %s", BOOLEAN_VAR), "boolean", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s", INT_VAR), "int", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s", FLOAT_VAR), "float", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s", JSON_VAR), "map<json>", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s[0]", STRING_VAR), "string", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof typeof %s", BOOLEAN_VAR), "typedesc", "typedesc"); } 
@Override @Test public void unaryExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("+%s", INT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("+%s", FLOAT_VAR), "-10.0", "float"); debugTestRunner.assertExpression(context, String.format("+%s", DECIMAL_VAR), "3.5", "decimal"); debugTestRunner.assertExpression(context, String.format("-%s", INT_VAR), "-20", "int"); debugTestRunner.assertExpression(context, String.format("-%s", FLOAT_VAR), "10.0", "float"); debugTestRunner.assertExpression(context, String.format("-%s", DECIMAL_VAR), "-3.5", "decimal"); debugTestRunner.assertExpression(context, String.format("~%s", INT_VAR), "-21", "int"); debugTestRunner.assertExpression(context, String.format("!%s", BOOLEAN_VAR), "false", "boolean"); } @Override @Test public void multiplicativeExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s * %s", INT_VAR, INT_VAR), "400", "int"); debugTestRunner.assertExpression(context, String.format("%s * %s", FLOAT_VAR, FLOAT_VAR), "100.0", "float"); debugTestRunner.assertExpression(context, String.format("%s * %s", DECIMAL_VAR, DECIMAL_VAR), "12.25", "decimal"); debugTestRunner.assertExpression(context, String.format("%s / %s", INT_VAR, INT_VAR), "1", "int"); debugTestRunner.assertExpression(context, String.format("%s / %s", FLOAT_VAR, FLOAT_VAR), "1.0", "float"); debugTestRunner.assertExpression(context, String.format("%s / %s", DECIMAL_VAR, DECIMAL_VAR), "1", "decimal"); debugTestRunner.assertExpression(context, String.format("%s %% %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s %% %s", FLOAT_VAR, FLOAT_VAR), "-0.0", "float"); debugTestRunner.assertExpression(context, String.format("%s %% %s", DECIMAL_VAR, DECIMAL_VAR), "0", "decimal"); } @Override @Test public void additiveExpressionEvaluationTest() throws BallerinaTestException { 
debugTestRunner.assertExpression(context, String.format("%s + %s", INT_VAR, INT_VAR), "40", "int"); debugTestRunner.assertExpression(context, String.format("%s + %s", FLOAT_VAR, FLOAT_VAR), "-20.0", "float"); debugTestRunner.assertExpression(context, String.format("%s + %s", DECIMAL_VAR, DECIMAL_VAR), "7.0", "decimal"); debugTestRunner.assertExpression(context, String.format("%s + %s", STRING_VAR, STRING_VAR), "foofoo", "string"); debugTestRunner.assertExpression(context, "\" one \" + \" two \" + \" three \"", " one two three ", "string"); String bStringTemplateExpr = String.format("string `name: ${%s}, age: ${%s}`", STRING_VAR, INT_VAR); debugTestRunner.assertExpression(context, String.format("%s + %s + %s", bStringTemplateExpr, bStringTemplateExpr, bStringTemplateExpr), "name: foo, age: 20name: foo, age: 20name: foo, age: 20", "string"); debugTestRunner.assertExpression(context, String.format("%s + %s", XML_VAR, XML_VAR), "<person gender=\"male\">" + "<firstname>Praveen</firstname><lastname>Nada</lastname></person><person gender=\"male\">" + "<firstname>Praveen</firstname><lastname>Nada</lastname></person>", "xml"); debugTestRunner.assertExpression(context, String.format("%s - %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s - %s", FLOAT_VAR, FLOAT_VAR), "0.0", "float"); debugTestRunner.assertExpression(context, String.format("%s - %s", DECIMAL_VAR, DECIMAL_VAR), "0", "decimal"); } @Override @Test public void shiftExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s << %s", INT_VAR, INT_VAR), "20971520", "int"); debugTestRunner.assertExpression(context, String.format("%s << %s", SIGNED32INT_VAR, SIGNED8INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s << %s", SIGNED32INT_VAR, UNSIGNED8INT_VAR), "-2000", "int"); debugTestRunner.assertExpression(context, String.format("%s << %s", UNSIGNED32INT_VAR, SIGNED8INT_VAR), "0", 
"int"); debugTestRunner.assertExpression(context, String.format("%s << %s", UNSIGNED32INT_VAR, UNSIGNED8INT_VAR), "2000", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", SIGNED32INT_VAR, SIGNED8INT_VAR), "-1", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", SIGNED32INT_VAR, UNSIGNED8INT_VAR), "-500", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", UNSIGNED32INT_VAR, SIGNED8INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", UNSIGNED32INT_VAR, UNSIGNED8INT_VAR), "500", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", SIGNED32INT_VAR, SIGNED8INT_VAR), "1", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", SIGNED32INT_VAR, UNSIGNED8INT_VAR), "9223372036854775308", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", UNSIGNED32INT_VAR, SIGNED8INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", UNSIGNED32INT_VAR, UNSIGNED8INT_VAR), "500", "int"); } @Override @Test public void rangeExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void comparisonEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s < %s", NIL_VAR, NIL_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", BOOLEAN_VAR, BOOLEAN_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", INT_VAR, INT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", FLOAT_VAR, FLOAT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", DECIMAL_VAR, 
DECIMAL_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", STRING_VAR, STRING_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "booleanArrayVar", "booleanArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "intArrayVar", "intArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "floatArrayVar", "floatArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "decimalArrayVar", "decimalArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "stringArrayVar", "stringArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", NIL_VAR, NIL_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", BOOLEAN_VAR, BOOLEAN_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", INT_VAR, INT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", FLOAT_VAR, FLOAT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", DECIMAL_VAR, DECIMAL_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", STRING_VAR, STRING_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "booleanArrayVar", "booleanArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "intArrayVar", "intArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "floatArrayVar", "floatArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "decimalArrayVar", "decimalArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= 
%s", "stringArrayVar", "stringArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s > %s", INT_VAR, INT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s >= %s", INT_VAR, INT_VAR), "true", "boolean"); } @Override @Test public void typeTestEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s is string", INT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is int", INT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is error", ERROR_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is int | string", STRING_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is 'Person_\\\\\\ \\/\\<\\>\\:\\@\\[\\`\\{\\~" + "\\u{03C0}_ƮέŞŢ", OBJECT_VAR), "true", "boolean"); } @Override @Test public void equalityEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "2.0==2.00", "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s==%s", INT_VAR, FLOAT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s==%s", OBJECT_VAR, ANON_OBJECT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, "2.0!=2.00", "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s!=%s", INT_VAR, FLOAT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s!=%s", OBJECT_VAR, ANON_OBJECT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, "2.0===2.00", "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s===%s", INT_VAR, FLOAT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s===%s", OBJECT_VAR, ANON_OBJECT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, "2.0!==2.00", "false", 
"boolean"); debugTestRunner.assertExpression(context, String.format("%s!==%s", INT_VAR, FLOAT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s!==%s", OBJECT_VAR, ANON_OBJECT_VAR), "true", "boolean"); } @Override @Test public void binaryBitwiseEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s & %s", INT_VAR, INT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s | %s", INT_VAR, INT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s ^ %s", INT_VAR, INT_VAR), "0", "int"); } @Override @Test public void logicalEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s && false", BOOLEAN_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s || false", BOOLEAN_VAR), "true", "boolean"); } @Override @Test public void conditionalExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s ? %s : %s", BOOLEAN_VAR, INT_VAR, FLOAT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s ?: %s", INT_VAR, FLOAT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s ?: %s", NIL_VAR, FLOAT_VAR), "-10.0", "float"); } @Override @Test public void checkingExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void trapExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void queryExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void xmlNavigationEvaluationTest() throws BallerinaTestException { } @AfterClass(alwaysRun = true) public void cleanUp() { debugTestRunner.terminateDebugSession(); this.context = null; } }
debugTestRunner.assertExpression(context, ANON_FUNCTION_VAR, "function isolated function (string,string) " +
public void variableReferenceEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, NIL_VAR, "()", "nil"); debugTestRunner.assertExpression(context, BOOLEAN_VAR, "true", "boolean"); debugTestRunner.assertExpression(context, INT_VAR, "20", "int"); debugTestRunner.assertExpression(context, FLOAT_VAR, "-10.0", "float"); debugTestRunner.assertExpression(context, DECIMAL_VAR, "3.5", "decimal"); debugTestRunner.assertExpression(context, STRING_VAR, "foo", "string"); debugTestRunner.assertExpression(context, XML_VAR, "<person " + "gender=\"male\"><firstname>Praveen</firstname><lastname>Nada</lastname></person>", "xml"); debugTestRunner.assertExpression(context, ARRAY_VAR, "any[4]", "array"); debugTestRunner.assertExpression(context, TUPLE_VAR, "tuple[int,string]", "tuple"); debugTestRunner.assertExpression(context, MAP_VAR, "map", "map"); debugTestRunner.assertExpression(context, RECORD_VAR, " /:@[`{~π_123_ƮέŞŢ_Student", "record"); debugTestRunner.assertExpression(context, ANON_RECORD_VAR, "record {| string city; string country; |}", "record"); debugTestRunner.assertExpression(context, ERROR_VAR, "SimpleErrorType", "error"); debugTestRunner.assertExpression(context, ANON_FUNCTION_VAR, "function isolated function (string,string) " + "returns (string)", "function"); debugTestRunner.assertExpression(context, FUTURE_VAR, "future", "future"); debugTestRunner.assertExpression(context, OBJECT_VAR, "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertExpression(context, TYPEDESC_VAR, "int", "typedesc"); debugTestRunner.assertExpression(context, UNION_VAR, "foo", "string"); debugTestRunner.assertExpression(context, OPTIONAL_VAR, "foo", "string"); debugTestRunner.assertExpression(context, ANY_VAR, "15.0", "float"); debugTestRunner.assertExpression(context, ANYDATA_VAR, "619", "int"); debugTestRunner.assertExpression(context, BYTE_VAR, "128", "int"); debugTestRunner.assertExpression(context, TABLE_VAR, "table<Employee>[3]", "table"); 
debugTestRunner.assertExpression(context, STREAM_VAR, "stream<int>", "stream"); debugTestRunner.assertExpression(context, NEVER_VAR, "", "xml"); debugTestRunner.assertExpression(context, JSON_VAR, "map<json>", "json"); debugTestRunner.assertExpression(context, ANON_OBJECT_VAR, "Person_\\ /<>:@[`{~π_ƮέŞŢ", "object"); debugTestRunner.assertExpression(context, GLOBAL_VAR_03, "map", "map"); debugTestRunner.assertExpression(context, GLOBAL_VAR_04, "()", "nil"); debugTestRunner.assertExpression(context, GLOBAL_VAR_05, "()", "nil"); debugTestRunner.assertExpression(context, GLOBAL_VAR_06, "Ballerina", "string"); debugTestRunner.assertExpression(context, GLOBAL_VAR_07, "100.0", "decimal"); debugTestRunner.assertExpression(context, GLOBAL_VAR_08, "2", "int"); debugTestRunner.assertExpression(context, GLOBAL_VAR_09, "2.0", "float"); debugTestRunner.assertExpression(context, GLOBAL_VAR_10, "map<json>", "json"); debugTestRunner.assertExpression(context, GLOBAL_VAR_11, "IL with global var", "string"); } @Override @Test public void fieldAccessEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, OBJECT_VAR + ".address", "No 20, Palm grove", "string"); debugTestRunner.assertExpression(context, RECORD_VAR + ".'Ȧɢέ_\\ \\/\\:\\@\\[\\`\\{\\~π", "20", "int"); debugTestRunner.assertExpression(context, JSON_VAR + ".name", "John", "string"); debugTestRunner.assertExpression(context, RECORD_VAR + ".grades.maths", "80", "int"); debugTestRunner.assertExpression(context, RECORD_VAR + "?.undefined", "()", "nil"); } @Override @Test public void xmlAttributeAccessEvaluationTest() throws BallerinaTestException { } @Override @Test public void annotationAccessEvaluationTest() throws BallerinaTestException { } @Override @Test public void memberAccessEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, STRING_VAR + "[0]", "f", "string"); debugTestRunner.assertExpression(context, ARRAY_VAR + "[0]", "1", "int"); } @Override 
@Test public void functionCallEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, " calculate ( 5 , 6 , 7 ) ;", "38", "int"); debugTestRunner.assertExpression(context, "calculate ( 5 , c = 7 , b = 6 );", "38", "int"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500)", "[2500, 20, 0.02]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, annualIncrement = 100)", "[2500, 100, 0.02]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, 100);", "[2500, 100, 0.02]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, bonusRate = 0.1);", "[2500, 20, 0.1]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, 20, 0.1);", "[2500, 20, 0.1]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(2500, annualIncrement = 100, bonusRate = 0.1);", "[2500, 100, 0.1]", "string"); debugTestRunner.assertExpression(context, "printSalaryDetails(annualIncrement = 100, baseSalary = 2500, bonusRate = 0.1);", "[2500, 100, 0.1]", "string"); } @Override @Test public void methodCallEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, OBJECT_VAR + ".getSum(34,56)", "90", "int"); debugTestRunner.assertExpression(context, ARRAY_VAR + ".length()", "4", "int"); debugTestRunner.assertExpression(context, ARRAY_VAR + ".slice(1,3)", "any[2]", "array"); debugTestRunner.assertExpression(context, DECIMAL_VAR + ".round()", "4", "decimal"); debugTestRunner.assertExpression(context, DECIMAL_VAR + ".abs()", "3.5", "decimal"); debugTestRunner.assertExpression(context, ERROR_VAR + ".message()", "SimpleErrorType", "string"); debugTestRunner.assertExpression(context, FLOAT_VAR + ".sin()", "0.5440211108893698", "float"); debugTestRunner.assertExpression(context, FLOAT_VAR + ".pow(3.0)", "-1000.0", "float"); debugTestRunner.assertExpression(context, FUTURE_VAR + ".cancel()", "()", "nil"); 
debugTestRunner.assertExpression(context, INT_VAR + ".abs()", "20", "int"); debugTestRunner.assertExpression(context, MAP_VAR + ".get(\"country\")", "Sri Lanka", "string"); debugTestRunner.assertExpression(context, STRING_VAR + ".getCodePoint(1)", "111", "int"); debugTestRunner.assertExpression(context, STRING_VAR + ".substring(1,3)", "oo", "string"); debugTestRunner.assertExpression(context, TYPEDESC_VAR + ".toBalString()", "typedesc int", "string"); debugTestRunner.assertExpression(context, XML_VAR + ".getName()", "person", "string"); debugTestRunner.assertExpression(context, XML_VAR + ".children()", "<firstname>Praveen</firstname><lastname>Nada</lastname>", "xml"); } @Override @Test public void errorConstructorEvaluationTest() throws BallerinaTestException { } @Override @Test public void anonymousFunctionEvaluationTest() throws BallerinaTestException { } @Override @Test public void letExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void typeCastEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("<float>%s", ANYDATA_VAR), "619.0", "float"); debugTestRunner.assertExpression(context, String.format("<float|boolean>%s", ANYDATA_VAR), "619.0", "float"); } @Override @Test public void typeOfExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("typeof %s", BOOLEAN_VAR), "boolean", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s", INT_VAR), "int", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s", FLOAT_VAR), "float", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s", JSON_VAR), "map<json>", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof %s[0]", STRING_VAR), "string", "typedesc"); debugTestRunner.assertExpression(context, String.format("typeof typeof %s", BOOLEAN_VAR), "typedesc", "typedesc"); } 
@Override @Test public void unaryExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("+%s", INT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("+%s", FLOAT_VAR), "-10.0", "float"); debugTestRunner.assertExpression(context, String.format("+%s", DECIMAL_VAR), "3.5", "decimal"); debugTestRunner.assertExpression(context, String.format("-%s", INT_VAR), "-20", "int"); debugTestRunner.assertExpression(context, String.format("-%s", FLOAT_VAR), "10.0", "float"); debugTestRunner.assertExpression(context, String.format("-%s", DECIMAL_VAR), "-3.5", "decimal"); debugTestRunner.assertExpression(context, String.format("~%s", INT_VAR), "-21", "int"); debugTestRunner.assertExpression(context, String.format("!%s", BOOLEAN_VAR), "false", "boolean"); } @Override @Test public void multiplicativeExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s * %s", INT_VAR, INT_VAR), "400", "int"); debugTestRunner.assertExpression(context, String.format("%s * %s", FLOAT_VAR, FLOAT_VAR), "100.0", "float"); debugTestRunner.assertExpression(context, String.format("%s * %s", DECIMAL_VAR, DECIMAL_VAR), "12.25", "decimal"); debugTestRunner.assertExpression(context, String.format("%s / %s", INT_VAR, INT_VAR), "1", "int"); debugTestRunner.assertExpression(context, String.format("%s / %s", FLOAT_VAR, FLOAT_VAR), "1.0", "float"); debugTestRunner.assertExpression(context, String.format("%s / %s", DECIMAL_VAR, DECIMAL_VAR), "1", "decimal"); debugTestRunner.assertExpression(context, String.format("%s %% %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s %% %s", FLOAT_VAR, FLOAT_VAR), "-0.0", "float"); debugTestRunner.assertExpression(context, String.format("%s %% %s", DECIMAL_VAR, DECIMAL_VAR), "0", "decimal"); } @Override @Test public void additiveExpressionEvaluationTest() throws BallerinaTestException { 
debugTestRunner.assertExpression(context, String.format("%s + %s", INT_VAR, INT_VAR), "40", "int"); debugTestRunner.assertExpression(context, String.format("%s + %s", FLOAT_VAR, FLOAT_VAR), "-20.0", "float"); debugTestRunner.assertExpression(context, String.format("%s + %s", DECIMAL_VAR, DECIMAL_VAR), "7.0", "decimal"); debugTestRunner.assertExpression(context, String.format("%s + %s", STRING_VAR, STRING_VAR), "foofoo", "string"); debugTestRunner.assertExpression(context, "\" one \" + \" two \" + \" three \"", " one two three ", "string"); String bStringTemplateExpr = String.format("string `name: ${%s}, age: ${%s}`", STRING_VAR, INT_VAR); debugTestRunner.assertExpression(context, String.format("%s + %s + %s", bStringTemplateExpr, bStringTemplateExpr, bStringTemplateExpr), "name: foo, age: 20name: foo, age: 20name: foo, age: 20", "string"); debugTestRunner.assertExpression(context, String.format("%s + %s", XML_VAR, XML_VAR), "<person gender=\"male\">" + "<firstname>Praveen</firstname><lastname>Nada</lastname></person><person gender=\"male\">" + "<firstname>Praveen</firstname><lastname>Nada</lastname></person>", "xml"); debugTestRunner.assertExpression(context, String.format("%s - %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s - %s", FLOAT_VAR, FLOAT_VAR), "0.0", "float"); debugTestRunner.assertExpression(context, String.format("%s - %s", DECIMAL_VAR, DECIMAL_VAR), "0", "decimal"); } @Override @Test public void shiftExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s << %s", INT_VAR, INT_VAR), "20971520", "int"); debugTestRunner.assertExpression(context, String.format("%s << %s", SIGNED32INT_VAR, SIGNED8INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s << %s", SIGNED32INT_VAR, UNSIGNED8INT_VAR), "-2000", "int"); debugTestRunner.assertExpression(context, String.format("%s << %s", UNSIGNED32INT_VAR, SIGNED8INT_VAR), "0", 
"int"); debugTestRunner.assertExpression(context, String.format("%s << %s", UNSIGNED32INT_VAR, UNSIGNED8INT_VAR), "2000", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", SIGNED32INT_VAR, SIGNED8INT_VAR), "-1", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", SIGNED32INT_VAR, UNSIGNED8INT_VAR), "-500", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", UNSIGNED32INT_VAR, SIGNED8INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >> %s", UNSIGNED32INT_VAR, UNSIGNED8INT_VAR), "500", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", INT_VAR, INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", SIGNED32INT_VAR, SIGNED8INT_VAR), "1", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", SIGNED32INT_VAR, UNSIGNED8INT_VAR), "9223372036854775308", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", UNSIGNED32INT_VAR, SIGNED8INT_VAR), "0", "int"); debugTestRunner.assertExpression(context, String.format("%s >>> %s", UNSIGNED32INT_VAR, UNSIGNED8INT_VAR), "500", "int"); } @Override @Test public void rangeExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void comparisonEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s < %s", NIL_VAR, NIL_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", BOOLEAN_VAR, BOOLEAN_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", INT_VAR, INT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", FLOAT_VAR, FLOAT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", DECIMAL_VAR, 
DECIMAL_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", STRING_VAR, STRING_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "booleanArrayVar", "booleanArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "intArrayVar", "intArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "floatArrayVar", "floatArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "decimalArrayVar", "decimalArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s < %s", "stringArrayVar", "stringArrayVar"), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", NIL_VAR, NIL_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", BOOLEAN_VAR, BOOLEAN_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", INT_VAR, INT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", FLOAT_VAR, FLOAT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", DECIMAL_VAR, DECIMAL_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", STRING_VAR, STRING_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "booleanArrayVar", "booleanArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "intArrayVar", "intArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "floatArrayVar", "floatArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= %s", "decimalArrayVar", "decimalArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s <= 
%s", "stringArrayVar", "stringArrayVar"), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s > %s", INT_VAR, INT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s >= %s", INT_VAR, INT_VAR), "true", "boolean"); } @Override @Test public void typeTestEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s is string", INT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is int", INT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is error", ERROR_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is int | string", STRING_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s is 'Person_\\\\\\ \\/\\<\\>\\:\\@\\[\\`\\{\\~" + "\\u{03C0}_ƮέŞŢ", OBJECT_VAR), "true", "boolean"); } @Override @Test public void equalityEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "2.0==2.00", "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s==%s", INT_VAR, FLOAT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s==%s", OBJECT_VAR, ANON_OBJECT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, "2.0!=2.00", "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s!=%s", INT_VAR, FLOAT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s!=%s", OBJECT_VAR, ANON_OBJECT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, "2.0===2.00", "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s===%s", INT_VAR, FLOAT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s===%s", OBJECT_VAR, ANON_OBJECT_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, "2.0!==2.00", "false", 
"boolean"); debugTestRunner.assertExpression(context, String.format("%s!==%s", INT_VAR, FLOAT_VAR), "true", "boolean"); debugTestRunner.assertExpression(context, String.format("%s!==%s", OBJECT_VAR, ANON_OBJECT_VAR), "true", "boolean"); } @Override @Test public void binaryBitwiseEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s & %s", INT_VAR, INT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s | %s", INT_VAR, INT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s ^ %s", INT_VAR, INT_VAR), "0", "int"); } @Override @Test public void logicalEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s && false", BOOLEAN_VAR), "false", "boolean"); debugTestRunner.assertExpression(context, String.format("%s || false", BOOLEAN_VAR), "true", "boolean"); } @Override @Test public void conditionalExpressionEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, String.format("%s ? %s : %s", BOOLEAN_VAR, INT_VAR, FLOAT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s ?: %s", INT_VAR, FLOAT_VAR), "20", "int"); debugTestRunner.assertExpression(context, String.format("%s ?: %s", NIL_VAR, FLOAT_VAR), "-10.0", "float"); } @Override @Test public void checkingExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void trapExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void queryExpressionEvaluationTest() throws BallerinaTestException { } @Override @Test public void xmlNavigationEvaluationTest() throws BallerinaTestException { } @AfterClass(alwaysRun = true) public void cleanUp() { debugTestRunner.terminateDebugSession(); this.context = null; } }
class ExpressionEvaluationTest extends ExpressionEvaluationBaseTest { @BeforeClass(alwaysRun = true) public void setup() throws BallerinaTestException { prepareForEvaluation(); } @Override @Test public void literalEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "()", "()", "nil"); debugTestRunner.assertExpression(context, "true", "true", "boolean"); debugTestRunner.assertExpression(context, "10", "10", "int"); debugTestRunner.assertExpression(context, "-20", "-20", "int"); debugTestRunner.assertExpression(context, "0xabc", "2748", "int"); debugTestRunner.assertExpression(context, "-0X999", "-2457", "int"); debugTestRunner.assertExpression(context, "20.0", "20.0", "float"); debugTestRunner.assertExpression(context, "-30.0f", "-30.0", "float"); debugTestRunner.assertExpression(context, "-40.0F", "-40.0", "float"); debugTestRunner.assertExpression(context, "-5.0e34f", "-5.0E34", "float"); debugTestRunner.assertExpression(context, "-30.0d", "-30.0", "decimal"); debugTestRunner.assertExpression(context, "-40.0D", "-40.0", "decimal"); debugTestRunner.assertExpression(context, "-5.0e34d", "-5.0E+34", "decimal"); } @Override @Test public void listConstructorEvaluationTest() throws BallerinaTestException { } @Override @Test public void mappingConstructorEvaluationTest() throws BallerinaTestException { } @Override @Test public void stringTemplateEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "string `name: John, age: 20`", "name: John, age: 20", "string"); debugTestRunner.assertExpression(context, "string `name: ${" + STRING_VAR + "}, age: ${" + INT_VAR + "}`", "name: foo, age: 20", "string"); } @Override @Test public void xmlTemplateEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "xml `<book>The Lost World</book>`", "<book>The Lost World</book>", "xml"); } @Override @Test public void newConstructorEvaluationTest() throws BallerinaTestException { 
} @Override @Test
class ExpressionEvaluationTest extends ExpressionEvaluationBaseTest { @BeforeClass(alwaysRun = true) public void setup() throws BallerinaTestException { prepareForEvaluation(); } @Override @Test public void literalEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "()", "()", "nil"); debugTestRunner.assertExpression(context, "true", "true", "boolean"); debugTestRunner.assertExpression(context, "10", "10", "int"); debugTestRunner.assertExpression(context, "-20", "-20", "int"); debugTestRunner.assertExpression(context, "0xabc", "2748", "int"); debugTestRunner.assertExpression(context, "-0X999", "-2457", "int"); debugTestRunner.assertExpression(context, "20.0", "20.0", "float"); debugTestRunner.assertExpression(context, "-30.0f", "-30.0", "float"); debugTestRunner.assertExpression(context, "-40.0F", "-40.0", "float"); debugTestRunner.assertExpression(context, "-5.0e34f", "-5.0E34", "float"); debugTestRunner.assertExpression(context, "-30.0d", "-30.0", "decimal"); debugTestRunner.assertExpression(context, "-40.0D", "-40.0", "decimal"); debugTestRunner.assertExpression(context, "-5.0e34d", "-5.0E+34", "decimal"); } @Override @Test public void listConstructorEvaluationTest() throws BallerinaTestException { } @Override @Test public void mappingConstructorEvaluationTest() throws BallerinaTestException { } @Override @Test public void stringTemplateEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "string `name: John, age: 20`", "name: John, age: 20", "string"); debugTestRunner.assertExpression(context, "string `name: ${" + STRING_VAR + "}, age: ${" + INT_VAR + "}`", "name: foo, age: 20", "string"); } @Override @Test public void xmlTemplateEvaluationTest() throws BallerinaTestException { debugTestRunner.assertExpression(context, "xml `<book>The Lost World</book>`", "<book>The Lost World</book>", "xml"); } @Override @Test public void newConstructorEvaluationTest() throws BallerinaTestException { 
} @Override @Test
Is it possible to merge this section with the `lookAheadForDocumentationReference` method? i.e: lookAheadForDocumentationReference will process all the tokens and returns the token, instead of the lookahead count
private STToken processQuotedIdentifier() { while (!reader.isEOF()) { int nextChar = reader.peek(); if (isIdentifierFollowingChar(nextChar)) { reader.advance(); continue; } if (nextChar != '\\') { break; } nextChar = reader.peek(1); switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.TAB: break; case 'u': if (reader.peek(2) == '{') { processStringNumericEscape(); } else { reader.advance(2); } continue; default: if ('A' <= nextChar && nextChar <= 'Z') { break; } if ('a' <= nextChar && nextChar <= 'z') { break; } reader.advance(2); continue; } break; } return getIdentifierToken(getLexeme()); } private STToken processTokenStartWithGt() { if (peek() == LexerTerminals.EQUAL) { reader.advance(); return getSyntaxToken(SyntaxKind.GT_EQUAL_TOKEN); } if (reader.peek() != LexerTerminals.GT) { return getSyntaxToken(SyntaxKind.GT_TOKEN); } char nextChar = reader.peek(1); switch (nextChar) { case LexerTerminals.GT: if (reader.peek(2) == LexerTerminals.EQUAL) { reader.advance(2); return getSyntaxToken(SyntaxKind.TRIPPLE_GT_TOKEN); } return getSyntaxToken(SyntaxKind.GT_TOKEN); case LexerTerminals.EQUAL: reader.advance(1); return getSyntaxToken(SyntaxKind.DOUBLE_GT_TOKEN); default: return getSyntaxToken(SyntaxKind.GT_TOKEN); } } /* * ------------------------------------------------------------------------------------------------------------ * INTERPOLATION Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readTokenInInterpolation() { reader.mark(); int nextChar = peek(); switch (nextChar) { case LexerTerminals.OPEN_BRACE: startMode(ParserMode.INTERPOLATION_BRACED_CONTENT); return readToken(); case LexerTerminals.CLOSE_BRACE: endMode(); reader.advance(); return getSyntaxTokenWithoutTrailingTrivia(SyntaxKind.CLOSE_BRACE_TOKEN); case LexerTerminals.BACKTICK: default: return readToken(); } } private STToken 
getSyntaxTokenWithoutTrailingTrivia(SyntaxKind kind) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); STNode trailingTrivia = STNodeFactory.createNodeList(new ArrayList<>(0)); return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia); } /* * ------------------------------------------------------------------------------------------------------------ * INTERPOLATION_BRACED_CONTENT Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readTokenInBracedContentInInterpolation() { reader.mark(); int nextChar = peek(); switch (nextChar) { case LexerTerminals.OPEN_BRACE: startMode(ParserMode.INTERPOLATION_BRACED_CONTENT); break; case LexerTerminals.CLOSE_BRACE: case LexerTerminals.BACKTICK: endMode(); break; default: break; } return readToken(); } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationToken() { reader.mark(); int nextChar = peek(); switch (nextChar) { case LexerTerminals.PLUS: reader.advance(); switchMode(ParserMode.DOCUMENTATION_PARAMETER); return getDocumentationSyntaxToken(SyntaxKind.PLUS_TOKEN); case LexerTerminals.BACKTICK: if (reader.peek(1) != LexerTerminals.BACKTICK) { reader.advance(); switchMode(ParserMode.DOCUMENTATION_BACKTICK_CONTENT); return getSyntaxToken(SyntaxKind.BACKTICK_TOKEN); } default: while (!reader.isEOF()) { switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: endMode(); break; case LexerTerminals.BACKTICK: if (reader.peek(1) != LexerTerminals.BACKTICK) { break; } else if (reader.peek(2) != LexerTerminals.BACKTICK) { reader.advance(2); advanceReaderForBacktickContent(false); } else { reader.advance(3); advanceReaderForBacktickContent(true); } nextChar = 
peek(); continue; default: if (isIdentifierInitialChar(nextChar)) { int readerAdvanceCount = lookAheadForDocumentationReference(nextChar); if (readerAdvanceCount == 0) { switchMode(ParserMode.DOCUMENTATION_REFERENCE_TYPE); if (getLexeme().equals("")) { return readDocumentationReferenceTypeToken(); } break; } reader.advance(readerAdvanceCount); } else { reader.advance(); } nextChar = peek(); continue; } break; } } return getTemplateString(SyntaxKind.DOCUMENTATION_DESCRIPTION); } private void advanceReaderForBacktickContent(boolean isTripleBacktick) { int nextChar = peek(); while (!reader.isEOF()) { switch (nextChar) { case LexerTerminals.BACKTICK: if (isTripleBacktick) { reader.advance(); if (peek() != LexerTerminals.BACKTICK) { nextChar = peek(); continue; } } reader.advance(); if (peek() != LexerTerminals.BACKTICK) { nextChar = peek(); continue; } reader.advance(); if (peek() != LexerTerminals.BACKTICK) { return; } nextChar = peek(); continue; case LexerTerminals.NEWLINE: int lookAheadCount = 1; int lookAheadChar = reader.peek(lookAheadCount); while (lookAheadChar == LexerTerminals.SPACE || lookAheadChar == LexerTerminals.TAB) { lookAheadCount++; lookAheadChar = reader.peek(lookAheadCount); } if (lookAheadChar != LexerTerminals.HASH) { return; } reader.advance(lookAheadCount); nextChar = peek(); continue; default: reader.advance(); nextChar = peek(); } } } private int lookAheadForDocumentationReference(int nextChar) { int lookAheadChar = nextChar; int lookAheadCount = 0; String identifier = ""; while (isIdentifierInitialChar(lookAheadChar)) { identifier = identifier.concat(String.valueOf((char) lookAheadChar)); lookAheadCount++; lookAheadChar = reader.peek(lookAheadCount); } switch (identifier) { case LexerTerminals.TYPE: case LexerTerminals.SERVICE: case LexerTerminals.VARIABLE: case LexerTerminals.VAR: case LexerTerminals.ANNOTATION: case LexerTerminals.MODULE: case LexerTerminals.FUNCTION: case LexerTerminals.PARAMETER: while (true) { switch (lookAheadChar) { 
case LexerTerminals.SPACE: lookAheadCount++; lookAheadChar = reader.peek(lookAheadCount); continue; case LexerTerminals.BACKTICK: if (reader.peek(lookAheadCount + 1) != LexerTerminals.BACKTICK) { return 0; } default: break; } break; } default: return lookAheadCount; } } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION_PARAMETER Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationParameterToken() { reader.mark(); int nextChar = peek(); if (isIdentifierInitialChar(nextChar)) { reader.advance(); while (isIdentifierInitialChar(peek())) { reader.advance(); } return getDocumentationLiteral(SyntaxKind.PARAMETER_NAME); } else if (nextChar == LexerTerminals.MINUS) { reader.advance(); switchMode(ParserMode.DOCUMENTATION); return getDocumentationSyntaxToken(SyntaxKind.MINUS_TOKEN); } else { switchMode(ParserMode.DOCUMENTATION); return readDocumentationToken(); } } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION_REFERENCE_TYPE Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationReferenceTypeToken() { int nextChar = peek(); if (nextChar == LexerTerminals.BACKTICK) { reader.advance(); switchMode(ParserMode.DOCUMENTATION_BACKTICK_CONTENT); return getDocumentationSyntaxToken(SyntaxKind.BACKTICK_TOKEN); } while (isIdentifierInitialChar(peek())) { reader.advance(); } return processReferenceType(); } private STToken processReferenceType() { String tokenText = getLexeme(); switch (tokenText) { case LexerTerminals.TYPE: return getDocumentationSyntaxToken(SyntaxKind.TYPE_DOC_REFERENCE_TOKEN); case LexerTerminals.SERVICE: return getDocumentationSyntaxToken(SyntaxKind.SERVICE_DOC_REFERENCE_TOKEN); case LexerTerminals.VARIABLE: 
return getDocumentationSyntaxToken(SyntaxKind.VARIABLE_DOC_REFERENCE_TOKEN); case LexerTerminals.VAR: return getDocumentationSyntaxToken(SyntaxKind.VAR_DOC_REFERENCE_TOKEN); case LexerTerminals.ANNOTATION: return getDocumentationSyntaxToken(SyntaxKind.ANNOTATION_DOC_REFERENCE_TOKEN); case LexerTerminals.MODULE: return getDocumentationSyntaxToken(SyntaxKind.MODULE_DOC_REFERENCE_TOKEN); case LexerTerminals.FUNCTION: return getDocumentationSyntaxToken(SyntaxKind.FUNCTION_DOC_REFERENCE_TOKEN); case LexerTerminals.PARAMETER: return getDocumentationSyntaxToken(SyntaxKind.PARAMETER_DOC_REFERENCE_TOKEN); default: throw new IllegalStateException(); } } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION_BACKTICK_CONTENT Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationBacktickContentToken() { reader.mark(); int nextToken = peek(); if (nextToken == LexerTerminals.BACKTICK) { reader.advance(); switchMode(ParserMode.DOCUMENTATION); return getDocumentationSyntaxToken(SyntaxKind.BACKTICK_TOKEN); } while (!reader.isEOF()) { switch (nextToken) { case LexerTerminals.BACKTICK: case LexerTerminals.NEWLINE: break; default: reader.advance(); nextToken = peek(); continue; } break; } return getDocumentationLiteral(SyntaxKind.BACKTICK_CONTENT); } }
reader.advance(readerAdvanceCount);
private STToken processQuotedIdentifier() { while (!reader.isEOF()) { int nextChar = reader.peek(); if (isIdentifierFollowingChar(nextChar)) { reader.advance(); continue; } if (nextChar != '\\') { break; } nextChar = reader.peek(1); switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.TAB: break; case 'u': if (reader.peek(2) == '{') { processStringNumericEscape(); } else { reader.advance(2); } continue; default: if ('A' <= nextChar && nextChar <= 'Z') { break; } if ('a' <= nextChar && nextChar <= 'z') { break; } reader.advance(2); continue; } break; } return getIdentifierToken(getLexeme()); } private STToken processTokenStartWithGt() { if (peek() == LexerTerminals.EQUAL) { reader.advance(); return getSyntaxToken(SyntaxKind.GT_EQUAL_TOKEN); } if (reader.peek() != LexerTerminals.GT) { return getSyntaxToken(SyntaxKind.GT_TOKEN); } char nextChar = reader.peek(1); switch (nextChar) { case LexerTerminals.GT: if (reader.peek(2) == LexerTerminals.EQUAL) { reader.advance(2); return getSyntaxToken(SyntaxKind.TRIPPLE_GT_TOKEN); } return getSyntaxToken(SyntaxKind.GT_TOKEN); case LexerTerminals.EQUAL: reader.advance(1); return getSyntaxToken(SyntaxKind.DOUBLE_GT_TOKEN); default: return getSyntaxToken(SyntaxKind.GT_TOKEN); } } /* * ------------------------------------------------------------------------------------------------------------ * INTERPOLATION Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readTokenInInterpolation() { reader.mark(); int nextChar = peek(); switch (nextChar) { case LexerTerminals.OPEN_BRACE: startMode(ParserMode.INTERPOLATION_BRACED_CONTENT); return readToken(); case LexerTerminals.CLOSE_BRACE: endMode(); reader.advance(); return getSyntaxTokenWithoutTrailingTrivia(SyntaxKind.CLOSE_BRACE_TOKEN); case LexerTerminals.BACKTICK: default: return readToken(); } } private STToken 
getSyntaxTokenWithoutTrailingTrivia(SyntaxKind kind) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); STNode trailingTrivia = STNodeFactory.createNodeList(new ArrayList<>(0)); return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia); } /* * ------------------------------------------------------------------------------------------------------------ * INTERPOLATION_BRACED_CONTENT Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readTokenInBracedContentInInterpolation() { reader.mark(); int nextChar = peek(); switch (nextChar) { case LexerTerminals.OPEN_BRACE: startMode(ParserMode.INTERPOLATION_BRACED_CONTENT); break; case LexerTerminals.CLOSE_BRACE: case LexerTerminals.BACKTICK: endMode(); break; default: break; } return readToken(); } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationToken() { int nextChar = peek(); if (nextChar == LexerTerminals.PLUS) { reader.advance(); switchMode(ParserMode.DOCUMENTATION_PARAMETER); return getDocumentationSyntaxToken(SyntaxKind.PLUS_TOKEN); } return readDocumentationInternalToken(); } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION_INTERNAL Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationInternalToken() { reader.mark(); int nextChar = peek(); if (nextChar == LexerTerminals.BACKTICK && reader.peek(1) != LexerTerminals.BACKTICK) { reader.advance(); switchMode(ParserMode.DOCUMENTATION_BACKTICK_CONTENT); return getDocumentationSyntaxToken(SyntaxKind.BACKTICK_TOKEN); } while (!reader.isEOF()) { 
switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: endMode(); break; case LexerTerminals.BACKTICK: if (reader.peek(1) != LexerTerminals.BACKTICK) { break; } else if (reader.peek(2) != LexerTerminals.BACKTICK) { reader.advance(2); processDocumentationCodeContent(false); } else { reader.advance(3); processDocumentationCodeContent(true); } nextChar = peek(); continue; default: if (isIdentifierInitialChar(nextChar)) { boolean hasDocumentationReference = processDocumentationReference(nextChar); if (hasDocumentationReference) { switchMode(ParserMode.DOCUMENTATION_REFERENCE_TYPE); break; } } else { reader.advance(); } nextChar = peek(); continue; } break; } if (getLexeme().isEmpty()) { return readDocumentationReferenceTypeToken(); } return getTemplateString(SyntaxKind.DOCUMENTATION_DESCRIPTION); } private void processDocumentationCodeContent(boolean isTripleBacktick) { int nextChar = peek(); while (!reader.isEOF()) { switch (nextChar) { case LexerTerminals.BACKTICK: if (isTripleBacktick) { reader.advance(); if (peek() != LexerTerminals.BACKTICK) { nextChar = peek(); continue; } } reader.advance(); if (peek() != LexerTerminals.BACKTICK) { nextChar = peek(); continue; } reader.advance(); if (peek() != LexerTerminals.BACKTICK) { return; } nextChar = peek(); continue; case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.NEWLINE: int lookAheadCount = 1; if (peek() == LexerTerminals.CARRIAGE_RETURN && reader.peek(1) == LexerTerminals.NEWLINE) { lookAheadCount++; } int lookAheadChar = reader.peek(lookAheadCount); while (lookAheadChar == LexerTerminals.SPACE || lookAheadChar == LexerTerminals.TAB) { lookAheadCount++; lookAheadChar = reader.peek(lookAheadCount); } if (lookAheadChar != LexerTerminals.HASH) { return; } reader.advance(lookAheadCount); nextChar = peek(); continue; default: reader.advance(); nextChar = peek(); } } } private boolean processDocumentationReference(int nextChar) { int lookAheadChar = nextChar; int lookAheadCount = 0; 
String identifier = ""; while (isIdentifierInitialChar(lookAheadChar)) { identifier = identifier.concat(String.valueOf((char) lookAheadChar)); lookAheadCount++; lookAheadChar = reader.peek(lookAheadCount); } switch (identifier) { case LexerTerminals.TYPE: case LexerTerminals.SERVICE: case LexerTerminals.VARIABLE: case LexerTerminals.VAR: case LexerTerminals.ANNOTATION: case LexerTerminals.MODULE: case LexerTerminals.FUNCTION: case LexerTerminals.PARAMETER: while (true) { switch (lookAheadChar) { case LexerTerminals.SPACE: case LexerTerminals.TAB: lookAheadCount++; lookAheadChar = reader.peek(lookAheadCount); continue; case LexerTerminals.BACKTICK: if (reader.peek(lookAheadCount + 1) != LexerTerminals.BACKTICK) { return true; } default: break; } break; } default: reader.advance(lookAheadCount); return false; } } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION_PARAMETER Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationParameterToken() { reader.mark(); int nextChar = peek(); if (isIdentifierInitialChar(nextChar)) { STToken token; reader.advance(); while (isIdentifierInitialChar(peek())) { reader.advance(); } if (LexerTerminals.RETURN.equals(getLexeme())) { token = getDocumentationSyntaxToken(SyntaxKind.RETURN_KEYWORD); } else { token = getDocumentationLiteral(SyntaxKind.PARAMETER_NAME); } if (peek() != LexerTerminals.MINUS) { switchMode(ParserMode.DOCUMENTATION_INTERNAL); } return token; } else if (nextChar == LexerTerminals.MINUS) { reader.advance(); switchMode(ParserMode.DOCUMENTATION_INTERNAL); return getDocumentationSyntaxToken(SyntaxKind.MINUS_TOKEN); } else { switchMode(ParserMode.DOCUMENTATION_INTERNAL); return readDocumentationInternalToken(); } } /* * ------------------------------------------------------------------------------------------------------------ * 
DOCUMENTATION_REFERENCE_TYPE Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationReferenceTypeToken() { int nextChar = peek(); if (nextChar == LexerTerminals.BACKTICK) { reader.advance(); switchMode(ParserMode.DOCUMENTATION_BACKTICK_CONTENT); return getDocumentationSyntaxToken(SyntaxKind.BACKTICK_TOKEN); } while (isIdentifierInitialChar(peek())) { reader.advance(); } return processReferenceType(); } private STToken processReferenceType() { String tokenText = getLexeme(); switch (tokenText) { case LexerTerminals.TYPE: return getDocumentationSyntaxToken(SyntaxKind.TYPE_DOC_REFERENCE_TOKEN); case LexerTerminals.SERVICE: return getDocumentationSyntaxToken(SyntaxKind.SERVICE_DOC_REFERENCE_TOKEN); case LexerTerminals.VARIABLE: return getDocumentationSyntaxToken(SyntaxKind.VARIABLE_DOC_REFERENCE_TOKEN); case LexerTerminals.VAR: return getDocumentationSyntaxToken(SyntaxKind.VAR_DOC_REFERENCE_TOKEN); case LexerTerminals.ANNOTATION: return getDocumentationSyntaxToken(SyntaxKind.ANNOTATION_DOC_REFERENCE_TOKEN); case LexerTerminals.MODULE: return getDocumentationSyntaxToken(SyntaxKind.MODULE_DOC_REFERENCE_TOKEN); case LexerTerminals.FUNCTION: return getDocumentationSyntaxToken(SyntaxKind.FUNCTION_DOC_REFERENCE_TOKEN); case LexerTerminals.PARAMETER: return getDocumentationSyntaxToken(SyntaxKind.PARAMETER_DOC_REFERENCE_TOKEN); default: throw new IllegalStateException(); } } /* * ------------------------------------------------------------------------------------------------------------ * DOCUMENTATION_BACKTICK_CONTENT Mode * ------------------------------------------------------------------------------------------------------------ */ private STToken readDocumentationBacktickContentToken() { reader.mark(); int nextToken = peek(); if (nextToken == LexerTerminals.BACKTICK) { reader.advance(); switchMode(ParserMode.DOCUMENTATION_INTERNAL); return 
getDocumentationSyntaxToken(SyntaxKind.BACKTICK_TOKEN); } while (!reader.isEOF()) { switch (nextToken) { case LexerTerminals.BACKTICK: case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: break; default: reader.advance(); nextToken = peek(); continue; } break; } return getDocumentationLiteral(SyntaxKind.BACKTICK_CONTENT); } }
class BallerinaLexer extends AbstractLexer {

    public BallerinaLexer(CharReader charReader) {
        super(charReader, ParserMode.DEFAULT);
    }

    /**
     * Get the next lexical token.
     *
     * @return Next lexical token.
     */
    public STToken nextToken() {
        STToken token;
        // Dispatch on the current lexer mode; each mode has its own tokenizer.
        // NOTE(review): readDocumentationBacktickContentToken switches into
        // DOCUMENTATION_INTERNAL mode, but that mode is not dispatched here and
        // would fall through to the default branch -- confirm this is intended.
        switch (this.mode) {
            case TEMPLATE:
                // Template mode does not consume leading trivia; start with an empty list.
                this.leadingTriviaList = new ArrayList<>(0);
                token = readTemplateToken();
                break;
            case INTERPOLATION:
                processLeadingTrivia();
                token = readTokenInInterpolation();
                break;
            case INTERPOLATION_BRACED_CONTENT:
                processLeadingTrivia();
                token = readTokenInBracedContentInInterpolation();
                break;
            case DOCUMENTATION:
                processLeadingTrivia();
                token = readDocumentationToken();
                break;
            case DOCUMENTATION_PARAMETER:
                processLeadingTrivia();
                token = readDocumentationParameterToken();
                break;
            case DOCUMENTATION_REFERENCE_TYPE:
                processLeadingTrivia();
                token = readDocumentationReferenceTypeToken();
                break;
            case DOCUMENTATION_BACKTICK_CONTENT:
                processLeadingTrivia();
                token = readDocumentationBacktickContentToken();
                break;
            case DEFAULT:
            case IMPORT:
            default:
                processLeadingTrivia();
                token = readToken();
        }
        return cloneWithDiagnostics(token);
    }

    /**
     * Same mode dispatch as {@link #nextToken()}, but without consuming leading trivia
     * or attaching diagnostics. Used internally after an invalid token is skipped.
     */
    private STToken nextTokenInternal() {
        switch (this.mode) {
            case TEMPLATE:
                return readTemplateToken();
            case INTERPOLATION:
                return readTokenInInterpolation();
            case INTERPOLATION_BRACED_CONTENT:
                return readTokenInBracedContentInInterpolation();
            case DOCUMENTATION:
                return readDocumentationToken();
            case DOCUMENTATION_PARAMETER:
                return readDocumentationParameterToken();
            case DOCUMENTATION_REFERENCE_TYPE:
                return readDocumentationReferenceTypeToken();
            case DOCUMENTATION_BACKTICK_CONTENT:
                return readDocumentationBacktickContentToken();
            case DEFAULT:
            case IMPORT:
            default:
                return readToken();
        }
    }

    /*
     * Private Methods
     */

    /**
     * Reads the next token in DEFAULT/IMPORT mode: consumes one character and
     * dispatches to the matching tokenizer, looking ahead where a longer token
     * is possible (e.g. '{|', '?.', '->>').
     *
     * @return Next token
     */
    private STToken readToken() {
        reader.mark();
        if (reader.isEOF()) {
            return getSyntaxToken(SyntaxKind.EOF_TOKEN);
        }

        int c = reader.peek();
        reader.advance();
        STToken token;
        switch (c) {
            // Separators
            case LexerTerminals.COLON:
                token = getSyntaxToken(SyntaxKind.COLON_TOKEN);
                break;
            case LexerTerminals.SEMICOLON:
                token = getSyntaxToken(SyntaxKind.SEMICOLON_TOKEN);
                break;
            case LexerTerminals.DOT:
                token = processDot();
                break;
            case LexerTerminals.COMMA:
                token = getSyntaxToken(SyntaxKind.COMMA_TOKEN);
                break;
            case LexerTerminals.OPEN_PARANTHESIS:
                token = getSyntaxToken(SyntaxKind.OPEN_PAREN_TOKEN);
                break;
            case LexerTerminals.CLOSE_PARANTHESIS:
                token = getSyntaxToken(SyntaxKind.CLOSE_PAREN_TOKEN);
                break;
            case LexerTerminals.OPEN_BRACE:
                // '{|' opens a closed-record type descriptor.
                if (peek() == LexerTerminals.PIPE) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.OPEN_BRACE_PIPE_TOKEN);
                } else {
                    token = getSyntaxToken(SyntaxKind.OPEN_BRACE_TOKEN);
                }
                break;
            case LexerTerminals.CLOSE_BRACE:
                token = getSyntaxToken(SyntaxKind.CLOSE_BRACE_TOKEN);
                break;
            case LexerTerminals.OPEN_BRACKET:
                token = getSyntaxToken(SyntaxKind.OPEN_BRACKET_TOKEN);
                break;
            case LexerTerminals.CLOSE_BRACKET:
                token = getSyntaxToken(SyntaxKind.CLOSE_BRACKET_TOKEN);
                break;
            case LexerTerminals.PIPE:
                token = processPipeOperator();
                break;
            case LexerTerminals.QUESTION_MARK:
                if (peek() == LexerTerminals.DOT) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.OPTIONAL_CHAINING_TOKEN); // '?.'
                } else if (peek() == LexerTerminals.COLON) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.ELVIS_TOKEN); // '?:'
                } else {
                    token = getSyntaxToken(SyntaxKind.QUESTION_MARK_TOKEN);
                }
                break;
            case LexerTerminals.DOUBLE_QUOTE:
                token = processStringLiteral();
                break;
            case LexerTerminals.HASH:
                // '#' starts a documentation line; switch into documentation mode.
                startMode(ParserMode.DOCUMENTATION);
                token = getDocumentationSyntaxToken(SyntaxKind.HASH_TOKEN);
                break;
            case LexerTerminals.AT:
                token = getSyntaxToken(SyntaxKind.AT_TOKEN);
                break;

            // Arithmetic operators
            case LexerTerminals.EQUAL:
                token = processEqualOperator();
                break;
            case LexerTerminals.PLUS:
                token = getSyntaxToken(SyntaxKind.PLUS_TOKEN);
                break;
            case LexerTerminals.MINUS:
                if (reader.peek() == LexerTerminals.GT) {
                    reader.advance();
                    if (peek() == LexerTerminals.GT) {
                        reader.advance();
                        token = getSyntaxToken(SyntaxKind.SYNC_SEND_TOKEN); // '->>'
                    } else {
                        token = getSyntaxToken(SyntaxKind.RIGHT_ARROW_TOKEN); // '->'
                    }
                } else {
                    token = getSyntaxToken(SyntaxKind.MINUS_TOKEN);
                }
                break;
            case LexerTerminals.ASTERISK:
                token = getSyntaxToken(SyntaxKind.ASTERISK_TOKEN);
                break;
            case LexerTerminals.SLASH:
                token = processSlashToken();
                break;
            case LexerTerminals.PERCENT:
                token = getSyntaxToken(SyntaxKind.PERCENT_TOKEN);
                break;
            case LexerTerminals.LT:
                int nextChar = peek();
                if (nextChar == LexerTerminals.EQUAL) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.LT_EQUAL_TOKEN); // '<='
                } else if (nextChar == LexerTerminals.MINUS) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.LEFT_ARROW_TOKEN); // '<-'
                } else if (nextChar == LexerTerminals.LT) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.DOUBLE_LT_TOKEN); // '<<'
                } else {
                    token = getSyntaxToken(SyntaxKind.LT_TOKEN);
                }
                break;
            case LexerTerminals.GT:
                token = processTokenStartWithGt();
                break;
            case LexerTerminals.EXCLAMATION_MARK:
                token = processExclamationMarkOperator();
                break;
            case LexerTerminals.BITWISE_AND:
                if (peek() == LexerTerminals.BITWISE_AND) {
                    reader.advance();
                    token = getSyntaxToken(SyntaxKind.LOGICAL_AND_TOKEN); // '&&'
                } else {
                    token = getSyntaxToken(SyntaxKind.BITWISE_AND_TOKEN);
                }
                break;
            case LexerTerminals.BITWISE_XOR:
                token = getSyntaxToken(SyntaxKind.BITWISE_XOR_TOKEN);
                break;
            case LexerTerminals.NEGATION:
                token = getSyntaxToken(SyntaxKind.NEGATION_TOKEN);
                break;
            case LexerTerminals.BACKTICK:
                // Backtick opens a template literal; switch into template mode.
                startMode(ParserMode.TEMPLATE);
                token = getBacktickToken();
                break;
            case LexerTerminals.SINGLE_QUOTE:
                token = processQuotedIdentifier();
                break;

            // Numeric literals
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
                token = processNumericLiteral(c);
                break;

            // Identifiers and keywords
            case 'A':
            case 'B':
            case 'C':
            case 'D':
            case 'E':
            case 'F':
            case 'G':
            case 'H':
            case 'I':
            case 'J':
            case 'K':
            case 'L':
            case 'M':
            case 'N':
            case 'O':
            case 'P':
            case 'Q':
            case 'R':
            case 'S':
            case 'T':
            case 'U':
            case 'V':
            case 'W':
            case 'X':
            case 'Y':
            case 'Z':
            case 'a':
            case 'b':
            case 'c':
            case 'd':
            case 'e':
            case 'f':
            case 'g':
            case 'h':
            case 'i':
            case 'j':
            case 'k':
            case 'l':
            case 'm':
            case 'n':
            case 'o':
            case 'p':
            case 'q':
            case 'r':
            case 's':
            case 't':
            case 'u':
            case 'v':
            case 'w':
            case 'x':
            case 'y':
            case 'z':
            case '_':
                token = processIdentifierOrKeyword();
                break;

            default:
                // Unrecognized character: skip the invalid run (attached as leading
                // trivia) and lex the token that follows it.
                processInvalidToken();
                token = nextTokenInternal();
                break;
        }

        return token;
    }

    /** Creates a token of the given kind with the collected leading and trailing trivia. */
    private STToken getSyntaxToken(SyntaxKind kind) {
        STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList);
        STNode trailingTrivia = processTrailingTrivia();
        return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia);
    }

    /**
     * Creates a token of the given kind, and ends the current (documentation) mode
     * if the trailing trivia contains an end-of-line.
     */
    private STToken getDocumentationSyntaxToken(SyntaxKind kind) {
        STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList);
        STNode trailingTrivia = processTrailingTrivia();
        for (int i = 0; i < trailingTrivia.bucketCount(); i++) {
            if (trailingTrivia.childInBucket(i).kind == SyntaxKind.END_OF_LINE_MINUTIAE) {
                endMode();
            }
        }
        return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia);
    }

    /**
     * Creates an identifier token from the current lexeme.
     * NOTE(review): the {@code tokenText} parameter is unused -- the lexeme is
     * re-read via {@code getLexeme()} instead; consider removing the parameter.
     */
    private STToken getIdentifierToken(String tokenText) {
        STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList);
        String lexeme = getLexeme();
        STNode trailingTrivia = processTrailingTrivia();
        return STNodeFactory.createIdentifierToken(lexeme, leadingTrivia, trailingTrivia);
    }

    /** Creates a literal-value token of the given kind from the current lexeme. */
    private STToken getLiteral(SyntaxKind kind) {
        STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList);
        String lexeme = getLexeme();
        STNode trailingTrivia = processTrailingTrivia();
        return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia);
    }

    /**
     * Creates a literal-value token of the given kind, and ends the current
     * (documentation) mode if the trailing trivia contains an end-of-line.
     */
    private STToken getDocumentationLiteral(SyntaxKind kind) {
        STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList);
        String lexeme = getLexeme();
        STNode trailingTrivia = processTrailingTrivia();
        for (int i = 0; i < trailingTrivia.bucketCount(); i++) {
            if (trailingTrivia.childInBucket(i).kind == SyntaxKind.END_OF_LINE_MINUTIAE) {
                endMode();
            }
        }
        return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia);
    }

    /**
     * Process leading trivia.
     */
    private void processLeadingTrivia() {
        this.leadingTriviaList = new ArrayList<>(10);
        processSyntaxTrivia(this.leadingTriviaList, true);
    }

    /**
     * Process and return trailing trivia.
     *
     * @return Trailing trivia
     */
    private STNode processTrailingTrivia() {
        List<STNode> triviaList = new ArrayList<>(10);
        processSyntaxTrivia(triviaList, false);
        return STNodeFactory.createNodeList(triviaList);
    }

    /**
     * Process syntax trivia and add it to the provided list.
     * <p>
     * <code>syntax-trivia := whitespace | end-of-line | comments</code>
     * <p>
     * Trailing trivia stops at the first end-of-line; leading trivia continues past it.
     *
     * @param triviaList List of trivia
     * @param isLeading Flag indicating whether the currently processing leading trivia or not
     */
    private void processSyntaxTrivia(List<STNode> triviaList, boolean isLeading) {
        while (!reader.isEOF()) {
            reader.mark();
            char c = reader.peek();
            switch (c) {
                case LexerTerminals.SPACE:
                case LexerTerminals.TAB:
                case LexerTerminals.FORM_FEED:
                    triviaList.add(processWhitespaces());
                    break;
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.NEWLINE:
                    triviaList.add(processEndOfLine());
                    if (isLeading) {
                        break;
                    }
                    return; // trailing trivia ends at the end of the line
                case LexerTerminals.SLASH:
                    if (reader.peek(1) == LexerTerminals.SLASH) {
                        triviaList.add(processComment());
                        break;
                    }
                    return;
                default:
                    return;
            }
        }
    }

    /**
     * Process whitespace up to an end of line.
     * <p>
     * <code>whitespace := 0x9 | 0xC | 0x20</code>
     *
     * @return Whitespace trivia
     */
    private STNode processWhitespaces() {
        while (!reader.isEOF()) {
            char c = reader.peek();
            switch (c) {
                case LexerTerminals.SPACE:
                case LexerTerminals.TAB:
                case LexerTerminals.FORM_FEED:
                    reader.advance();
                    continue;
                case LexerTerminals.CARRIAGE_RETURN:
                case LexerTerminals.NEWLINE:
                    break;
                default:
                    break;
            }
            break;
        }
        return STNodeFactory.createMinutiae(SyntaxKind.WHITESPACE_MINUTIAE, getLexeme());
    }

    /**
     * Process end of line.
* <p>
     * <code>end-of-line := 0xA | 0xD</code>
     *
     * @return End of line trivia
     */
    private STNode processEndOfLine() {
        char c = reader.peek();
        switch (c) {
            case LexerTerminals.NEWLINE:
                reader.advance();
                return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme());
            case LexerTerminals.CARRIAGE_RETURN:
                reader.advance();
                // Treat a CRLF pair as a single end-of-line.
                if (reader.peek() == LexerTerminals.NEWLINE) {
                    reader.advance();
                }
                return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme());
            default:
                // Callers only invoke this when positioned on 0xA or 0xD.
                throw new IllegalStateException();
        }
    }

    /**
     * Process dot, ellipsis or decimal floating point token.
     * Distinguishes '.', '...', '..<', '.@', '.<' and a leading-dot float literal.
     *
     * @return Dot, ellipsis or decimal floating point token
     */
    private STToken processDot() {
        int nextChar = reader.peek();
        if (nextChar == LexerTerminals.DOT) {
            int nextNextChar = reader.peek(1);
            if (nextNextChar == LexerTerminals.DOT) {
                reader.advance(2);
                return getSyntaxToken(SyntaxKind.ELLIPSIS_TOKEN); // '...'
            } else if (nextNextChar == LexerTerminals.LT) {
                reader.advance(2);
                return getSyntaxToken(SyntaxKind.DOUBLE_DOT_LT_TOKEN); // '..<'
            }
        } else if (nextChar == LexerTerminals.AT) {
            reader.advance();
            return getSyntaxToken(SyntaxKind.ANNOT_CHAINING_TOKEN); // '.@'
        } else if (nextChar == LexerTerminals.LT) {
            reader.advance();
            return getSyntaxToken(SyntaxKind.DOT_LT_TOKEN); // '.<'
        }

        // '.5' style float literals are not recognized inside import declarations.
        if (this.mode != ParserMode.IMPORT && isDigit(nextChar)) {
            return processDecimalFloatLiteral();
        }
        return getSyntaxToken(SyntaxKind.DOT_TOKEN);
    }

    /**
     * <p>
     * Process a comment, and add it to trivia list.
     * </p>
     * <code>Comment := // AnyCharButNewline*
     * <br/><br/>AnyCharButNewline := ^ 0xA</code>
     */
    private STNode processComment() {
        reader.advance(2); // consume the leading '//'
        int nextToken = peek();
        while (!reader.isEOF()) {
            switch (nextToken) {
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                    break;
                default:
                    reader.advance();
                    nextToken = peek();
                    continue;
            }
            break;
        }
        return STNodeFactory.createMinutiae(SyntaxKind.COMMENT_MINUTIAE, getLexeme());
    }

    /**
     * Process any token that starts with '='.
     *
     * @return One of the tokens: <code>'=', '==', '=>', '==='</code>
     */
    private STToken processEqualOperator() {
        switch (peek()) {
            case LexerTerminals.EQUAL:
                reader.advance();
                if (peek() == LexerTerminals.EQUAL) {
                    reader.advance();
                    return getSyntaxToken(SyntaxKind.TRIPPLE_EQUAL_TOKEN); // '==='
                } else {
                    return getSyntaxToken(SyntaxKind.DOUBLE_EQUAL_TOKEN); // '=='
                }
            case LexerTerminals.GT:
                reader.advance();
                return getSyntaxToken(SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN); // '=>'
            default:
                return getSyntaxToken(SyntaxKind.EQUAL_TOKEN);
        }
    }

    /**
     * <p>
     * Process and returns a numeric literal.
     * </p>
     * <code>
     * numeric-literal := int-literal | floating-point-literal
     * <br/>
     * floating-point-literal := DecimalFloatingPointNumber | HexFloatingPointLiteral
     * <br/>
     * int-literal := DecimalNumber | HexIntLiteral
     * <br/>
     * DecimalNumber := 0 | NonZeroDigit Digit*
     * <br/>
     * Digit := 0 .. 9
     * <br/>
     * NonZeroDigit := 1 .. 9
     * </code>
     *
     * @return The numeric literal.
     */
    private STToken processNumericLiteral(int startChar) {
        int nextChar = peek();
        if (isHexIndicator(startChar, nextChar)) {
            return processHexLiteral();
        }

        int len = 1;
        while (!reader.isEOF()) {
            switch (nextChar) {
                case LexerTerminals.DOT:
                case 'e':
                case 'E':
                case 'f':
                case 'F':
                case 'd':
                case 'D':
                    // '..' after a number is a range operator, not a float.
                    if (reader.peek(1) == LexerTerminals.DOT) {
                        break;
                    }

                    // Float literals are not recognized inside import declarations.
                    if (this.mode == ParserMode.IMPORT) {
                        break;
                    }

                    // Leading zeros in the integer part are an error.
                    if (startChar == '0' && len > 1) {
                        reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS);
                    }
                    return processDecimalFloatLiteral();
                default:
                    if (isDigit(nextChar)) {
                        reader.advance();
                        len++;
                        nextChar = peek();
                        continue;
                    }
                    break;
            }
            break;
        }

        if (startChar == '0' && len > 1) {
            reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS);
        }
        return getLiteral(SyntaxKind.DECIMAL_INTEGER_LITERAL);
    }

    /**
     * <p>
     * Process and returns a decimal floating point literal.
     * </p>
     * <code>
     * DecimalFloatingPointNumber :=
     *    DecimalNumber Exponent [FloatingPointTypeSuffix]
     *    | DottedDecimalNumber [Exponent] [FloatingPointTypeSuffix]
     *    | DecimalNumber FloatingPointTypeSuffix
     * <br/>
     * DottedDecimalNumber := DecimalNumber . Digit* | . Digit+
     * <br/>
     * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix
     * <br/>
     * DecimalTypeSuffix := d | D
     * <br/>
     * FloatTypeSuffix := f | F
     * </code>
     *
     * @return The decimal floating point literal.
     */
    private STToken processDecimalFloatLiteral() {
        int nextChar = peek();

        // Consume an optional '.' followed by the fractional digits.
        if (nextChar == LexerTerminals.DOT) {
            reader.advance();
            nextChar = peek();
        }

        while (isDigit(nextChar)) {
            reader.advance();
            nextChar = peek();
        }

        switch (nextChar) {
            case 'e':
            case 'E':
                return processExponent(false);
            case 'f':
            case 'F':
            case 'd':
            case 'D':
                return parseFloatingPointTypeSuffix();
        }

        return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL);
    }

    /**
     * <p>
     * Process an exponent or hex-exponent.
     * </p>
     * <code>
     * exponent := Exponent | HexExponent
     * <br/>
     * Exponent := ExponentIndicator [Sign] Digit+
     * <br/>
     * HexExponent := HexExponentIndicator [Sign] Digit+
     * <br/>
     * ExponentIndicator := e | E
     * <br/>
     * HexExponentIndicator := p | P
     * <br/>
     * Sign := + | -
     * <br/>
     * Digit := 0 .. 9
     * </code>
     *
     * @param isHex HexExponent or not
     * @return The decimal floating point literal.
     */
    private STToken processExponent(boolean isHex) {
        // Consume the exponent indicator ('e'/'E' or 'p'/'P').
        reader.advance();
        int nextChar = peek();

        if (nextChar == LexerTerminals.PLUS || nextChar == LexerTerminals.MINUS) {
            reader.advance();
            nextChar = peek();
        }

        if (!isDigit(nextChar)) {
            reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DIGIT_AFTER_EXPONENT_INDICATOR);
        }

        while (isDigit(nextChar)) {
            reader.advance();
            nextChar = peek();
        }

        if (isHex) {
            return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL);
        }

        switch (nextChar) {
            case 'f':
            case 'F':
            case 'd':
            case 'D':
                return parseFloatingPointTypeSuffix();
        }

        return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL);
    }

    /**
     * <p>
     * Parse floating point type suffix.
     * </p>
     * <code>
     * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix
     * <br/>
     * DecimalTypeSuffix := d | D
     * <br/>
     * FloatTypeSuffix := f | F
     * </code>
     *
     * @return The decimal floating point literal.
     */
    private STToken parseFloatingPointTypeSuffix() {
        reader.advance();
        return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL);
    }

    /**
     * <p>
     * Process and returns a hex literal.
     * </p>
     * <code>
     * hex-literal := HexIntLiteral | HexFloatingPointLiteral
     * <br/>
     * HexIntLiteral := HexIndicator HexNumber
     * <br/>
     * HexNumber := HexDigit+
     * <br/>
     * HexIndicator := 0x | 0X
     * <br/>
     * HexDigit := Digit | a .. f | A .. F
     * <br/>
     * HexFloatingPointLiteral := HexIndicator HexFloatingPointNumber
     * <br/>
     * HexFloatingPointNumber := HexNumber HexExponent | DottedHexNumber [HexExponent]
     * <br/>
     * DottedHexNumber := HexDigit+ . HexDigit* | . HexDigit+
     * </code>
     *
     * @return The hex literal.
*/
    private STToken processHexLiteral() {
        reader.advance(); // consume the 'x'/'X' of the hex indicator

        // '0x.' with no hex digit after the dot is an error.
        if (peek() == LexerTerminals.DOT && !isHexDigit(reader.peek(1))) {
            reportLexerError(DiagnosticErrorCode.ERROR_MISSING_HEX_DIGIT_AFTER_DOT);
        }

        int nextChar;
        while (isHexDigit(peek())) {
            reader.advance();
        }
        nextChar = peek();

        switch (nextChar) {
            case LexerTerminals.DOT:
                reader.advance();
                nextChar = peek();
                while (isHexDigit(nextChar)) {
                    reader.advance();
                    nextChar = peek();
                }
                switch (nextChar) {
                    case 'p':
                    case 'P':
                        return processExponent(true);
                }
                break;
            case 'p':
            case 'P':
                return processExponent(true);
            default:
                return getLiteral(SyntaxKind.HEX_INTEGER_LITERAL);
        }
        return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL);
    }

    /**
     * Process and returns an identifier or a keyword. Consumes identifier characters,
     * then matches the lexeme against the keyword table; anything unmatched becomes
     * an identifier token.
     *
     * @return An identifier or a keyword.
     */
    private STToken processIdentifierOrKeyword() {
        while (isIdentifierFollowingChar(peek())) {
            reader.advance();
        }

        String tokenText = getLexeme();
        switch (tokenText) {
            // Built-in type names
            case LexerTerminals.INT:
                return getSyntaxToken(SyntaxKind.INT_KEYWORD);
            case LexerTerminals.FLOAT:
                return getSyntaxToken(SyntaxKind.FLOAT_KEYWORD);
            case LexerTerminals.STRING:
                return getSyntaxToken(SyntaxKind.STRING_KEYWORD);
            case LexerTerminals.BOOLEAN:
                return getSyntaxToken(SyntaxKind.BOOLEAN_KEYWORD);
            case LexerTerminals.DECIMAL:
                return getSyntaxToken(SyntaxKind.DECIMAL_KEYWORD);
            case LexerTerminals.XML:
                return getSyntaxToken(SyntaxKind.XML_KEYWORD);
            case LexerTerminals.JSON:
                return getSyntaxToken(SyntaxKind.JSON_KEYWORD);
            case LexerTerminals.HANDLE:
                return getSyntaxToken(SyntaxKind.HANDLE_KEYWORD);
            case LexerTerminals.ANY:
                return getSyntaxToken(SyntaxKind.ANY_KEYWORD);
            case LexerTerminals.ANYDATA:
                return getSyntaxToken(SyntaxKind.ANYDATA_KEYWORD);
            case LexerTerminals.NEVER:
                return getSyntaxToken(SyntaxKind.NEVER_KEYWORD);
            case LexerTerminals.BYTE:
                return getSyntaxToken(SyntaxKind.BYTE_KEYWORD);

            // Other keywords
            case LexerTerminals.PUBLIC:
                return getSyntaxToken(SyntaxKind.PUBLIC_KEYWORD);
            case LexerTerminals.PRIVATE:
                return getSyntaxToken(SyntaxKind.PRIVATE_KEYWORD);
            case LexerTerminals.FUNCTION:
                return getSyntaxToken(SyntaxKind.FUNCTION_KEYWORD);
            case LexerTerminals.RETURN:
                return getSyntaxToken(SyntaxKind.RETURN_KEYWORD);
            case LexerTerminals.RETURNS:
                return getSyntaxToken(SyntaxKind.RETURNS_KEYWORD);
            case LexerTerminals.EXTERNAL:
                return getSyntaxToken(SyntaxKind.EXTERNAL_KEYWORD);
            case LexerTerminals.TYPE:
                return getSyntaxToken(SyntaxKind.TYPE_KEYWORD);
            case LexerTerminals.RECORD:
                return getSyntaxToken(SyntaxKind.RECORD_KEYWORD);
            case LexerTerminals.OBJECT:
                return getSyntaxToken(SyntaxKind.OBJECT_KEYWORD);
            case LexerTerminals.REMOTE:
                return getSyntaxToken(SyntaxKind.REMOTE_KEYWORD);
            case LexerTerminals.ABSTRACT:
                return getSyntaxToken(SyntaxKind.ABSTRACT_KEYWORD);
            case LexerTerminals.CLIENT:
                return getSyntaxToken(SyntaxKind.CLIENT_KEYWORD);
            case LexerTerminals.IF:
                return getSyntaxToken(SyntaxKind.IF_KEYWORD);
            case LexerTerminals.ELSE:
                return getSyntaxToken(SyntaxKind.ELSE_KEYWORD);
            case LexerTerminals.WHILE:
                return getSyntaxToken(SyntaxKind.WHILE_KEYWORD);
            case LexerTerminals.TRUE:
                return getSyntaxToken(SyntaxKind.TRUE_KEYWORD);
            case LexerTerminals.FALSE:
                return getSyntaxToken(SyntaxKind.FALSE_KEYWORD);
            case LexerTerminals.CHECK:
                return getSyntaxToken(SyntaxKind.CHECK_KEYWORD);
            case LexerTerminals.CHECKPANIC:
                return getSyntaxToken(SyntaxKind.CHECKPANIC_KEYWORD);
            case LexerTerminals.CONTINUE:
                return getSyntaxToken(SyntaxKind.CONTINUE_KEYWORD);
            case LexerTerminals.BREAK:
                return getSyntaxToken(SyntaxKind.BREAK_KEYWORD);
            case LexerTerminals.PANIC:
                return getSyntaxToken(SyntaxKind.PANIC_KEYWORD);
            case LexerTerminals.IMPORT:
                return getSyntaxToken(SyntaxKind.IMPORT_KEYWORD);
            case LexerTerminals.VERSION:
                return getSyntaxToken(SyntaxKind.VERSION_KEYWORD);
            case LexerTerminals.AS:
                return getSyntaxToken(SyntaxKind.AS_KEYWORD);
            case LexerTerminals.SERVICE:
                return getSyntaxToken(SyntaxKind.SERVICE_KEYWORD);
            case LexerTerminals.ON:
                return getSyntaxToken(SyntaxKind.ON_KEYWORD);
            case LexerTerminals.RESOURCE:
                return getSyntaxToken(SyntaxKind.RESOURCE_KEYWORD);
            case LexerTerminals.LISTENER:
                return getSyntaxToken(SyntaxKind.LISTENER_KEYWORD);
            case LexerTerminals.CONST:
                return getSyntaxToken(SyntaxKind.CONST_KEYWORD);
            case LexerTerminals.FINAL:
                return getSyntaxToken(SyntaxKind.FINAL_KEYWORD);
            case LexerTerminals.TYPEOF:
                return getSyntaxToken(SyntaxKind.TYPEOF_KEYWORD);
            case LexerTerminals.IS:
                return getSyntaxToken(SyntaxKind.IS_KEYWORD);
            case LexerTerminals.NULL:
                return getSyntaxToken(SyntaxKind.NULL_KEYWORD);
            case LexerTerminals.LOCK:
                return getSyntaxToken(SyntaxKind.LOCK_KEYWORD);
            case LexerTerminals.ANNOTATION:
                return getSyntaxToken(SyntaxKind.ANNOTATION_KEYWORD);
            case LexerTerminals.SOURCE:
                return getSyntaxToken(SyntaxKind.SOURCE_KEYWORD);
            case LexerTerminals.VAR:
                return getSyntaxToken(SyntaxKind.VAR_KEYWORD);
            case LexerTerminals.WORKER:
                return getSyntaxToken(SyntaxKind.WORKER_KEYWORD);
            case LexerTerminals.PARAMETER:
                return getSyntaxToken(SyntaxKind.PARAMETER_KEYWORD);
            case LexerTerminals.FIELD:
                return getSyntaxToken(SyntaxKind.FIELD_KEYWORD);
            case LexerTerminals.XMLNS:
                return getSyntaxToken(SyntaxKind.XMLNS_KEYWORD);
            case LexerTerminals.FORK:
                return getSyntaxToken(SyntaxKind.FORK_KEYWORD);
            case LexerTerminals.MAP:
                return getSyntaxToken(SyntaxKind.MAP_KEYWORD);
            case LexerTerminals.FUTURE:
                return getSyntaxToken(SyntaxKind.FUTURE_KEYWORD);
            case LexerTerminals.TYPEDESC:
                return getSyntaxToken(SyntaxKind.TYPEDESC_KEYWORD);
            case LexerTerminals.TRAP:
                return getSyntaxToken(SyntaxKind.TRAP_KEYWORD);
            case LexerTerminals.IN:
                return getSyntaxToken(SyntaxKind.IN_KEYWORD);
            case LexerTerminals.FOREACH:
                return getSyntaxToken(SyntaxKind.FOREACH_KEYWORD);
            case LexerTerminals.TABLE:
                return getSyntaxToken(SyntaxKind.TABLE_KEYWORD);
            case LexerTerminals.ERROR:
                return getSyntaxToken(SyntaxKind.ERROR_KEYWORD);
            case LexerTerminals.LET:
                return getSyntaxToken(SyntaxKind.LET_KEYWORD);
            case LexerTerminals.STREAM:
                return getSyntaxToken(SyntaxKind.STREAM_KEYWORD);
            case LexerTerminals.NEW:
                return getSyntaxToken(SyntaxKind.NEW_KEYWORD);
            case LexerTerminals.READONLY:
                return getSyntaxToken(SyntaxKind.READONLY_KEYWORD);
            case LexerTerminals.DISTINCT:
                return getSyntaxToken(SyntaxKind.DISTINCT_KEYWORD);
            case LexerTerminals.FROM:
                return getSyntaxToken(SyntaxKind.FROM_KEYWORD);
            case LexerTerminals.WHERE:
                return getSyntaxToken(SyntaxKind.WHERE_KEYWORD);
            case LexerTerminals.SELECT:
                return getSyntaxToken(SyntaxKind.SELECT_KEYWORD);
            case LexerTerminals.START:
                return getSyntaxToken(SyntaxKind.START_KEYWORD);
            case LexerTerminals.FLUSH:
                return getSyntaxToken(SyntaxKind.FLUSH_KEYWORD);
            case LexerTerminals.DEFAULT:
                return getSyntaxToken(SyntaxKind.DEFAULT_KEYWORD);
            case LexerTerminals.WAIT:
                return getSyntaxToken(SyntaxKind.WAIT_KEYWORD);
            case LexerTerminals.DO:
                return getSyntaxToken(SyntaxKind.DO_KEYWORD);
            case LexerTerminals.TRANSACTION:
                return getSyntaxToken(SyntaxKind.TRANSACTION_KEYWORD);
            case LexerTerminals.COMMIT:
                return getSyntaxToken(SyntaxKind.COMMIT_KEYWORD);
            case LexerTerminals.RETRY:
                return getSyntaxToken(SyntaxKind.RETRY_KEYWORD);
            case LexerTerminals.ROLLBACK:
                return getSyntaxToken(SyntaxKind.ROLLBACK_KEYWORD);
            case LexerTerminals.TRANSACTIONAL:
                return getSyntaxToken(SyntaxKind.TRANSACTIONAL_KEYWORD);
            case LexerTerminals.ENUM:
                return getSyntaxToken(SyntaxKind.ENUM_KEYWORD);
            case LexerTerminals.BASE16:
                return getSyntaxToken(SyntaxKind.BASE16_KEYWORD);
            case LexerTerminals.BASE64:
                return getSyntaxToken(SyntaxKind.BASE64_KEYWORD);
            case LexerTerminals.MATCH:
                return getSyntaxToken(SyntaxKind.MATCH_KEYWORD);
            case LexerTerminals.CONFLICT:
                return getSyntaxToken(SyntaxKind.CONFLICT_KEYWORD);
            case LexerTerminals.LIMIT:
                return getSyntaxToken(SyntaxKind.LIMIT_KEYWORD);
            case LexerTerminals.JOIN:
                return getSyntaxToken(SyntaxKind.JOIN_KEYWORD);
            case LexerTerminals.OUTER:
                return getSyntaxToken(SyntaxKind.OUTER_KEYWORD);
            case LexerTerminals.EQUALS:
                return getSyntaxToken(SyntaxKind.EQUALS_KEYWORD);
            default:
                return getIdentifierToken(tokenText);
        }
    }

    /**
     *
Process and returns an invalid token. Consumes the input until an end of the
     * invalid token is reached (see {@code isEndOfInvalidToken}).
     */
    private void processInvalidToken() {
        while (!isEndOfInvalidToken()) {
            reader.advance();
        }

        // Attach the skipped run as invalid-node minutiae on the leading trivia.
        String tokenText = getLexeme();
        STNode invalidToken = STNodeFactory.createInvalidToken(tokenText);
        STNode invalidNodeMinutiae = STNodeFactory.createInvalidNodeMinutiae(invalidToken);
        this.leadingTriviaList.add(invalidNodeMinutiae);
    }

    /**
     * Check whether the current index is pointing to an end of an invalid lexer-token.
     * An invalid token is considered to end if one of the below is reached:
     * <ul>
     * <li>a whitespace</li>
     * <li>semicolon</li>
     * <li>newline</li>
     * </ul>
     *
     * @return <code>true</code>, if the end of an invalid token is reached, <code>false</code> otherwise
     */
    private boolean isEndOfInvalidToken() {
        if (reader.isEOF()) {
            return true;
        }

        int currentChar = peek();
        switch (currentChar) {
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
            case LexerTerminals.TAB:
            case LexerTerminals.SEMICOLON:
            case LexerTerminals.OPEN_BRACE:
            case LexerTerminals.CLOSE_BRACE:
            case LexerTerminals.OPEN_BRACKET:
            case LexerTerminals.CLOSE_BRACKET:
            case LexerTerminals.OPEN_PARANTHESIS:
            case LexerTerminals.CLOSE_PARANTHESIS:
                return true;
            default:
                return false;
        }
    }

    /**
     * <p>
     * Check whether a given char is an identifier start char.
     * </p>
     * <code>IdentifierInitialChar := A .. Z | a .. z | _ | UnicodeIdentifierChar</code>
     * <p>
     * NOTE(review): UnicodeIdentifierChar from the grammar comment is not handled
     * here -- only ASCII letters and underscore are accepted; confirm.
     *
     * @param c character to check
     * @return <code>true</code>, if the character is an identifier start char. <code>false</code> otherwise.
     */
    private boolean isIdentifierInitialChar(int c) {
        if ('A' <= c && c <= 'Z') {
            return true;
        }

        if ('a' <= c && c <= 'z') {
            return true;
        }

        if (c == '_') {
            return true;
        }
        return false;
    }

    /**
     * <p>
     * Check whether a given char is an identifier following char.
     * </p>
     * <code>IdentifierFollowingChar := IdentifierInitialChar | Digit</code>
     *
     * @param c character to check
     * @return <code>true</code>, if the character is an identifier following char. <code>false</code> otherwise.
     */
    private boolean isIdentifierFollowingChar(int c) {
        return isIdentifierInitialChar(c) || isDigit(c);
    }

    /**
     * <p>
     * Check whether a given char is a digit.
     * </p>
     * <code>Digit := 0..9</code>
     *
     * @param c character to check
     * @return <code>true</code>, if the character represents a digit. <code>false</code> otherwise.
     */
    static boolean isDigit(int c) {
        return ('0' <= c && c <= '9');
    }

    /**
     * <p>
     * Check whether a given char is a hexa digit.
     * </p>
     * <code>HexDigit := Digit | a .. f | A .. F</code>
     *
     * @param c character to check
     * @return <code>true</code>, if the character represents a hex digit. <code>false</code> otherwise.
     */
    static boolean isHexDigit(int c) {
        if ('a' <= c && c <= 'f') {
            return true;
        }

        if ('A' <= c && c <= 'F') {
            return true;
        }
        return isDigit(c);
    }

    /**
     * <p>
     * Check whether current input index points to a start of a hex-numeric literal.
     * </p>
     * <code>HexIndicator := 0x | 0X</code>
     *
     * @param startChar Starting character of the literal
     * @param nextChar Second character of the literal
     * @return <code>true</code>, if the current input points to a start of a hex-numeric literal.
     *         <code>false</code> otherwise.
     */
    private boolean isHexIndicator(int startChar, int nextChar) {
        return startChar == '0' && (nextChar == 'x' || nextChar == 'X');
    }

    /**
     * Returns the next character from the reader, without consuming the stream.
     *
     * @return Next character
     */
    private int peek() {
        return this.reader.peek();
    }

    /**
     * Get the text associated with the current token.
     *
     * @return Text associated with the current token.
     */
    private String getLexeme() {
        return reader.getMarkedChars();
    }

    /**
     * Process and return double-quoted string literal.
     * <p>
     * <code>string-literal := DoubleQuotedStringLiteral
     * <br/>
     * DoubleQuotedStringLiteral := " (StringChar | StringEscape)* "
     * <br/>
     * StringChar := ^ ( 0xA | 0xD | \ | " )
     * <br/>
     * StringEscape := StringSingleEscape | StringNumericEscape
     * <br/>
     * StringSingleEscape := \t | \n | \r | \\ | \"
     * <br/>
     * StringNumericEscape := \ u{ CodePoint }
     * <br/>
     * CodePoint := HexDigit+
     * </code>
     *
     * @return String literal token
     */
    private STToken processStringLiteral() {
        int nextChar;
        while (!reader.isEOF()) {
            nextChar = peek();
            switch (nextChar) {
                case LexerTerminals.NEWLINE:
                case LexerTerminals.CARRIAGE_RETURN:
                    // Unterminated string: the literal ends at the line break.
                    reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DOUBLE_QUOTE);
                    break;
                case LexerTerminals.DOUBLE_QUOTE:
                    this.reader.advance();
                    break;
                case LexerTerminals.BACKSLASH:
                    switch (this.reader.peek(1)) {
                        case 'n':
                        case 't':
                        case 'r':
                        case LexerTerminals.BACKSLASH:
                        case LexerTerminals.DOUBLE_QUOTE:
                            this.reader.advance(2);
                            continue;
                        case 'u':
                            if (this.reader.peek(2) == LexerTerminals.OPEN_BRACE) {
                                processStringNumericEscape();
                            } else {
                                reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE);
                                this.reader.advance(2);
                            }
                            continue;
                        default:
                            reportLexerError(DiagnosticErrorCode.ERROR_INVALID_ESCAPE_SEQUENCE);
                            this.reader.advance();
                            continue;
                    }
                default:
                    this.reader.advance();
                    continue;
            }
            break;
        }
        return getLiteral(SyntaxKind.STRING_LITERAL);
    }

    /**
     * Process string numeric escape.
     * <p>
     * <code>StringNumericEscape := \ u { CodePoint }</code>
     */
    private void processStringNumericEscape() {
        // Consume the '\u{' prefix.
        this.reader.advance(3);

        // At least one hex digit is required inside the braces.
        if (!isHexDigit(peek())) {
            reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE);
            return;
        }

        reader.advance();
        while (isHexDigit(peek())) {
            reader.advance();
        }

        // The escape must be closed with '}'.
        if (peek() != LexerTerminals.CLOSE_BRACE) {
            reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE);
            return;
        }

        this.reader.advance();
    }

    /**
     * Process any token that starts with '!'.
     *
     * @return One of the tokens: <code>'!', '!=', '!=='</code>
     */
    private STToken processExclamationMarkOperator() {
        switch (peek()) {
            case LexerTerminals.EQUAL:
                reader.advance();
                if (peek() == LexerTerminals.EQUAL) {
                    reader.advance();
                    return getSyntaxToken(SyntaxKind.NOT_DOUBLE_EQUAL_TOKEN); // '!=='
                } else {
                    return getSyntaxToken(SyntaxKind.NOT_EQUAL_TOKEN); // '!='
                }
            default:
                return getSyntaxToken(SyntaxKind.EXCLAMATION_MARK_TOKEN);
        }
    }

    /**
     * Process any token that starts with '|'.
     *
     * @return One of the tokens: <code>'|', '|}', '||'</code>
     */
    private STToken processPipeOperator() {
        switch (peek()) {
            case LexerTerminals.CLOSE_BRACE:
                reader.advance();
                return getSyntaxToken(SyntaxKind.CLOSE_BRACE_PIPE_TOKEN); // '|}'
            case LexerTerminals.PIPE:
                reader.advance();
                return getSyntaxToken(SyntaxKind.LOGICAL_OR_TOKEN); // '||'
            default:
                return getSyntaxToken(SyntaxKind.PIPE_TOKEN);
        }
    }

    /**
     * Process any token that starts with '/'.
     *
     * @return One of the tokens: <code>'/', '/&lt;', '/*', '/**\/&lt;' </code>
     */
    private STToken processSlashToken() {
        switch (peek()) {
            case LexerTerminals.LT:
                reader.advance();
                return getSyntaxToken(SyntaxKind.SLASH_LT_TOKEN); // '/<'
            case LexerTerminals.ASTERISK:
                reader.advance();
                if (peek() != LexerTerminals.ASTERISK) {
                    return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN); // '/*'
                } else if (reader.peek(1) == LexerTerminals.SLASH && reader.peek(2) == LexerTerminals.LT) {
                    reader.advance(3);
                    return getSyntaxToken(SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
                } else {
                    return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN);
                }
            default:
                return getSyntaxToken(SyntaxKind.SLASH_TOKEN);
        }
    }

    /**
     * Creates a backtick token with no trailing trivia, so that the template
     * content that follows starts immediately after the backtick.
     */
    private STToken getBacktickToken() {
        STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList);
        STNode trailingTrivia = STNodeFactory.createEmptyNodeList();
        return STNodeFactory.createToken(SyntaxKind.BACKTICK_TOKEN, leadingTrivia, trailingTrivia);
    }

    /**
     * Reads the next token while in TEMPLATE mode: a closing backtick ends the mode,
     * '${' starts interpolation, and everything else up to either of those is one
     * template-string literal.
     */
    private STToken readTemplateToken() {
        reader.mark();
        if (reader.isEOF()) {
            return getSyntaxToken(SyntaxKind.EOF_TOKEN);
        }

        char nextChar = this.reader.peek();
        switch (nextChar) {
            case LexerTerminals.BACKTICK:
                reader.advance();
                endMode();
                return getSyntaxToken(SyntaxKind.BACKTICK_TOKEN);
            case LexerTerminals.DOLLAR:
                if (reader.peek(1) == LexerTerminals.OPEN_BRACE) {
                    // '${' opens an interpolation.
                    startMode(ParserMode.INTERPOLATION);
                    reader.advance(2);
                    return getSyntaxToken(SyntaxKind.INTERPOLATION_START_TOKEN);
                }
                // Lone '$' falls through and is consumed as template content.
            default:
                while (!reader.isEOF()) {
                    nextChar = this.reader.peek();
                    switch (nextChar) {
                        case LexerTerminals.DOLLAR:
                            if (this.reader.peek(1) == LexerTerminals.OPEN_BRACE) {
                                break;
                            }
                            reader.advance();
                            continue;
                        case LexerTerminals.BACKTICK:
                            break;
                        default:
                            reader.advance();
                            continue;
                    }
                    break;
                }
        }
        return getTemplateString(SyntaxKind.TEMPLATE_STRING);
    }

    /** Creates a template-content literal from the current lexeme. */
    private STToken getTemplateString(SyntaxKind kind) {
        STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList);
        String lexeme = getLexeme();
        STNode trailingTrivia = processTrailingTrivia();
        return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia);
    }

    /**
     * Process quoted Identifier token.
     *
     * <code>
     * QuotedIdentifierChar := IdentifierFollowingChar | QuotedIdentifierEscape | StringNumericEscape
     * </code>
     *
     * @return Quoted identifier token
     */
class BallerinaLexer extends AbstractLexer { public BallerinaLexer(CharReader charReader) { super(charReader, ParserMode.DEFAULT); } /** * Get the next lexical token. * * @return Next lexical token. */ public STToken nextToken() { STToken token; switch (this.mode) { case TEMPLATE: this.leadingTriviaList = new ArrayList<>(0); token = readTemplateToken(); break; case INTERPOLATION: processLeadingTrivia(); token = readTokenInInterpolation(); break; case INTERPOLATION_BRACED_CONTENT: processLeadingTrivia(); token = readTokenInBracedContentInInterpolation(); break; case DOCUMENTATION: processLeadingTrivia(); token = readDocumentationToken(); break; case DOCUMENTATION_INTERNAL: processLeadingTrivia(); token = readDocumentationInternalToken(); break; case DOCUMENTATION_PARAMETER: processLeadingTrivia(); token = readDocumentationParameterToken(); break; case DOCUMENTATION_REFERENCE_TYPE: processLeadingTrivia(); token = readDocumentationReferenceTypeToken(); break; case DOCUMENTATION_BACKTICK_CONTENT: processLeadingTrivia(); token = readDocumentationBacktickContentToken(); break; case DEFAULT: case IMPORT: default: processLeadingTrivia(); token = readToken(); } return cloneWithDiagnostics(token); } private STToken nextTokenInternal() { switch (this.mode) { case TEMPLATE: return readTemplateToken(); case INTERPOLATION: return readTokenInInterpolation(); case INTERPOLATION_BRACED_CONTENT: return readTokenInBracedContentInInterpolation(); case DOCUMENTATION: return readDocumentationToken(); case DOCUMENTATION_INTERNAL: return readDocumentationInternalToken(); case DOCUMENTATION_PARAMETER: return readDocumentationParameterToken(); case DOCUMENTATION_REFERENCE_TYPE: return readDocumentationReferenceTypeToken(); case DOCUMENTATION_BACKTICK_CONTENT: return readDocumentationBacktickContentToken(); case DEFAULT: case IMPORT: default: return readToken(); } } /* * Private Methods */ private STToken readToken() { reader.mark(); if (reader.isEOF()) { return 
getSyntaxToken(SyntaxKind.EOF_TOKEN); } int c = reader.peek(); reader.advance(); STToken token; switch (c) { case LexerTerminals.COLON: token = getSyntaxToken(SyntaxKind.COLON_TOKEN); break; case LexerTerminals.SEMICOLON: token = getSyntaxToken(SyntaxKind.SEMICOLON_TOKEN); break; case LexerTerminals.DOT: token = processDot(); break; case LexerTerminals.COMMA: token = getSyntaxToken(SyntaxKind.COMMA_TOKEN); break; case LexerTerminals.OPEN_PARANTHESIS: token = getSyntaxToken(SyntaxKind.OPEN_PAREN_TOKEN); break; case LexerTerminals.CLOSE_PARANTHESIS: token = getSyntaxToken(SyntaxKind.CLOSE_PAREN_TOKEN); break; case LexerTerminals.OPEN_BRACE: if (peek() == LexerTerminals.PIPE) { reader.advance(); token = getSyntaxToken(SyntaxKind.OPEN_BRACE_PIPE_TOKEN); } else { token = getSyntaxToken(SyntaxKind.OPEN_BRACE_TOKEN); } break; case LexerTerminals.CLOSE_BRACE: token = getSyntaxToken(SyntaxKind.CLOSE_BRACE_TOKEN); break; case LexerTerminals.OPEN_BRACKET: token = getSyntaxToken(SyntaxKind.OPEN_BRACKET_TOKEN); break; case LexerTerminals.CLOSE_BRACKET: token = getSyntaxToken(SyntaxKind.CLOSE_BRACKET_TOKEN); break; case LexerTerminals.PIPE: token = processPipeOperator(); break; case LexerTerminals.QUESTION_MARK: if (peek() == LexerTerminals.DOT) { reader.advance(); token = getSyntaxToken(SyntaxKind.OPTIONAL_CHAINING_TOKEN); } else if (peek() == LexerTerminals.COLON) { reader.advance(); token = getSyntaxToken(SyntaxKind.ELVIS_TOKEN); } else { token = getSyntaxToken(SyntaxKind.QUESTION_MARK_TOKEN); } break; case LexerTerminals.DOUBLE_QUOTE: token = processStringLiteral(); break; case LexerTerminals.HASH: startMode(ParserMode.DOCUMENTATION); token = getDocumentationSyntaxToken(SyntaxKind.HASH_TOKEN); break; case LexerTerminals.AT: token = getSyntaxToken(SyntaxKind.AT_TOKEN); break; case LexerTerminals.EQUAL: token = processEqualOperator(); break; case LexerTerminals.PLUS: token = getSyntaxToken(SyntaxKind.PLUS_TOKEN); break; case LexerTerminals.MINUS: if (reader.peek() == 
LexerTerminals.GT) { reader.advance(); if (peek() == LexerTerminals.GT) { reader.advance(); token = getSyntaxToken(SyntaxKind.SYNC_SEND_TOKEN); } else { token = getSyntaxToken(SyntaxKind.RIGHT_ARROW_TOKEN); } } else { token = getSyntaxToken(SyntaxKind.MINUS_TOKEN); } break; case LexerTerminals.ASTERISK: token = getSyntaxToken(SyntaxKind.ASTERISK_TOKEN); break; case LexerTerminals.SLASH: token = processSlashToken(); break; case LexerTerminals.PERCENT: token = getSyntaxToken(SyntaxKind.PERCENT_TOKEN); break; case LexerTerminals.LT: int nextChar = peek(); if (nextChar == LexerTerminals.EQUAL) { reader.advance(); token = getSyntaxToken(SyntaxKind.LT_EQUAL_TOKEN); } else if (nextChar == LexerTerminals.MINUS) { reader.advance(); token = getSyntaxToken(SyntaxKind.LEFT_ARROW_TOKEN); } else if (nextChar == LexerTerminals.LT) { reader.advance(); token = getSyntaxToken(SyntaxKind.DOUBLE_LT_TOKEN); } else { token = getSyntaxToken(SyntaxKind.LT_TOKEN); } break; case LexerTerminals.GT: token = processTokenStartWithGt(); break; case LexerTerminals.EXCLAMATION_MARK: token = processExclamationMarkOperator(); break; case LexerTerminals.BITWISE_AND: if (peek() == LexerTerminals.BITWISE_AND) { reader.advance(); token = getSyntaxToken(SyntaxKind.LOGICAL_AND_TOKEN); } else { token = getSyntaxToken(SyntaxKind.BITWISE_AND_TOKEN); } break; case LexerTerminals.BITWISE_XOR: token = getSyntaxToken(SyntaxKind.BITWISE_XOR_TOKEN); break; case LexerTerminals.NEGATION: token = getSyntaxToken(SyntaxKind.NEGATION_TOKEN); break; case LexerTerminals.BACKTICK: startMode(ParserMode.TEMPLATE); token = getBacktickToken(); break; case LexerTerminals.SINGLE_QUOTE: token = processQuotedIdentifier(); break; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': token = processNumericLiteral(c); break; case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 
'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': case '_': token = processIdentifierOrKeyword(); break; default: processInvalidToken(); token = nextTokenInternal(); break; } return token; } private STToken getSyntaxToken(SyntaxKind kind) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); STNode trailingTrivia = processTrailingTrivia(); return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia); } private STToken getDocumentationSyntaxToken(SyntaxKind kind) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); STNode trailingTrivia = processTrailingTrivia(); int bucketCount = trailingTrivia.bucketCount(); if (bucketCount > 0 && trailingTrivia.childInBucket(bucketCount - 1).kind == SyntaxKind.END_OF_LINE_MINUTIAE) { endMode(); } return STNodeFactory.createToken(kind, leadingTrivia, trailingTrivia); } private STToken getIdentifierToken(String tokenText) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); String lexeme = getLexeme(); STNode trailingTrivia = processTrailingTrivia(); return STNodeFactory.createIdentifierToken(lexeme, leadingTrivia, trailingTrivia); } private STToken getLiteral(SyntaxKind kind) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); String lexeme = getLexeme(); STNode trailingTrivia = processTrailingTrivia(); return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia); } private STToken getDocumentationLiteral(SyntaxKind kind) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); String lexeme = getLexeme(); STNode trailingTrivia = processTrailingTrivia(); int bucketCount 
= trailingTrivia.bucketCount(); if (bucketCount > 0 && trailingTrivia.childInBucket(bucketCount - 1).kind == SyntaxKind.END_OF_LINE_MINUTIAE) { endMode(); } return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia); } /** * Process leading trivia. */ private void processLeadingTrivia() { this.leadingTriviaList = new ArrayList<>(10); processSyntaxTrivia(this.leadingTriviaList, true); } /** * Process and return trailing trivia. * * @return Trailing trivia */ private STNode processTrailingTrivia() { List<STNode> triviaList = new ArrayList<>(10); processSyntaxTrivia(triviaList, false); return STNodeFactory.createNodeList(triviaList); } /** * Process syntax trivia and add it to the provided list. * <p> * <code>syntax-trivia := whitespace | end-of-line | comments</code> * * @param triviaList List of trivia * @param isLeading Flag indicating whether the currently processing leading trivia or not */ private void processSyntaxTrivia(List<STNode> triviaList, boolean isLeading) { while (!reader.isEOF()) { reader.mark(); char c = reader.peek(); switch (c) { case LexerTerminals.SPACE: case LexerTerminals.TAB: case LexerTerminals.FORM_FEED: triviaList.add(processWhitespaces()); break; case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.NEWLINE: triviaList.add(processEndOfLine()); if (isLeading) { break; } return; case LexerTerminals.SLASH: if (reader.peek(1) == LexerTerminals.SLASH) { triviaList.add(processComment()); break; } return; default: return; } } } /** * Process whitespace up to an end of line. 
* <p> * <code>whitespace := 0x9 | 0xC | 0x20</code> * * @return Whitespace trivia */ private STNode processWhitespaces() { while (!reader.isEOF()) { char c = reader.peek(); switch (c) { case LexerTerminals.SPACE: case LexerTerminals.TAB: case LexerTerminals.FORM_FEED: reader.advance(); continue; case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.NEWLINE: break; default: break; } break; } return STNodeFactory.createMinutiae(SyntaxKind.WHITESPACE_MINUTIAE, getLexeme()); } /** * Process end of line. * <p> * <code>end-of-line := 0xA | 0xD</code> * * @return End of line trivia */ private STNode processEndOfLine() { char c = reader.peek(); switch (c) { case LexerTerminals.NEWLINE: reader.advance(); return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme()); case LexerTerminals.CARRIAGE_RETURN: reader.advance(); if (reader.peek() == LexerTerminals.NEWLINE) { reader.advance(); } return STNodeFactory.createMinutiae(SyntaxKind.END_OF_LINE_MINUTIAE, getLexeme()); default: throw new IllegalStateException(); } } /** * Process dot, ellipsis or decimal floating point token. * * @return Dot, ellipsis or decimal floating point token */ private STToken processDot() { int nextChar = reader.peek(); if (nextChar == LexerTerminals.DOT) { int nextNextChar = reader.peek(1); if (nextNextChar == LexerTerminals.DOT) { reader.advance(2); return getSyntaxToken(SyntaxKind.ELLIPSIS_TOKEN); } else if (nextNextChar == LexerTerminals.LT) { reader.advance(2); return getSyntaxToken(SyntaxKind.DOUBLE_DOT_LT_TOKEN); } } else if (nextChar == LexerTerminals.AT) { reader.advance(); return getSyntaxToken(SyntaxKind.ANNOT_CHAINING_TOKEN); } else if (nextChar == LexerTerminals.LT) { reader.advance(); return getSyntaxToken(SyntaxKind.DOT_LT_TOKEN); } if (this.mode != ParserMode.IMPORT && isDigit(nextChar)) { return processDecimalFloatLiteral(); } return getSyntaxToken(SyntaxKind.DOT_TOKEN); } /** * <p> * Process a comment, and add it to trivia list. 
* </p> * <code>Comment := * <br/><br/>AnyCharButNewline := ^ 0xA</code> */ private STNode processComment() { reader.advance(2); int nextToken = peek(); while (!reader.isEOF()) { switch (nextToken) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: break; default: reader.advance(); nextToken = peek(); continue; } break; } return STNodeFactory.createMinutiae(SyntaxKind.COMMENT_MINUTIAE, getLexeme()); } /** * Process any token that starts with '='. * * @return One of the tokens: <code>'=', '==', '=>', '==='</code> */ private STToken processEqualOperator() { switch (peek()) { case LexerTerminals.EQUAL: reader.advance(); if (peek() == LexerTerminals.EQUAL) { reader.advance(); return getSyntaxToken(SyntaxKind.TRIPPLE_EQUAL_TOKEN); } else { return getSyntaxToken(SyntaxKind.DOUBLE_EQUAL_TOKEN); } case LexerTerminals.GT: reader.advance(); return getSyntaxToken(SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN); default: return getSyntaxToken(SyntaxKind.EQUAL_TOKEN); } } /** * <p> * Process and returns a numeric literal. * </p> * <code> * numeric-literal := int-literal | floating-point-literal * <br/> * floating-point-literal := DecimalFloatingPointNumber | HexFloatingPointLiteral * <br/> * int-literal := DecimalNumber | HexIntLiteral * <br/> * DecimalNumber := 0 | NonZeroDigit Digit* * <br/> * Digit := 0 .. 9 * <br/> * NonZeroDigit := 1 .. 9 * </code> * * @return The numeric literal. 
*/ private STToken processNumericLiteral(int startChar) { int nextChar = peek(); if (isHexIndicator(startChar, nextChar)) { return processHexLiteral(); } int len = 1; while (!reader.isEOF()) { switch (nextChar) { case LexerTerminals.DOT: case 'e': case 'E': case 'f': case 'F': case 'd': case 'D': if (reader.peek(1) == LexerTerminals.DOT) { break; } if (this.mode == ParserMode.IMPORT) { break; } if (startChar == '0' && len > 1) { reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS); } return processDecimalFloatLiteral(); default: if (isDigit(nextChar)) { reader.advance(); len++; nextChar = peek(); continue; } break; } break; } if (startChar == '0' && len > 1) { reportLexerError(DiagnosticErrorCode.ERROR_LEADING_ZEROS_IN_NUMERIC_LITERALS); } return getLiteral(SyntaxKind.DECIMAL_INTEGER_LITERAL); } /** * <p> * Process and returns a decimal floating point literal. * </p> * <code> * DecimalFloatingPointNumber := * DecimalNumber Exponent [FloatingPointTypeSuffix] * | DottedDecimalNumber [Exponent] [FloatingPointTypeSuffix] * | DecimalNumber FloatingPointTypeSuffix * <br/> * DottedDecimalNumber := DecimalNumber . Digit* | . Digit+ * <br/> * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix * <br/> * DecimalTypeSuffix := d | D * <br/> * FloatTypeSuffix := f | F * </code> * * @return The decimal floating point literal. */ private STToken processDecimalFloatLiteral() { int nextChar = peek(); if (nextChar == LexerTerminals.DOT) { reader.advance(); nextChar = peek(); } while (isDigit(nextChar)) { reader.advance(); nextChar = peek(); } switch (nextChar) { case 'e': case 'E': return processExponent(false); case 'f': case 'F': case 'd': case 'D': return parseFloatingPointTypeSuffix(); } return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL); } /** * <p> * Process an exponent or hex-exponent. 
* </p> * <code> * exponent := Exponent | HexExponent * <br/> * Exponent := ExponentIndicator [Sign] Digit+ * <br/> * HexExponent := HexExponentIndicator [Sign] Digit+ * <br/> * ExponentIndicator := e | E * <br/> * HexExponentIndicator := p | P * <br/> * Sign := + | - * <br/> * Digit := 0 .. 9 * </code> * * @param isHex HexExponent or not * @return The decimal floating point literal. */ private STToken processExponent(boolean isHex) { reader.advance(); int nextChar = peek(); if (nextChar == LexerTerminals.PLUS || nextChar == LexerTerminals.MINUS) { reader.advance(); nextChar = peek(); } if (!isDigit(nextChar)) { reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DIGIT_AFTER_EXPONENT_INDICATOR); } while (isDigit(nextChar)) { reader.advance(); nextChar = peek(); } if (isHex) { return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL); } switch (nextChar) { case 'f': case 'F': case 'd': case 'D': return parseFloatingPointTypeSuffix(); } return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL); } /** * <p> * Parse floating point type suffix. * </p> * <code> * FloatingPointTypeSuffix := DecimalTypeSuffix | FloatTypeSuffix * <br/> * DecimalTypeSuffix := d | D * <br/> * FloatTypeSuffix := f | F * </code> * * @return The decimal floating point literal. */ private STToken parseFloatingPointTypeSuffix() { reader.advance(); return getLiteral(SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL); } /** * <p> * Process and returns a hex literal. * </p> * <code> * hex-literal := HexIntLiteral | HexFloatingPointLiteral * <br/> * HexIntLiteral := HexIndicator HexNumber * <br/> * HexNumber := HexDigit+ * <br/> * HexIndicator := 0x | 0X * <br/> * HexDigit := Digit | a .. f | A .. F * <br/> * HexFloatingPointLiteral := HexIndicator HexFloatingPointNumber * <br/> * HexFloatingPointNumber := HexNumber HexExponent | DottedHexNumber [HexExponent] * <br/> * DottedHexNumber := HexDigit+ . HexDigit* | . HexDigit+ * </code> * * @return The hex literal. 
*/ private STToken processHexLiteral() { reader.advance(); if (peek() == LexerTerminals.DOT && !isHexDigit(reader.peek(1))) { reportLexerError(DiagnosticErrorCode.ERROR_MISSING_HEX_DIGIT_AFTER_DOT); } int nextChar; while (isHexDigit(peek())) { reader.advance(); } nextChar = peek(); switch (nextChar) { case LexerTerminals.DOT: reader.advance(); nextChar = peek(); while (isHexDigit(nextChar)) { reader.advance(); nextChar = peek(); } switch (nextChar) { case 'p': case 'P': return processExponent(true); } break; case 'p': case 'P': return processExponent(true); default: return getLiteral(SyntaxKind.HEX_INTEGER_LITERAL); } return getLiteral(SyntaxKind.HEX_FLOATING_POINT_LITERAL); } /** * Process and returns an identifier or a keyword. * * @return An identifier or a keyword. */ private STToken processIdentifierOrKeyword() { while (isIdentifierFollowingChar(peek())) { reader.advance(); } String tokenText = getLexeme(); switch (tokenText) { case LexerTerminals.INT: return getSyntaxToken(SyntaxKind.INT_KEYWORD); case LexerTerminals.FLOAT: return getSyntaxToken(SyntaxKind.FLOAT_KEYWORD); case LexerTerminals.STRING: return getSyntaxToken(SyntaxKind.STRING_KEYWORD); case LexerTerminals.BOOLEAN: return getSyntaxToken(SyntaxKind.BOOLEAN_KEYWORD); case LexerTerminals.DECIMAL: return getSyntaxToken(SyntaxKind.DECIMAL_KEYWORD); case LexerTerminals.XML: return getSyntaxToken(SyntaxKind.XML_KEYWORD); case LexerTerminals.JSON: return getSyntaxToken(SyntaxKind.JSON_KEYWORD); case LexerTerminals.HANDLE: return getSyntaxToken(SyntaxKind.HANDLE_KEYWORD); case LexerTerminals.ANY: return getSyntaxToken(SyntaxKind.ANY_KEYWORD); case LexerTerminals.ANYDATA: return getSyntaxToken(SyntaxKind.ANYDATA_KEYWORD); case LexerTerminals.NEVER: return getSyntaxToken(SyntaxKind.NEVER_KEYWORD); case LexerTerminals.BYTE: return getSyntaxToken(SyntaxKind.BYTE_KEYWORD); case LexerTerminals.PUBLIC: return getSyntaxToken(SyntaxKind.PUBLIC_KEYWORD); case LexerTerminals.PRIVATE: return 
getSyntaxToken(SyntaxKind.PRIVATE_KEYWORD); case LexerTerminals.FUNCTION: return getSyntaxToken(SyntaxKind.FUNCTION_KEYWORD); case LexerTerminals.RETURN: return getSyntaxToken(SyntaxKind.RETURN_KEYWORD); case LexerTerminals.RETURNS: return getSyntaxToken(SyntaxKind.RETURNS_KEYWORD); case LexerTerminals.EXTERNAL: return getSyntaxToken(SyntaxKind.EXTERNAL_KEYWORD); case LexerTerminals.TYPE: return getSyntaxToken(SyntaxKind.TYPE_KEYWORD); case LexerTerminals.RECORD: return getSyntaxToken(SyntaxKind.RECORD_KEYWORD); case LexerTerminals.OBJECT: return getSyntaxToken(SyntaxKind.OBJECT_KEYWORD); case LexerTerminals.REMOTE: return getSyntaxToken(SyntaxKind.REMOTE_KEYWORD); case LexerTerminals.ABSTRACT: return getSyntaxToken(SyntaxKind.ABSTRACT_KEYWORD); case LexerTerminals.CLIENT: return getSyntaxToken(SyntaxKind.CLIENT_KEYWORD); case LexerTerminals.IF: return getSyntaxToken(SyntaxKind.IF_KEYWORD); case LexerTerminals.ELSE: return getSyntaxToken(SyntaxKind.ELSE_KEYWORD); case LexerTerminals.WHILE: return getSyntaxToken(SyntaxKind.WHILE_KEYWORD); case LexerTerminals.TRUE: return getSyntaxToken(SyntaxKind.TRUE_KEYWORD); case LexerTerminals.FALSE: return getSyntaxToken(SyntaxKind.FALSE_KEYWORD); case LexerTerminals.CHECK: return getSyntaxToken(SyntaxKind.CHECK_KEYWORD); case LexerTerminals.CHECKPANIC: return getSyntaxToken(SyntaxKind.CHECKPANIC_KEYWORD); case LexerTerminals.CONTINUE: return getSyntaxToken(SyntaxKind.CONTINUE_KEYWORD); case LexerTerminals.BREAK: return getSyntaxToken(SyntaxKind.BREAK_KEYWORD); case LexerTerminals.PANIC: return getSyntaxToken(SyntaxKind.PANIC_KEYWORD); case LexerTerminals.IMPORT: return getSyntaxToken(SyntaxKind.IMPORT_KEYWORD); case LexerTerminals.VERSION: return getSyntaxToken(SyntaxKind.VERSION_KEYWORD); case LexerTerminals.AS: return getSyntaxToken(SyntaxKind.AS_KEYWORD); case LexerTerminals.SERVICE: return getSyntaxToken(SyntaxKind.SERVICE_KEYWORD); case LexerTerminals.ON: return getSyntaxToken(SyntaxKind.ON_KEYWORD); case 
LexerTerminals.RESOURCE: return getSyntaxToken(SyntaxKind.RESOURCE_KEYWORD); case LexerTerminals.LISTENER: return getSyntaxToken(SyntaxKind.LISTENER_KEYWORD); case LexerTerminals.CONST: return getSyntaxToken(SyntaxKind.CONST_KEYWORD); case LexerTerminals.FINAL: return getSyntaxToken(SyntaxKind.FINAL_KEYWORD); case LexerTerminals.TYPEOF: return getSyntaxToken(SyntaxKind.TYPEOF_KEYWORD); case LexerTerminals.IS: return getSyntaxToken(SyntaxKind.IS_KEYWORD); case LexerTerminals.NULL: return getSyntaxToken(SyntaxKind.NULL_KEYWORD); case LexerTerminals.LOCK: return getSyntaxToken(SyntaxKind.LOCK_KEYWORD); case LexerTerminals.ANNOTATION: return getSyntaxToken(SyntaxKind.ANNOTATION_KEYWORD); case LexerTerminals.SOURCE: return getSyntaxToken(SyntaxKind.SOURCE_KEYWORD); case LexerTerminals.VAR: return getSyntaxToken(SyntaxKind.VAR_KEYWORD); case LexerTerminals.WORKER: return getSyntaxToken(SyntaxKind.WORKER_KEYWORD); case LexerTerminals.PARAMETER: return getSyntaxToken(SyntaxKind.PARAMETER_KEYWORD); case LexerTerminals.FIELD: return getSyntaxToken(SyntaxKind.FIELD_KEYWORD); case LexerTerminals.XMLNS: return getSyntaxToken(SyntaxKind.XMLNS_KEYWORD); case LexerTerminals.FORK: return getSyntaxToken(SyntaxKind.FORK_KEYWORD); case LexerTerminals.MAP: return getSyntaxToken(SyntaxKind.MAP_KEYWORD); case LexerTerminals.FUTURE: return getSyntaxToken(SyntaxKind.FUTURE_KEYWORD); case LexerTerminals.TYPEDESC: return getSyntaxToken(SyntaxKind.TYPEDESC_KEYWORD); case LexerTerminals.TRAP: return getSyntaxToken(SyntaxKind.TRAP_KEYWORD); case LexerTerminals.IN: return getSyntaxToken(SyntaxKind.IN_KEYWORD); case LexerTerminals.FOREACH: return getSyntaxToken(SyntaxKind.FOREACH_KEYWORD); case LexerTerminals.TABLE: return getSyntaxToken(SyntaxKind.TABLE_KEYWORD); case LexerTerminals.ERROR: return getSyntaxToken(SyntaxKind.ERROR_KEYWORD); case LexerTerminals.LET: return getSyntaxToken(SyntaxKind.LET_KEYWORD); case LexerTerminals.STREAM: return getSyntaxToken(SyntaxKind.STREAM_KEYWORD); case 
LexerTerminals.NEW: return getSyntaxToken(SyntaxKind.NEW_KEYWORD); case LexerTerminals.READONLY: return getSyntaxToken(SyntaxKind.READONLY_KEYWORD); case LexerTerminals.DISTINCT: return getSyntaxToken(SyntaxKind.DISTINCT_KEYWORD); case LexerTerminals.FROM: return getSyntaxToken(SyntaxKind.FROM_KEYWORD); case LexerTerminals.WHERE: return getSyntaxToken(SyntaxKind.WHERE_KEYWORD); case LexerTerminals.SELECT: return getSyntaxToken(SyntaxKind.SELECT_KEYWORD); case LexerTerminals.START: return getSyntaxToken(SyntaxKind.START_KEYWORD); case LexerTerminals.FLUSH: return getSyntaxToken(SyntaxKind.FLUSH_KEYWORD); case LexerTerminals.DEFAULT: return getSyntaxToken(SyntaxKind.DEFAULT_KEYWORD); case LexerTerminals.WAIT: return getSyntaxToken(SyntaxKind.WAIT_KEYWORD); case LexerTerminals.DO: return getSyntaxToken(SyntaxKind.DO_KEYWORD); case LexerTerminals.TRANSACTION: return getSyntaxToken(SyntaxKind.TRANSACTION_KEYWORD); case LexerTerminals.COMMIT: return getSyntaxToken(SyntaxKind.COMMIT_KEYWORD); case LexerTerminals.RETRY: return getSyntaxToken(SyntaxKind.RETRY_KEYWORD); case LexerTerminals.ROLLBACK: return getSyntaxToken(SyntaxKind.ROLLBACK_KEYWORD); case LexerTerminals.TRANSACTIONAL: return getSyntaxToken(SyntaxKind.TRANSACTIONAL_KEYWORD); case LexerTerminals.ENUM: return getSyntaxToken(SyntaxKind.ENUM_KEYWORD); case LexerTerminals.BASE16: return getSyntaxToken(SyntaxKind.BASE16_KEYWORD); case LexerTerminals.BASE64: return getSyntaxToken(SyntaxKind.BASE64_KEYWORD); case LexerTerminals.MATCH: return getSyntaxToken(SyntaxKind.MATCH_KEYWORD); case LexerTerminals.CONFLICT: return getSyntaxToken(SyntaxKind.CONFLICT_KEYWORD); case LexerTerminals.LIMIT: return getSyntaxToken(SyntaxKind.LIMIT_KEYWORD); case LexerTerminals.JOIN: return getSyntaxToken(SyntaxKind.JOIN_KEYWORD); case LexerTerminals.OUTER: return getSyntaxToken(SyntaxKind.OUTER_KEYWORD); case LexerTerminals.EQUALS: return getSyntaxToken(SyntaxKind.EQUALS_KEYWORD); default: return getIdentifierToken(tokenText); } } /** * 
Process and returns an invalid token. Consumes the input until {@link * is reached. */ private void processInvalidToken() { while (!isEndOfInvalidToken()) { reader.advance(); } String tokenText = getLexeme(); STNode invalidToken = STNodeFactory.createInvalidToken(tokenText); STNode invalidNodeMinutiae = STNodeFactory.createInvalidNodeMinutiae(invalidToken); this.leadingTriviaList.add(invalidNodeMinutiae); } /** * Check whether the current index is pointing to an end of an invalid lexer-token. * An invalid token is considered to end if one of the below is reached: * <ul> * <li>a whitespace</li> * <li>semicolon</li> * <li>newline</li> * </ul> * * @return <code>true</code>, if the end of an invalid token is reached, <code>false</code> otherwise */ private boolean isEndOfInvalidToken() { if (reader.isEOF()) { return true; } int currentChar = peek(); switch (currentChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: case LexerTerminals.SPACE: case LexerTerminals.TAB: case LexerTerminals.SEMICOLON: case LexerTerminals.OPEN_BRACE: case LexerTerminals.CLOSE_BRACE: case LexerTerminals.OPEN_BRACKET: case LexerTerminals.CLOSE_BRACKET: case LexerTerminals.OPEN_PARANTHESIS: case LexerTerminals.CLOSE_PARANTHESIS: return true; default: return false; } } /** * <p> * Check whether a given char is an identifier start char. * </p> * <code>IdentifierInitialChar := A .. Z | a .. z | _ | UnicodeIdentifierChar</code> * * @param c character to check * @return <code>true</code>, if the character is an identifier start char. <code>false</code> otherwise. */ private boolean isIdentifierInitialChar(int c) { if ('A' <= c && c <= 'Z') { return true; } if ('a' <= c && c <= 'z') { return true; } if (c == '_') { return true; } return false; } /** * <p> * Check whether a given char is an identifier following char. 
* </p> * <code>IdentifierFollowingChar := IdentifierInitialChar | Digit</code> * * @param c character to check * @return <code>true</code>, if the character is an identifier following char. <code>false</code> otherwise. */ private boolean isIdentifierFollowingChar(int c) { return isIdentifierInitialChar(c) || isDigit(c); } /** * <p> * Check whether a given char is a digit. * </p> * <code>Digit := 0..9</code> * * @param c character to check * @return <code>true</code>, if the character represents a digit. <code>false</code> otherwise. */ static boolean isDigit(int c) { return ('0' <= c && c <= '9'); } /** * <p> * Check whether a given char is a hexa digit. * </p> * <code>HexDigit := Digit | a .. f | A .. F</code> * * @param c character to check * @return <code>true</code>, if the character represents a hex digit. <code>false</code> otherwise. */ static boolean isHexDigit(int c) { if ('a' <= c && c <= 'f') { return true; } if ('A' <= c && c <= 'F') { return true; } return isDigit(c); } /** * <p> * Check whether current input index points to a start of a hex-numeric literal. * </p> * <code>HexIndicator := 0x | 0X</code> * * @param startChar Starting character of the literal * @param nextChar Second character of the literal * @return <code>true</code>, if the current input points to a start of a hex-numeric literal. * <code>false</code> otherwise. */ private boolean isHexIndicator(int startChar, int nextChar) { return startChar == '0' && (nextChar == 'x' || nextChar == 'X'); } /** * Returns the next character from the reader, without consuming the stream. * * @return Next character */ private int peek() { return this.reader.peek(); } /** * Get the text associated with the current token. * * @return Text associated with the current token. */ private String getLexeme() { return reader.getMarkedChars(); } /** * Process and return double-quoted string literal. 
* <p> * <code>string-literal := DoubleQuotedStringLiteral * <br/> * DoubleQuotedStringLiteral := " (StringChar | StringEscape)* " * <br/> * StringChar := ^ ( 0xA | 0xD | \ | " ) * <br/> * StringEscape := StringSingleEscape | StringNumericEscape * <br/> * StringSingleEscape := \t | \n | \r | \\ | \" * <br/> * StringNumericEscape := \ u{ CodePoint } * <br/> * CodePoint := HexDigit+ * </code> * * @return String literal token */ private STToken processStringLiteral() { int nextChar; while (!reader.isEOF()) { nextChar = peek(); switch (nextChar) { case LexerTerminals.NEWLINE: case LexerTerminals.CARRIAGE_RETURN: reportLexerError(DiagnosticErrorCode.ERROR_MISSING_DOUBLE_QUOTE); break; case LexerTerminals.DOUBLE_QUOTE: this.reader.advance(); break; case LexerTerminals.BACKSLASH: switch (this.reader.peek(1)) { case 'n': case 't': case 'r': case LexerTerminals.BACKSLASH: case LexerTerminals.DOUBLE_QUOTE: this.reader.advance(2); continue; case 'u': if (this.reader.peek(2) == LexerTerminals.OPEN_BRACE) { processStringNumericEscape(); } else { reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE); this.reader.advance(2); } continue; default: reportLexerError(DiagnosticErrorCode.ERROR_INVALID_ESCAPE_SEQUENCE); this.reader.advance(); continue; } default: this.reader.advance(); continue; } break; } return getLiteral(SyntaxKind.STRING_LITERAL); } /** * Process string numeric escape. * <p> * <code>StringNumericEscape := \ u { CodePoint }</code> */ private void processStringNumericEscape() { this.reader.advance(3); if (!isHexDigit(peek())) { reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE); return; } reader.advance(); while (isHexDigit(peek())) { reader.advance(); } if (peek() != LexerTerminals.CLOSE_BRACE) { reportLexerError(DiagnosticErrorCode.ERROR_INVALID_STRING_NUMERIC_ESCAPE_SEQUENCE); return; } this.reader.advance(); } /** * Process any token that starts with '!'. 
* * @return One of the tokens: <code>'!', '!=', '!=='</code> */ private STToken processExclamationMarkOperator() { switch (peek()) { case LexerTerminals.EQUAL: reader.advance(); if (peek() == LexerTerminals.EQUAL) { reader.advance(); return getSyntaxToken(SyntaxKind.NOT_DOUBLE_EQUAL_TOKEN); } else { return getSyntaxToken(SyntaxKind.NOT_EQUAL_TOKEN); } default: return getSyntaxToken(SyntaxKind.EXCLAMATION_MARK_TOKEN); } } /** * Process any token that starts with '|'. * * @return One of the tokens: <code>'|', '|}', '||'</code> */ private STToken processPipeOperator() { switch (peek()) { case LexerTerminals.CLOSE_BRACE: reader.advance(); return getSyntaxToken(SyntaxKind.CLOSE_BRACE_PIPE_TOKEN); case LexerTerminals.PIPE: reader.advance(); return getSyntaxToken(SyntaxKind.LOGICAL_OR_TOKEN); default: return getSyntaxToken(SyntaxKind.PIPE_TOKEN); } } /** * Process any token that starts with '/'. * * @return One of the tokens: <code>'/', '/<', '/*', '/**\/<' </code> */ private STToken processSlashToken() { switch (peek()) { case LexerTerminals.LT: reader.advance(); return getSyntaxToken(SyntaxKind.SLASH_LT_TOKEN); case LexerTerminals.ASTERISK: reader.advance(); if (peek() != LexerTerminals.ASTERISK) { return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN); } else if (reader.peek(1) == LexerTerminals.SLASH && reader.peek(2) == LexerTerminals.LT) { reader.advance(3); return getSyntaxToken(SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN); } else { return getSyntaxToken(SyntaxKind.SLASH_ASTERISK_TOKEN); } default: return getSyntaxToken(SyntaxKind.SLASH_TOKEN); } } private STToken getBacktickToken() { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); STNode trailingTrivia = STNodeFactory.createEmptyNodeList(); return STNodeFactory.createToken(SyntaxKind.BACKTICK_TOKEN, leadingTrivia, trailingTrivia); } private STToken readTemplateToken() { reader.mark(); if (reader.isEOF()) { return getSyntaxToken(SyntaxKind.EOF_TOKEN); } char nextChar = 
this.reader.peek(); switch (nextChar) { case LexerTerminals.BACKTICK: reader.advance(); endMode(); return getSyntaxToken(SyntaxKind.BACKTICK_TOKEN); case LexerTerminals.DOLLAR: if (reader.peek(1) == LexerTerminals.OPEN_BRACE) { startMode(ParserMode.INTERPOLATION); reader.advance(2); return getSyntaxToken(SyntaxKind.INTERPOLATION_START_TOKEN); } default: while (!reader.isEOF()) { nextChar = this.reader.peek(); switch (nextChar) { case LexerTerminals.DOLLAR: if (this.reader.peek(1) == LexerTerminals.OPEN_BRACE) { break; } reader.advance(); continue; case LexerTerminals.BACKTICK: break; default: reader.advance(); continue; } break; } } return getTemplateString(SyntaxKind.TEMPLATE_STRING); } private STToken getTemplateString(SyntaxKind kind) { STNode leadingTrivia = STNodeFactory.createNodeList(this.leadingTriviaList); String lexeme = getLexeme(); STNode trailingTrivia = processTrailingTrivia(); return STNodeFactory.createLiteralValueToken(kind, lexeme, leadingTrivia, trailingTrivia); } /** * Process quoted Identifier token. * * <code> * QuotedIdentifierChar := IdentifierFollowingChar | QuotedIdentifierEscape | StringNumericEscape * </code> * * @return Quoted identifier token */
> Previously desiredState = "FAILED" and newState = "CANCELLING" would return true. Now they return false. That's intended. As a side-effect it basically solves the failing cancel problem we've discussed previously. > How about, we use this: TBH I find these early returns super hard to read. Usually I'd try to avoid them unless there the nesting would be too deep / we have a really long method that can't be easily broken into smaller fragment.s
boolean updateTaskExecutionState(TaskExecutionStateTransition taskExecutionStateTransition) { if (taskExecutionStateTransition.getExecutionState() != ExecutionState.FAILED) { return getExecutionGraph().updateState(taskExecutionStateTransition); } Optional<ExecutionVertexID> idOpt = executionGraph.findExecutionVertexId(taskExecutionStateTransition.getID()); final boolean successfulUpdate = getExecutionGraph().updateState(taskExecutionStateTransition); if (!successfulUpdate) { return false; } Throwable cause = extractErrorOrUseDefault(taskExecutionStateTransition); checkState(idOpt.isPresent()); ExecutionVertexID id = idOpt.get(); if (getNonEmptyExecution(id).getFailureInfo().isPresent()) { failureCollection.add(new LocalFailure(cause, id)); } onFailure(cause); return true; }
return true;
boolean updateTaskExecutionState(TaskExecutionStateTransition taskExecutionStateTransition) { final Optional<AccessExecution> maybeExecution = executionGraph.findExecution(taskExecutionStateTransition.getID()); final Optional<String> maybeTaskName = executionGraph.findVertexWithAttempt(taskExecutionStateTransition.getID()); final ExecutionState desiredState = taskExecutionStateTransition.getExecutionState(); boolean successfulUpdate = getExecutionGraph().updateState(taskExecutionStateTransition); if (successfulUpdate && desiredState == ExecutionState.FAILED) { final AccessExecution execution = maybeExecution.orElseThrow(NoSuchElementException::new); final String taskName = maybeTaskName.orElseThrow(NoSuchElementException::new); final ExecutionState currentState = execution.getState(); if (currentState == desiredState) { failureCollection.add(ExceptionHistoryEntry.create(execution, taskName)); onFailure( ErrorInfo.handleMissingThrowable( taskExecutionStateTransition.getError(userCodeClassLoader))); } } return successfulUpdate; }
class StateWithExecutionGraph implements State { private final Context context; private final ExecutionGraph executionGraph; private final ExecutionGraphHandler executionGraphHandler; private final OperatorCoordinatorHandler operatorCoordinatorHandler; private final KvStateHandler kvStateHandler; private final Logger logger; private final ClassLoader userCodeClassLoader; private final List<Failure> failureCollection; StateWithExecutionGraph( Context context, ExecutionGraph executionGraph, ExecutionGraphHandler executionGraphHandler, OperatorCoordinatorHandler operatorCoordinatorHandler, Logger logger, ClassLoader userClassCodeLoader, List<Failure> failureCollection) { this.context = context; this.executionGraph = executionGraph; this.executionGraphHandler = executionGraphHandler; this.operatorCoordinatorHandler = operatorCoordinatorHandler; this.kvStateHandler = new KvStateHandler(executionGraph); this.logger = logger; this.userCodeClassLoader = userClassCodeLoader; this.failureCollection = failureCollection; FutureUtils.assertNoException( executionGraph .getTerminationFuture() .thenAcceptAsync( jobStatus -> { if (jobStatus.isGloballyTerminalState()) { context.runIfState( this, () -> { convertFailures(failureCollection) .ifPresent(context::archiveFailure); onGloballyTerminalState(jobStatus); }); } }, context.getMainThreadExecutor())); } @VisibleForTesting ExecutionGraph getExecutionGraph() { return executionGraph; } JobID getJobId() { return executionGraph.getJobID(); } protected OperatorCoordinatorHandler getOperatorCoordinatorHandler() { return operatorCoordinatorHandler; } protected ExecutionGraphHandler getExecutionGraphHandler() { return executionGraphHandler; } @Override public void onLeave(Class<? 
extends State> newState) { if (!StateWithExecutionGraph.class.isAssignableFrom(newState)) { operatorCoordinatorHandler.disposeAllOperatorCoordinators(); } } @Override public ArchivedExecutionGraph getJob() { return ArchivedExecutionGraph.createFrom(executionGraph, getJobStatus()); } @Override public void suspend(Throwable cause) { executionGraph.suspend(cause); Preconditions.checkState(executionGraph.getState().isTerminalState()); context.goToFinished(ArchivedExecutionGraph.createFrom(executionGraph)); } @Override public Logger getLogger() { return logger; } void notifyPartitionDataAvailable(ResultPartitionID partitionID) { executionGraph.notifyPartitionDataAvailable(partitionID); } SerializedInputSplit requestNextInputSplit( JobVertexID vertexID, ExecutionAttemptID executionAttempt) throws IOException { return executionGraphHandler.requestNextInputSplit(vertexID, executionAttempt); } ExecutionState requestPartitionState( IntermediateDataSetID intermediateResultId, ResultPartitionID resultPartitionId) throws PartitionProducerDisposedException { return executionGraphHandler.requestPartitionState(intermediateResultId, resultPartitionId); } void acknowledgeCheckpoint( JobID jobID, ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointMetrics checkpointMetrics, TaskStateSnapshot checkpointState) { executionGraphHandler.acknowledgeCheckpoint( jobID, executionAttemptID, checkpointId, checkpointMetrics, checkpointState); } void declineCheckpoint(DeclineCheckpoint decline) { executionGraphHandler.declineCheckpoint(decline); } void reportCheckpointMetrics( ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointMetrics checkpointMetrics) { executionGraphHandler.reportCheckpointMetrics( executionAttemptID, checkpointId, checkpointMetrics); } void updateAccumulators(AccumulatorSnapshot accumulatorSnapshot) { executionGraph.updateAccumulators(accumulatorSnapshot); } KvStateLocation requestKvStateLocation(JobID jobId, String registrationName) throws 
FlinkJobNotFoundException, UnknownKvStateLocation { return kvStateHandler.requestKvStateLocation(jobId, registrationName); } void notifyKvStateRegistered( JobID jobId, JobVertexID jobVertexId, KeyGroupRange keyGroupRange, String registrationName, KvStateID kvStateId, InetSocketAddress kvStateServerAddress) throws FlinkJobNotFoundException { kvStateHandler.notifyKvStateRegistered( jobId, jobVertexId, keyGroupRange, registrationName, kvStateId, kvStateServerAddress); } void notifyKvStateUnregistered( JobID jobId, JobVertexID jobVertexId, KeyGroupRange keyGroupRange, String registrationName) throws FlinkJobNotFoundException { kvStateHandler.notifyKvStateUnregistered( jobId, jobVertexId, keyGroupRange, registrationName); } CompletableFuture<String> triggerSavepoint( String targetDirectory, boolean cancelJob, SavepointFormatType formatType) { final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator(); StopWithSavepointTerminationManager.checkSavepointActionPreconditions( checkpointCoordinator, targetDirectory, getJobId(), logger); logger.info( "Triggering {}savepoint for job {}.", cancelJob ? "cancel-with-" : "", executionGraph.getJobID()); if (cancelJob) { checkpointCoordinator.stopCheckpointScheduler(); } return checkpointCoordinator .triggerSavepoint(targetDirectory, formatType) .thenApply(CompletedCheckpoint::getExternalPointer) .handleAsync( (path, throwable) -> { if (throwable != null) { if (cancelJob && context.isState(this)) { startCheckpointScheduler(checkpointCoordinator); } throw new CompletionException(throwable); } else if (cancelJob && context.isState(this)) { logger.info( "Savepoint stored in {}. 
Now cancelling {}.", path, executionGraph.getJobID()); cancel(); } return path; }, context.getMainThreadExecutor()); } CompletableFuture<String> triggerCheckpoint() { final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator(); final JobID jobID = executionGraph.getJobID(); if (checkpointCoordinator == null) { throw new IllegalStateException(String.format("Job %s is not a streaming job.", jobID)); } logger.info("Triggering a checkpoint for job {}.", jobID); return checkpointCoordinator .triggerCheckpoint(false) .thenApply(CompletedCheckpoint::getExternalPointer) .handleAsync( (path, throwable) -> { if (throwable != null) { throw new CompletionException(throwable); } return path; }, context.getMainThreadExecutor()); } private void startCheckpointScheduler(final CheckpointCoordinator checkpointCoordinator) { if (checkpointCoordinator.isPeriodicCheckpointingConfigured()) { try { checkpointCoordinator.startCheckpointScheduler(); } catch (IllegalStateException ignored) { } } } void deliverOperatorEventToCoordinator( ExecutionAttemptID taskExecutionId, OperatorID operatorId, OperatorEvent evt) throws FlinkException { operatorCoordinatorHandler.deliverOperatorEventToCoordinator( taskExecutionId, operatorId, evt); } CompletableFuture<CoordinationResponse> deliverCoordinationRequestToCoordinator( OperatorID operatorId, CoordinationRequest request) throws FlinkException { return operatorCoordinatorHandler.deliverCoordinationRequestToCoordinator( operatorId, request); } /** Transition to different state when failure occurs. Stays in the same state by default. */ abstract void onFailure(Throwable cause); /** * Transition to different state when the execution graph reaches a globally terminal state. 
* * @param globallyTerminalState globally terminal state which the execution graph reached */ abstract void onGloballyTerminalState(JobStatus globallyTerminalState); @Override public void handleGlobalFailure(Throwable cause) { failureCollection.add(new GlobalFailure(cause)); onFailure(cause); } /** * Updates the execution graph with the given task execution state transition. * * @param taskExecutionStateTransition taskExecutionStateTransition to update the ExecutionGraph * with * @return {@code true} if the update was successful; otherwise {@code false} */ private Throwable extractErrorOrUseDefault( TaskExecutionStateTransition taskExecutionStateTransition) { Throwable cause = taskExecutionStateTransition.getError(userCodeClassLoader); if (cause == null) { cause = new FlinkException("Unknown failure cause. Probably related to FLINK-21376."); } return cause; } private Execution getNonEmptyExecution(ExecutionVertexID id) { Optional<Execution> execution = executionGraph.findExecution(id); checkState(execution.isPresent()); return execution.get(); } List<Failure> getFailures() { return failureCollection; } private Optional<RootExceptionHistoryEntry> convertFailures(List<Failure> failureCollection) { if (failureCollection.isEmpty()) { return Optional.empty(); } Failure first = failureCollection.remove(0); List<ExceptionHistoryEntry> entries = failureCollection.stream() .map(failure -> failure.toExceptionHistoryEntry(this::getNonEmptyExecution)) .collect(Collectors.toList()); return Optional.of(first.toRootExceptionHistoryEntry(this::getNonEmptyExecution, entries)); } /** Context of the {@link StateWithExecutionGraph} state. */ interface Context extends StateTransitions.ToFinished { /** * Run the given action if the current state equals the expected state. 
* * @param expectedState expectedState is the expected state * @param action action to run if the current state equals the expected state */ void runIfState(State expectedState, Runnable action); /** * Checks whether the current state is the expected state. * * @param expectedState expectedState is the expected state * @return {@code true} if the current state equals the expected state; otherwise {@code * false} */ boolean isState(State expectedState); /** * Gets the main thread executor. * * @return the main thread executor */ Executor getMainThreadExecutor(); /** Archive failure. */ void archiveFailure(RootExceptionHistoryEntry failure); } }
class StateWithExecutionGraph implements State { private final Context context; private final ExecutionGraph executionGraph; private final ExecutionGraphHandler executionGraphHandler; private final OperatorCoordinatorHandler operatorCoordinatorHandler; private final KvStateHandler kvStateHandler; private final Logger logger; private final ClassLoader userCodeClassLoader; private final List<ExceptionHistoryEntry> failureCollection; StateWithExecutionGraph( Context context, ExecutionGraph executionGraph, ExecutionGraphHandler executionGraphHandler, OperatorCoordinatorHandler operatorCoordinatorHandler, Logger logger, ClassLoader userClassCodeLoader, List<ExceptionHistoryEntry> failureCollection) { this.context = context; this.executionGraph = executionGraph; this.executionGraphHandler = executionGraphHandler; this.operatorCoordinatorHandler = operatorCoordinatorHandler; this.kvStateHandler = new KvStateHandler(executionGraph); this.logger = logger; this.userCodeClassLoader = userClassCodeLoader; this.failureCollection = new ArrayList<>(failureCollection); FutureUtils.assertNoException( executionGraph .getTerminationFuture() .thenAcceptAsync( jobStatus -> { if (jobStatus.isGloballyTerminalState()) { context.runIfState( this, () -> { convertFailures(this.failureCollection) .ifPresent(context::archiveFailure); onGloballyTerminalState(jobStatus); }); } }, context.getMainThreadExecutor())); } ExecutionGraph getExecutionGraph() { return executionGraph; } JobID getJobId() { return executionGraph.getJobID(); } protected OperatorCoordinatorHandler getOperatorCoordinatorHandler() { return operatorCoordinatorHandler; } protected ExecutionGraphHandler getExecutionGraphHandler() { return executionGraphHandler; } @Override public void onLeave(Class<? 
extends State> newState) { if (!StateWithExecutionGraph.class.isAssignableFrom(newState)) { operatorCoordinatorHandler.disposeAllOperatorCoordinators(); } } @Override public ArchivedExecutionGraph getJob() { return ArchivedExecutionGraph.createFrom(executionGraph, getJobStatus()); } @Override public void suspend(Throwable cause) { executionGraph.suspend(cause); Preconditions.checkState(executionGraph.getState().isTerminalState()); context.goToFinished(ArchivedExecutionGraph.createFrom(executionGraph)); } @Override public Logger getLogger() { return logger; } void notifyPartitionDataAvailable(ResultPartitionID partitionID) { executionGraph.notifyPartitionDataAvailable(partitionID); } SerializedInputSplit requestNextInputSplit( JobVertexID vertexID, ExecutionAttemptID executionAttempt) throws IOException { return executionGraphHandler.requestNextInputSplit(vertexID, executionAttempt); } ExecutionState requestPartitionState( IntermediateDataSetID intermediateResultId, ResultPartitionID resultPartitionId) throws PartitionProducerDisposedException { return executionGraphHandler.requestPartitionState(intermediateResultId, resultPartitionId); } void acknowledgeCheckpoint( JobID jobID, ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointMetrics checkpointMetrics, TaskStateSnapshot checkpointState) { executionGraphHandler.acknowledgeCheckpoint( jobID, executionAttemptID, checkpointId, checkpointMetrics, checkpointState); } void declineCheckpoint(DeclineCheckpoint decline) { executionGraphHandler.declineCheckpoint(decline); } void reportCheckpointMetrics( ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointMetrics checkpointMetrics) { executionGraphHandler.reportCheckpointMetrics( executionAttemptID, checkpointId, checkpointMetrics); } void updateAccumulators(AccumulatorSnapshot accumulatorSnapshot) { executionGraph.updateAccumulators(accumulatorSnapshot); } KvStateLocation requestKvStateLocation(JobID jobId, String registrationName) throws 
FlinkJobNotFoundException, UnknownKvStateLocation { return kvStateHandler.requestKvStateLocation(jobId, registrationName); } void notifyKvStateRegistered( JobID jobId, JobVertexID jobVertexId, KeyGroupRange keyGroupRange, String registrationName, KvStateID kvStateId, InetSocketAddress kvStateServerAddress) throws FlinkJobNotFoundException { kvStateHandler.notifyKvStateRegistered( jobId, jobVertexId, keyGroupRange, registrationName, kvStateId, kvStateServerAddress); } void notifyKvStateUnregistered( JobID jobId, JobVertexID jobVertexId, KeyGroupRange keyGroupRange, String registrationName) throws FlinkJobNotFoundException { kvStateHandler.notifyKvStateUnregistered( jobId, jobVertexId, keyGroupRange, registrationName); } CompletableFuture<String> triggerSavepoint( String targetDirectory, boolean cancelJob, SavepointFormatType formatType) { final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator(); StopWithSavepointTerminationManager.checkSavepointActionPreconditions( checkpointCoordinator, targetDirectory, getJobId(), logger); logger.info( "Triggering {}savepoint for job {}.", cancelJob ? "cancel-with-" : "", executionGraph.getJobID()); if (cancelJob) { checkpointCoordinator.stopCheckpointScheduler(); } return checkpointCoordinator .triggerSavepoint(targetDirectory, formatType) .thenApply(CompletedCheckpoint::getExternalPointer) .handleAsync( (path, throwable) -> { if (throwable != null) { if (cancelJob && context.isState(this)) { startCheckpointScheduler(checkpointCoordinator); } throw new CompletionException(throwable); } else if (cancelJob && context.isState(this)) { logger.info( "Savepoint stored in {}. 
Now cancelling {}.", path, executionGraph.getJobID()); cancel(); } return path; }, context.getMainThreadExecutor()); } CompletableFuture<String> triggerCheckpoint() { final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator(); final JobID jobID = executionGraph.getJobID(); if (checkpointCoordinator == null) { throw new IllegalStateException(String.format("Job %s is not a streaming job.", jobID)); } logger.info("Triggering a checkpoint for job {}.", jobID); return checkpointCoordinator .triggerCheckpoint(false) .thenApply(CompletedCheckpoint::getExternalPointer) .handleAsync( (path, throwable) -> { if (throwable != null) { throw new CompletionException(throwable); } return path; }, context.getMainThreadExecutor()); } private void startCheckpointScheduler(final CheckpointCoordinator checkpointCoordinator) { if (checkpointCoordinator.isPeriodicCheckpointingConfigured()) { try { checkpointCoordinator.startCheckpointScheduler(); } catch (IllegalStateException ignored) { } } } void deliverOperatorEventToCoordinator( ExecutionAttemptID taskExecutionId, OperatorID operatorId, OperatorEvent evt) throws FlinkException { operatorCoordinatorHandler.deliverOperatorEventToCoordinator( taskExecutionId, operatorId, evt); } CompletableFuture<CoordinationResponse> deliverCoordinationRequestToCoordinator( OperatorID operatorId, CoordinationRequest request) throws FlinkException { return operatorCoordinatorHandler.deliverCoordinationRequestToCoordinator( operatorId, request); } /** Transition to different state when failure occurs. Stays in the same state by default. */ abstract void onFailure(Throwable cause); /** * Transition to different state when the execution graph reaches a globally terminal state. 
* * @param globallyTerminalState globally terminal state which the execution graph reached */ abstract void onGloballyTerminalState(JobStatus globallyTerminalState); @Override public void handleGlobalFailure(Throwable cause) { failureCollection.add(ExceptionHistoryEntry.createGlobal(cause)); onFailure(cause); } /** * Updates the execution graph with the given task execution state transition. * * @param taskExecutionStateTransition taskExecutionStateTransition to update the ExecutionGraph * with * @return {@code true} if the update was successful; otherwise {@code false} */ List<ExceptionHistoryEntry> getFailures() { return failureCollection; } private static Optional<RootExceptionHistoryEntry> convertFailures( List<ExceptionHistoryEntry> failureCollection) { if (failureCollection.isEmpty()) { return Optional.empty(); } final ExceptionHistoryEntry first = failureCollection.remove(0); return Optional.of( RootExceptionHistoryEntry.fromExceptionHistoryEntry(first, failureCollection)); } /** Context of the {@link StateWithExecutionGraph} state. */ interface Context extends StateTransitions.ToFinished { /** * Run the given action if the current state equals the expected state. * * @param expectedState expectedState is the expected state * @param action action to run if the current state equals the expected state */ void runIfState(State expectedState, Runnable action); /** * Checks whether the current state is the expected state. * * @param expectedState expectedState is the expected state * @return {@code true} if the current state equals the expected state; otherwise {@code * false} */ boolean isState(State expectedState); /** * Gets the main thread executor. * * @return the main thread executor */ Executor getMainThreadExecutor(); /** Archive failure. */ void archiveFailure(RootExceptionHistoryEntry failure); } }
adding @yrodiere for awareness and opinions as well..
public void testMetrics() { assertEquals(0L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.cache.query.requests", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME), new Tag("result", "miss"))); Arc.container().requestContext().activate(); try { DummyEntity entity = new DummyEntity(); entity.number = 12345L; em.persist(entity); em.flush(); em.createQuery("from DummyEntity e").getResultList(); assertEquals(1L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(1L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); } finally { Arc.container().requestContext().terminate(); } }
assertEquals(0L, getCounterValueOrNull("hibernate.query.executions",
public void testMetrics() { assertEquals(0L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(0L, getCounterValueOrNull("hibernate.cache.query.requests", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME), new Tag("result", "miss"))); Arc.container().requestContext().activate(); try { DummyEntity entity = new DummyEntity(); entity.number = 12345L; em.persist(entity); em.flush(); em.createQuery("from DummyEntity e").getResultList(); assertEquals(1L, getCounterValueOrNull("hibernate.query.executions", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); assertEquals(1L, getCounterValueOrNull("hibernate.entities.inserts", new Tag("entityManagerFactory", PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME))); } finally { Arc.container().requestContext().terminate(); } }
class DummyEntity { @Id private Long number; public Long getNumber() { return number; } public void setNumber(Long number) { this.number = number; } }
class DummyEntity { @Id private Long number; public Long getNumber() { return number; } public void setNumber(Long number) { this.number = number; } }
At present, we can't get `SessionVariable` from `HiveMetastore.java`, so session variable is not easy to be added. I add the variable in `Config.java` instead.
public HivePartitionStats getTableStatistics(String dbName, String tblName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); HiveCommonStats commonStats = toHiveCommonStats(table.getParameters()); long totalRowNums = commonStats.getRowNums(); if (totalRowNums == -1) { return HivePartitionStats.empty(); } List<String> dataColumns = table.getSd().getCols().stream() .map(FieldSchema::getName) .collect(toImmutableList()); List<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns); if (statisticsObjs.isEmpty()) { try { if (table.getParameters().keySet().stream().anyMatch(k -> k.startsWith("spark.sql.statistics.colStats."))) { statisticsObjs = HiveMetastoreApiConverter.getColStatsFromSparkParams(table); } } catch (Exception e) { LOG.warn("Failed to get column stats from table [{}.{}]", dbName, tblName); } } Map<String, HiveColumnStats> columnStatistics = HiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums); return new HivePartitionStats(commonStats, columnStatistics); }
if (table.getParameters().keySet().stream().anyMatch(k -> k.startsWith("spark.sql.statistics.colStats."))) {
public HivePartitionStats getTableStatistics(String dbName, String tblName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); HiveCommonStats commonStats = toHiveCommonStats(table.getParameters()); long totalRowNums = commonStats.getRowNums(); if (totalRowNums == -1) { return HivePartitionStats.empty(); } List<String> dataColumns = table.getSd().getCols().stream() .map(FieldSchema::getName) .collect(toImmutableList()); List<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns); if (statisticsObjs.isEmpty() && Config.enable_reuse_spark_column_statistics) { try { if (table.getParameters().keySet().stream().anyMatch(k -> k.startsWith("spark.sql.statistics.colStats."))) { statisticsObjs = HiveMetastoreApiConverter.getColStatsFromSparkParams(table); } } catch (Exception e) { LOG.warn("Failed to get column stats from table [{}.{}]", dbName, tblName); } } Map<String, HiveColumnStats> columnStatistics = HiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums); return new HivePartitionStats(commonStats, columnStatistics); }
class HiveMetastore implements IHiveMetastore { private static final Logger LOG = LogManager.getLogger(CachingHiveMetastore.class); private final HiveMetaClient client; private final String catalogName; private final MetastoreType metastoreType; public HiveMetastore(HiveMetaClient client, String catalogName, MetastoreType metastoreType) { this.client = client; this.catalogName = catalogName; this.metastoreType = metastoreType; } @Override public List<String> getAllDatabaseNames() { return client.getAllDatabaseNames(); } @Override public void createDb(String dbName, Map<String, String> properties) { String location = properties.getOrDefault(LOCATION_PROPERTY, ""); long dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); Database database = new Database(dbId, dbName, location); client.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database)); } @Override public void dropDb(String dbName, boolean deleteData) { client.dropDatabase(dbName, deleteData); } @Override public List<String> getAllTableNames(String dbName) { return client.getAllTableNames(dbName); } @Override public Database getDb(String dbName) { org.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName); return HiveMetastoreApiConverter.toDatabase(db); } @Override public void createTable(String dbName, Table table) { org.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table); client.createTable(hiveTable); } @Override public void dropTable(String dbName, String tableName) { client.dropTable(dbName, tableName); } public Table getTable(String dbName, String tableName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName); StorageDescriptor sd = table.getSd(); if (sd == null) { throw new StarRocksConnectorException("Table is missing storage descriptor"); } if (!HiveMetastoreApiConverter.isHudiTable(table.getSd().getInputFormat())) { validateHiveTableType(table.getTableType()); if 
(AcidUtils.isFullAcidTable(table)) { throw new StarRocksConnectorException( String.format("%s.%s is a hive transactional table(full acid), sr didn't support it yet", dbName, tableName)); } if (table.getTableType().equalsIgnoreCase("VIRTUAL_VIEW")) { return HiveMetastoreApiConverter.toHiveView(table, catalogName); } else { return HiveMetastoreApiConverter.toHiveTable(table, catalogName); } } else { return HiveMetastoreApiConverter.toHudiTable(table, catalogName); } } @Override public boolean tableExists(String dbName, String tableName) { return client.tableExists(dbName, tableName); } @Override public List<String> getPartitionKeysByValue(String dbName, String tableName, List<Optional<String>> partitionValues) { if (partitionValues.isEmpty()) { return client.getPartitionKeys(dbName, tableName); } else { List<String> partitionValuesStr = partitionValues.stream() .map(v -> v.orElse("")).collect(Collectors.toList()); return client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr); } } @Override public boolean partitionExists(Table table, List<String> partitionValues) { HiveTable hiveTable = (HiveTable) table; String dbName = hiveTable.getDbName(); String tableName = hiveTable.getTableName(); if (metastoreType == MetastoreType.GLUE && hiveTable.hasBooleanTypePartitionColumn()) { List<String> allPartitionNames = client.getPartitionKeys(dbName, tableName); String hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues); return allPartitionNames.contains(hivePartitionName); } else { return !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty(); } } @Override public Partition getPartition(String dbName, String tblName, List<String> partitionValues) { StorageDescriptor sd; Map<String, String> params; if (partitionValues.size() > 0) { org.apache.hadoop.hive.metastore.api.Partition partition = client.getPartition(dbName, tblName, partitionValues); sd = partition.getSd(); params = partition.getParameters(); 
} else { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); sd = table.getSd(); params = table.getParameters(); } return HiveMetastoreApiConverter.toPartition(sd, params); } public Map<String, Partition> getPartitionsByNames(String dbName, String tblName, List<String> partitionNames) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = new ArrayList<>(); for (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) { int end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size()); List<String> namesPerRPC = partitionNames.subList(start, end); List<org.apache.hadoop.hive.metastore.api.Partition> partsPerRPC = client.getPartitionsByNames(dbName, tblName, namesPerRPC); partitions.addAll(partsPerRPC); } Map<String, List<String>> partitionNameToPartitionValues = partitionNames.stream() .collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues)); Map<List<String>, Partition> partitionValuesToPartition = partitions.stream() .collect(Collectors.toMap( org.apache.hadoop.hive.metastore.api.Partition::getValues, partition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters()))); ImmutableMap.Builder<String, Partition> resultBuilder = ImmutableMap.builder(); for (Map.Entry<String, List<String>> entry : partitionNameToPartitionValues.entrySet()) { Partition partition = partitionValuesToPartition.get(entry.getValue()); resultBuilder.put(entry.getKey(), partition); } return resultBuilder.build(); } @Override public void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) { List<org.apache.hadoop.hive.metastore.api.Partition> hivePartitions = partitions.stream() .map(HiveMetastoreApiConverter::toMetastoreApiPartition) .collect(Collectors.toList()); client.addPartitions(dbName, tableName, hivePartitions); } @Override public void dropPartition(String dbName, String tableName, List<String> 
partValues, boolean deleteData) { client.dropPartition(dbName, tableName, partValues, deleteData); } public void updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) { org.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName); if (originTable == null) { throw new StarRocksConnectorException("Table '%s.%s' not found", dbName, tableName); } org.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy(); HiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); HiveCommonStats commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = newTable.getParameters(); originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); newTable.setParameters(updateStatisticsParameters(originParams, commonStats)); client.alterTable(dbName, tableName, newTable); } public void updatePartitionStatistics(String dbName, String tableName, String partitionName, Function<HivePartitionStats, HivePartitionStats> update) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.getPartitionsByNames( dbName, tableName, ImmutableList.of(partitionName)); if (partitions.size() != 1) { throw new StarRocksConnectorException("Metastore returned multiple partitions for name: " + partitionName); } org.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions); HiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); org.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy(); HiveCommonStats 
commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = modifiedPartition.getParameters(); originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats)); client.alterPartition(dbName, tableName, modifiedPartition); } public Map<String, HivePartitionStats> getPartitionStatistics(Table table, List<String> partitionNames) { HiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table; String dbName = hmsTbl.getDbName(); String tblName = hmsTbl.getTableName(); List<String> dataColumns = hmsTbl.getDataColumnNames(); Map<String, Partition> partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames); Map<String, HiveCommonStats> partitionCommonStats = partitions.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters()))); Map<String, Long> partitionRowNums = partitionCommonStats.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums())); ImmutableMap.Builder<String, HivePartitionStats> resultBuilder = ImmutableMap.builder(); Map<String, List<ColumnStatisticsObj>> partitionNameToColumnStatsObj = client.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns); Map<String, Map<String, HiveColumnStats>> partitionColumnStats = HiveMetastoreApiConverter .toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums); for (String partitionName : partitionCommonStats.keySet()) { HiveCommonStats commonStats = partitionCommonStats.get(partitionName); Map<String, HiveColumnStats> columnStatistics = partitionColumnStats .getOrDefault(partitionName, ImmutableMap.of()); resultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics)); } return resultBuilder.build(); } public long getCurrentEventId() { return 
client.getCurrentNotificationEventId().getEventId(); } public NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String catalogName, final boolean getAllEvents) throws MetastoreNotificationFetchException { try { int batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc; NotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null); if (response.getEvents().size() == 0) { LOG.info("Event size is 0 when pulling events on catalog [{}]", catalogName); return null; } LOG.info(String.format("Received %d events. Start event id : %d. Last synced id : %d on catalog : %s", response.getEvents().size(), response.getEvents().get(0).getEventId(), lastSyncedEventId, catalogName)); return response; } catch (MetastoreNotificationFetchException e) { LOG.error("Unable to fetch notifications from metastore. Last synced event id is {}", lastSyncedEventId, e); throw new MetastoreNotificationFetchException("Unable to fetch notifications from metastore. " + "Last synced event id is " + lastSyncedEventId, e); } } }
class HiveMetastore implements IHiveMetastore { private static final Logger LOG = LogManager.getLogger(CachingHiveMetastore.class); private final HiveMetaClient client; private final String catalogName; private final MetastoreType metastoreType; public HiveMetastore(HiveMetaClient client, String catalogName, MetastoreType metastoreType) { this.client = client; this.catalogName = catalogName; this.metastoreType = metastoreType; } @Override public List<String> getAllDatabaseNames() { return client.getAllDatabaseNames(); } @Override public void createDb(String dbName, Map<String, String> properties) { String location = properties.getOrDefault(LOCATION_PROPERTY, ""); long dbId = ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt(); Database database = new Database(dbId, dbName, location); client.createDatabase(HiveMetastoreApiConverter.toMetastoreApiDatabase(database)); } @Override public void dropDb(String dbName, boolean deleteData) { client.dropDatabase(dbName, deleteData); } @Override public List<String> getAllTableNames(String dbName) { return client.getAllTableNames(dbName); } @Override public Database getDb(String dbName) { org.apache.hadoop.hive.metastore.api.Database db = client.getDb(dbName); return HiveMetastoreApiConverter.toDatabase(db); } @Override public void createTable(String dbName, Table table) { org.apache.hadoop.hive.metastore.api.Table hiveTable = toMetastoreApiTable((HiveTable) table); client.createTable(hiveTable); } @Override public void dropTable(String dbName, String tableName) { client.dropTable(dbName, tableName); } public Table getTable(String dbName, String tableName) { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tableName); StorageDescriptor sd = table.getSd(); if (sd == null) { throw new StarRocksConnectorException("Table is missing storage descriptor"); } if (!HiveMetastoreApiConverter.isHudiTable(table.getSd().getInputFormat())) { validateHiveTableType(table.getTableType()); if 
(AcidUtils.isFullAcidTable(table)) { throw new StarRocksConnectorException( String.format("%s.%s is a hive transactional table(full acid), sr didn't support it yet", dbName, tableName)); } if (table.getTableType().equalsIgnoreCase("VIRTUAL_VIEW")) { return HiveMetastoreApiConverter.toHiveView(table, catalogName); } else { return HiveMetastoreApiConverter.toHiveTable(table, catalogName); } } else { return HiveMetastoreApiConverter.toHudiTable(table, catalogName); } } @Override public boolean tableExists(String dbName, String tableName) { return client.tableExists(dbName, tableName); } @Override public List<String> getPartitionKeysByValue(String dbName, String tableName, List<Optional<String>> partitionValues) { if (partitionValues.isEmpty()) { return client.getPartitionKeys(dbName, tableName); } else { List<String> partitionValuesStr = partitionValues.stream() .map(v -> v.orElse("")).collect(Collectors.toList()); return client.getPartitionKeysByValue(dbName, tableName, partitionValuesStr); } } @Override public boolean partitionExists(Table table, List<String> partitionValues) { HiveTable hiveTable = (HiveTable) table; String dbName = hiveTable.getDbName(); String tableName = hiveTable.getTableName(); if (metastoreType == MetastoreType.GLUE && hiveTable.hasBooleanTypePartitionColumn()) { List<String> allPartitionNames = client.getPartitionKeys(dbName, tableName); String hivePartitionName = toHivePartitionName(hiveTable.getPartitionColumnNames(), partitionValues); return allPartitionNames.contains(hivePartitionName); } else { return !client.getPartitionKeysByValue(dbName, tableName, partitionValues).isEmpty(); } } @Override public Partition getPartition(String dbName, String tblName, List<String> partitionValues) { StorageDescriptor sd; Map<String, String> params; if (partitionValues.size() > 0) { org.apache.hadoop.hive.metastore.api.Partition partition = client.getPartition(dbName, tblName, partitionValues); sd = partition.getSd(); params = partition.getParameters(); 
} else { org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName); sd = table.getSd(); params = table.getParameters(); } return HiveMetastoreApiConverter.toPartition(sd, params); } public Map<String, Partition> getPartitionsByNames(String dbName, String tblName, List<String> partitionNames) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = new ArrayList<>(); for (int start = 0; start < partitionNames.size(); start += Config.max_hive_partitions_per_rpc) { int end = Math.min(start + Config.max_hive_partitions_per_rpc, partitionNames.size()); List<String> namesPerRPC = partitionNames.subList(start, end); List<org.apache.hadoop.hive.metastore.api.Partition> partsPerRPC = client.getPartitionsByNames(dbName, tblName, namesPerRPC); partitions.addAll(partsPerRPC); } Map<String, List<String>> partitionNameToPartitionValues = partitionNames.stream() .collect(Collectors.toMap(Function.identity(), PartitionUtil::toPartitionValues)); Map<List<String>, Partition> partitionValuesToPartition = partitions.stream() .collect(Collectors.toMap( org.apache.hadoop.hive.metastore.api.Partition::getValues, partition -> HiveMetastoreApiConverter.toPartition(partition.getSd(), partition.getParameters()))); ImmutableMap.Builder<String, Partition> resultBuilder = ImmutableMap.builder(); for (Map.Entry<String, List<String>> entry : partitionNameToPartitionValues.entrySet()) { Partition partition = partitionValuesToPartition.get(entry.getValue()); resultBuilder.put(entry.getKey(), partition); } return resultBuilder.build(); } @Override public void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) { List<org.apache.hadoop.hive.metastore.api.Partition> hivePartitions = partitions.stream() .map(HiveMetastoreApiConverter::toMetastoreApiPartition) .collect(Collectors.toList()); client.addPartitions(dbName, tableName, hivePartitions); } @Override public void dropPartition(String dbName, String tableName, List<String> 
partValues, boolean deleteData) { client.dropPartition(dbName, tableName, partValues, deleteData); } public void updateTableStatistics(String dbName, String tableName, Function<HivePartitionStats, HivePartitionStats> update) { org.apache.hadoop.hive.metastore.api.Table originTable = client.getTable(dbName, tableName); if (originTable == null) { throw new StarRocksConnectorException("Table '%s.%s' not found", dbName, tableName); } org.apache.hadoop.hive.metastore.api.Table newTable = originTable.deepCopy(); HiveCommonStats curCommonStats = toHiveCommonStats(originTable.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); HiveCommonStats commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = newTable.getParameters(); originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); newTable.setParameters(updateStatisticsParameters(originParams, commonStats)); client.alterTable(dbName, tableName, newTable); } public void updatePartitionStatistics(String dbName, String tableName, String partitionName, Function<HivePartitionStats, HivePartitionStats> update) { List<org.apache.hadoop.hive.metastore.api.Partition> partitions = client.getPartitionsByNames( dbName, tableName, ImmutableList.of(partitionName)); if (partitions.size() != 1) { throw new StarRocksConnectorException("Metastore returned multiple partitions for name: " + partitionName); } org.apache.hadoop.hive.metastore.api.Partition originPartition = getOnlyElement(partitions); HiveCommonStats curCommonStats = toHiveCommonStats(originPartition.getParameters()); HivePartitionStats curPartitionStats = new HivePartitionStats(curCommonStats, new HashMap<>()); HivePartitionStats updatedStats = update.apply(curPartitionStats); org.apache.hadoop.hive.metastore.api.Partition modifiedPartition = originPartition.deepCopy(); HiveCommonStats 
commonStats = updatedStats.getCommonStats(); Map<String, String> originParams = modifiedPartition.getParameters(); originParams.put(TRANSIENT_LAST_DDL_TIME, String.valueOf(System.currentTimeMillis() / 1000)); modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), commonStats)); client.alterPartition(dbName, tableName, modifiedPartition); } public Map<String, HivePartitionStats> getPartitionStatistics(Table table, List<String> partitionNames) { HiveMetaStoreTable hmsTbl = (HiveMetaStoreTable) table; String dbName = hmsTbl.getDbName(); String tblName = hmsTbl.getTableName(); List<String> dataColumns = hmsTbl.getDataColumnNames(); Map<String, Partition> partitions = getPartitionsByNames(hmsTbl.getDbName(), hmsTbl.getTableName(), partitionNames); Map<String, HiveCommonStats> partitionCommonStats = partitions.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> toHiveCommonStats(entry.getValue().getParameters()))); Map<String, Long> partitionRowNums = partitionCommonStats.entrySet().stream() .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowNums())); ImmutableMap.Builder<String, HivePartitionStats> resultBuilder = ImmutableMap.builder(); Map<String, List<ColumnStatisticsObj>> partitionNameToColumnStatsObj = client.getPartitionColumnStats(dbName, tblName, partitionNames, dataColumns); Map<String, Map<String, HiveColumnStats>> partitionColumnStats = HiveMetastoreApiConverter .toPartitionColumnStatistics(partitionNameToColumnStatsObj, partitionRowNums); for (String partitionName : partitionCommonStats.keySet()) { HiveCommonStats commonStats = partitionCommonStats.get(partitionName); Map<String, HiveColumnStats> columnStatistics = partitionColumnStats .getOrDefault(partitionName, ImmutableMap.of()); resultBuilder.put(partitionName, new HivePartitionStats(commonStats, columnStatistics)); } return resultBuilder.build(); } public long getCurrentEventId() { return 
client.getCurrentNotificationEventId().getEventId(); } public NotificationEventResponse getNextEventResponse(long lastSyncedEventId, String catalogName, final boolean getAllEvents) throws MetastoreNotificationFetchException { try { int batchSize = getAllEvents ? -1 : Config.hms_events_batch_size_per_rpc; NotificationEventResponse response = client.getNextNotification(lastSyncedEventId, batchSize, null); if (response.getEvents().size() == 0) { LOG.info("Event size is 0 when pulling events on catalog [{}]", catalogName); return null; } LOG.info(String.format("Received %d events. Start event id : %d. Last synced id : %d on catalog : %s", response.getEvents().size(), response.getEvents().get(0).getEventId(), lastSyncedEventId, catalogName)); return response; } catch (MetastoreNotificationFetchException e) { LOG.error("Unable to fetch notifications from metastore. Last synced event id is {}", lastSyncedEventId, e); throw new MetastoreNotificationFetchException("Unable to fetch notifications from metastore. " + "Last synced event id is " + lastSyncedEventId, e); } } }
runningJobs is only used for replay, so no need to add a lock
public void cancelRunningJobs() { if (!GlobalStateMgr.isCheckpointThread()) { cancelJobExecutorService.submit(() -> { try { while (!GlobalStateMgr.getServingState().isReady()) { try { Thread.sleep(1000); } catch (InterruptedException e) { LOG.warn("InsertOverwriteJobManager runAfterCatalogReady interrupted exception.", e); } } if (runningJobs != null) { for (InsertOverwriteJob job : runningJobs) { LOG.info("start to cancel unfinished insert overwrite job:{}", job.getJobId()); try { InsertOverwriteJobRunner runner = new InsertOverwriteJobRunner(job); runner.cancel(); } finally { deregisterOverwriteJob(job.getJobId()); } } runningJobs.clear(); } } catch (Exception e) { LOG.warn("cancel running jobs failed. cancel thread will exit", e); } }); } }
if (runningJobs != null) {
public void cancelRunningJobs() { if (GlobalStateMgr.isCheckpointThread()) { return; } cancelJobExecutorService.submit(() -> { try { while (!GlobalStateMgr.getServingState().isReady()) { try { Thread.sleep(1000); } catch (InterruptedException e) { LOG.warn("InsertOverwriteJobManager runAfterCatalogReady interrupted exception.", e); } } if (runningJobs != null) { for (InsertOverwriteJob job : runningJobs) { LOG.info("start to cancel unfinished insert overwrite job:{}", job.getJobId()); try { InsertOverwriteJobRunner runner = new InsertOverwriteJobRunner(job); runner.cancel(); } finally { deregisterOverwriteJob(job.getJobId()); } } runningJobs.clear(); } } catch (Exception e) { LOG.warn("cancel running jobs failed. cancel thread will exit", e); } }); }
class InsertOverwriteJobManager implements Writable, GsonPostProcessable { private static final Logger LOG = LogManager.getLogger(InsertOverwriteJobManager.class); @SerializedName(value = "overwriteJobMap") private Map<Long, InsertOverwriteJob> overwriteJobMap; @SerializedName(value = "partitionsWithOverwrite") private Map<Long, List<Long>> partitionsWithOverwrite; @SerializedName(value = "jobNum") private long jobNum; private ExecutorService cancelJobExecutorService; private List<InsertOverwriteJob> runningJobs; private ReentrantReadWriteLock lock; public InsertOverwriteJobManager() { this.overwriteJobMap = Maps.newHashMap(); this.partitionsWithOverwrite = Maps.newHashMap(); ThreadFactory threadFactory = new DefaultThreadFactory("cancel-thread"); this.cancelJobExecutorService = Executors.newSingleThreadExecutor(threadFactory); this.runningJobs = Lists.newArrayList(); this.lock = new ReentrantReadWriteLock(); this.jobNum = 0; } public void submitJob(ConnectContext context, StmtExecutor stmtExecutor, InsertOverwriteJob job) throws Exception { boolean registered = registerOverwriteJob(job); if (!registered) { LOG.warn("register insert overwrite job:{} failed", job.getJobId()); throw new RuntimeException("register insert overwrite job failed"); } try { Database database = MetaUtils.getDatabase(context, job.getTargetDbId()); OlapTable table = (OlapTable) MetaUtils.getTable(context, database.getId(), job.getTargetTableId()); InsertOverwriteJobRunner jobRunner = new InsertOverwriteJobRunner(job, context, stmtExecutor, database, table); jobRunner.run(); } finally { deregisterOverwriteJob(job.getJobId()); } } public boolean registerOverwriteJob(InsertOverwriteJob job) { lock.writeLock().lock(); try { if (overwriteJobMap.containsKey(job.getJobId())) { LOG.warn("insert overwrite job:{} is running", job.getJobId()); return false; } overwriteJobMap.put(job.getJobId(), job); List<Long> runningPartitions = partitionsWithOverwrite.getOrDefault(job.getTargetTableId(), 
Lists.newArrayList()); if (job.getOriginalTargetPartitionIds() != null) { runningPartitions.addAll(job.getOriginalTargetPartitionIds()); } partitionsWithOverwrite.put(job.getTargetTableId(), runningPartitions); jobNum++; return true; } finally { lock.writeLock().unlock(); } } public boolean deregisterOverwriteJob(long jobid) { lock.writeLock().lock(); try { if (!overwriteJobMap.containsKey(jobid)) { return true; } InsertOverwriteJob job = overwriteJobMap.get(jobid); List<Long> partitionIds = partitionsWithOverwrite.get(job.getTargetTableId()); if (partitionIds != null) { partitionIds.removeAll(job.getOriginalTargetPartitionIds()); if (partitionIds.isEmpty()) { partitionsWithOverwrite.remove(job.getTargetTableId()); } } else { partitionsWithOverwrite.remove(job.getTargetTableId()); } overwriteJobMap.remove(jobid); jobNum--; return true; } catch (Exception e) { LOG.warn("deregister overwrite job failed", e); throw e; } finally { lock.writeLock().unlock(); } } public boolean hasRunningOverwriteJob(long tableId) { lock.readLock().lock(); try { return partitionsWithOverwrite.containsKey(tableId); } finally { lock.readLock().unlock(); } } public void replayCreateInsertOverwrite(CreateInsertOverwriteJobInfo jobInfo) { InsertOverwriteJob insertOverwriteJob = new InsertOverwriteJob(jobInfo.getJobId(), jobInfo.getDbId(), jobInfo.getTableId(), jobInfo.getTargetPartitionIds()); boolean registered = registerOverwriteJob(insertOverwriteJob); if (!registered) { LOG.warn("register insert overwrite job failed. 
jobId:{}", insertOverwriteJob.getJobId()); return; } if (runningJobs == null) { runningJobs = Lists.newArrayList(); } runningJobs.add(insertOverwriteJob); } public void replayInsertOverwriteStateChange(InsertOverwriteStateChangeInfo info) { InsertOverwriteJob job = getInsertOverwriteJob(info.getJobId()); InsertOverwriteJobRunner runner = new InsertOverwriteJobRunner(job); runner.replayStateChange(info); if (job.isFinished()) { deregisterOverwriteJob(job.getJobId()); if (runningJobs != null) { runningJobs.remove(job); } } } public long getJobNum() { return jobNum; } public long getRunningJobSize() { return runningJobs.size(); } public InsertOverwriteJob getInsertOverwriteJob(long jobId) { lock.readLock().lock(); try { return overwriteJobMap.get(jobId); } finally { lock.readLock().unlock(); } } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, GsonUtils.GSON.toJson(this)); } public static InsertOverwriteJobManager read(DataInput in) throws IOException { String json = Text.readString(in); InsertOverwriteJobManager jobManager = GsonUtils.GSON.fromJson(json, InsertOverwriteJobManager.class); return jobManager; } @Override public void gsonPostProcess() { if (!GlobalStateMgr.isCheckpointThread()) { if (runningJobs == null) { runningJobs = Lists.newArrayList(); } for (InsertOverwriteJob job : overwriteJobMap.values()) { if (!job.isFinished()) { LOG.info("add insert overwrite job:{} to runningJobs, state:{}", job.getJobId(), job.getJobState()); runningJobs.add(job); } } } } }
class InsertOverwriteJobManager implements Writable, GsonPostProcessable { private static final Logger LOG = LogManager.getLogger(InsertOverwriteJobManager.class); @SerializedName(value = "overwriteJobMap") private Map<Long, InsertOverwriteJob> overwriteJobMap; @SerializedName(value = "tableToOverwriteJobs") private Map<Long, List<Long>> tableToOverwriteJobs; private ExecutorService cancelJobExecutorService; private List<InsertOverwriteJob> runningJobs; private ReentrantReadWriteLock lock; public InsertOverwriteJobManager() { this.overwriteJobMap = Maps.newHashMap(); this.tableToOverwriteJobs = Maps.newHashMap(); ThreadFactory threadFactory = new DefaultThreadFactory("cancel-thread"); this.cancelJobExecutorService = Executors.newSingleThreadExecutor(threadFactory); this.runningJobs = Lists.newArrayList(); this.lock = new ReentrantReadWriteLock(); } public void executeJob(ConnectContext context, StmtExecutor stmtExecutor, InsertOverwriteJob job) throws Exception { CreateInsertOverwriteJobLog info = new CreateInsertOverwriteJobLog(job.getJobId(), job.getTargetDbId(), job.getTargetTableId(), job.getSourcePartitionIds()); GlobalStateMgr.getCurrentState().getEditLog().logCreateInsertOverwrite(info); boolean registered = registerOverwriteJob(job); if (!registered) { LOG.warn("register insert overwrite job:{} failed", job.getJobId()); throw new RuntimeException("register insert overwrite job failed"); } try { Database database = MetaUtils.getDatabase(context, job.getTargetDbId()); OlapTable table = (OlapTable) MetaUtils.getTable(context, database.getId(), job.getTargetTableId()); InsertOverwriteJobRunner jobRunner = new InsertOverwriteJobRunner(job, context, stmtExecutor, database, table); jobRunner.run(); } finally { deregisterOverwriteJob(job.getJobId()); } } public boolean registerOverwriteJob(InsertOverwriteJob job) { lock.writeLock().lock(); try { if (overwriteJobMap.containsKey(job.getJobId())) { LOG.warn("insert overwrite job:{} is running", job.getJobId()); return 
false; } overwriteJobMap.put(job.getJobId(), job); List<Long> tableJobs = tableToOverwriteJobs.get(job.getTargetTableId()); if (tableJobs == null) { tableJobs = Lists.newArrayList(); tableToOverwriteJobs.put(job.getTargetTableId(), tableJobs); } tableJobs.add(job.getJobId()); return true; } finally { lock.writeLock().unlock(); } } public boolean deregisterOverwriteJob(long jobid) { lock.writeLock().lock(); try { if (!overwriteJobMap.containsKey(jobid)) { return true; } InsertOverwriteJob job = overwriteJobMap.get(jobid); List<Long> tableJobs = tableToOverwriteJobs.get(job.getTargetTableId()); if (tableJobs != null) { tableJobs.remove(job.getJobId()); if (tableJobs.isEmpty()) { tableToOverwriteJobs.remove(job.getTargetTableId()); } } overwriteJobMap.remove(jobid); return true; } catch (Exception e) { LOG.warn("deregister overwrite job failed", e); throw e; } finally { lock.writeLock().unlock(); } } public boolean hasRunningOverwriteJob(long tableId) { lock.readLock().lock(); try { return tableToOverwriteJobs.containsKey(tableId); } finally { lock.readLock().unlock(); } } public void replayCreateInsertOverwrite(CreateInsertOverwriteJobLog jobInfo) { InsertOverwriteJob insertOverwriteJob = new InsertOverwriteJob(jobInfo.getJobId(), jobInfo.getDbId(), jobInfo.getTableId(), jobInfo.getTargetPartitionIds()); boolean registered = registerOverwriteJob(insertOverwriteJob); if (!registered) { LOG.warn("register insert overwrite job failed. 
jobId:{}", insertOverwriteJob.getJobId()); return; } if (runningJobs == null) { runningJobs = Lists.newArrayList(); } runningJobs.add(insertOverwriteJob); } public void replayInsertOverwriteStateChange(InsertOverwriteStateChangeInfo info) { InsertOverwriteJob job = getInsertOverwriteJob(info.getJobId()); InsertOverwriteJobRunner runner = new InsertOverwriteJobRunner(job); runner.replayStateChange(info); if (job.isFinished()) { deregisterOverwriteJob(job.getJobId()); if (runningJobs != null) { runningJobs.remove(job); } } } public long getJobNum() { return overwriteJobMap.size(); } public long getRunningJobSize() { return runningJobs.size(); } public InsertOverwriteJob getInsertOverwriteJob(long jobId) { lock.readLock().lock(); try { return overwriteJobMap.get(jobId); } finally { lock.readLock().unlock(); } } @Override public void write(DataOutput out) throws IOException { Text.writeString(out, GsonUtils.GSON.toJson(this)); } public static InsertOverwriteJobManager read(DataInput in) throws IOException { String json = Text.readString(in); InsertOverwriteJobManager jobManager = GsonUtils.GSON.fromJson(json, InsertOverwriteJobManager.class); return jobManager; } @Override public void gsonPostProcess() { if (!GlobalStateMgr.isCheckpointThread()) { if (runningJobs == null) { runningJobs = Lists.newArrayList(); } for (InsertOverwriteJob job : overwriteJobMap.values()) { if (!job.isFinished()) { LOG.info("add insert overwrite job:{} to runningJobs, state:{}", job.getJobId(), job.getJobState()); runningJobs.add(job); } } } } }
By reading the spec, I can't really tell what the intended behavior is. I'll try and see if we can keep the paths of the original request and see how the tests behave
public Object extractParameter(ResteasyReactiveRequestContext context) { int index = findPathParamIndex(context.getLocatorTarget().getClassPath(), context.getLocatorTarget().getPath()); if (index >= 0) { return context.getLocatorPathParam(index); } return null; }
int index = findPathParamIndex(context.getLocatorTarget().getClassPath(), context.getLocatorTarget().getPath());
public Object extractParameter(ResteasyReactiveRequestContext context) { int index = findPathParamIndex(context.getLocatorTarget().getClassPath(), context.getLocatorTarget().getPath()); if (index >= 0) { return context.getLocatorPathParam(index); } return null; }
class LocatableResourcePathParamExtractor implements ParameterExtractor { private final String name; public LocatableResourcePathParamExtractor(String name) { this.name = name; } @Override private int findPathParamIndex(URITemplate classPathTemplate, URITemplate methodPathTemplate) { int index = 0; if (classPathTemplate != null) { for (URITemplate.TemplateComponent component : classPathTemplate.components) { if (component.name != null) { if (component.name.equals(this.name)) { return index; } index++; } else if (component.names != null) { for (String nm : component.names) { if (nm.equals(this.name)) { return index; } } index++; } } } for (URITemplate.TemplateComponent component : methodPathTemplate.components) { if (component.name != null) { if (component.name.equals(this.name)) { return index; } index++; } else if (component.names != null) { for (String nm : component.names) { if (nm.equals(this.name)) { return index; } } index++; } } return -1; } }
class LocatableResourcePathParamExtractor implements ParameterExtractor { private final String name; public LocatableResourcePathParamExtractor(String name) { this.name = name; } @Override private int findPathParamIndex(URITemplate classPathTemplate, URITemplate methodPathTemplate) { int index = 0; if (classPathTemplate != null) { for (URITemplate.TemplateComponent component : classPathTemplate.components) { if (component.name != null) { if (component.name.equals(this.name)) { return index; } index++; } else if (component.names != null) { for (String nm : component.names) { if (nm.equals(this.name)) { return index; } } index++; } } } for (URITemplate.TemplateComponent component : methodPathTemplate.components) { if (component.name != null) { if (component.name.equals(this.name)) { return index; } index++; } else if (component.names != null) { for (String nm : component.names) { if (nm.equals(this.name)) { return index; } } index++; } } return -1; } }
Yes, there is no case where mgr is null.
public long loadGlobalFunction(DataInputStream in, long checksum) throws IOException { GlobalFunctionMgr mgr = GlobalFunctionMgr.read(in); if (mgr != null) { this.globalFunctionMgr = mgr; } LOG.info("finished replay global function from image"); return checksum; }
if (mgr != null) {
public long loadGlobalFunction(DataInputStream in, long checksum) throws IOException { this.globalFunctionMgr = GlobalFunctionMgr.read(in); LOG.info("finished replay global function from image"); return checksum; }
class SingletonHolder { private static final Env INSTANCE = new Env(); }
class SingletonHolder { private static final Env INSTANCE = new Env(); }
These methods seem like they are both writing the bom file. #Resolved
public void generate() { TreeSet<BomDependency> inputDependencies = scan(); TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); TreeSet<BomDependency> outputDependencies = analyzer.analyze(); outputDependencies.retainAll(inputDependencies); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); outputDependencies = analyzer.analyze(); boolean validationPassed = analyzer.validate(); if(validationPassed) { rewriteBomFile(); writeBom(outputDependencies); } else { logger.info("Validation for the BOM failed. Exiting..."); } }
TreeSet<BomDependency> inputDependencies = scan();
public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new 
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; 
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. Exception{}", exception.toString()); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException 
exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
Do we have tests covering this? Is this only an issue with lambda functions?
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) { if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).type)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).type); } if (iExpr.argExprs.size() != 3) { return; } BLangExpression keyFunction = iExpr.argExprs.get(2); BType keyFunctionType = keyFunction.type; if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (keyFunctionType.tag == TypeTags.NIL) { if (!types.isOrderedType(iExpr.argExprs.get(0).type)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).type); } return; } DiagnosticPos pos; BType returnType; if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { pos = keyFunction.pos; returnType = ((BLangSimpleVarRef) keyFunction).type.getReturnType(); } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction); pos = arrowFunction.body.expr.pos; returnType = arrowFunction.body.expr.type; } else { BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction; pos = keyLambdaFunction.function.pos; returnType = keyLambdaFunction.function.type.getReturnType(); if (returnType.tag == TypeTags.SEMANTIC_ERROR) { return; } } if (!types.isOrderedType(returnType)) { dlog.error(pos, DiagnosticCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType); } }
if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) { if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).type)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).type); } if (iExpr.argExprs.size() != 3) { return; } BLangExpression keyFunction = iExpr.argExprs.get(2); BType keyFunctionType = keyFunction.type; if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (keyFunctionType.tag == TypeTags.NIL) { if (!types.isOrderedType(iExpr.argExprs.get(0).type)) { dlog.error(iExpr.argExprs.get(0).pos, DiagnosticCode.INVALID_SORT_ARRAY_MEMBER_TYPE, iExpr.argExprs.get(0).type); } return; } DiagnosticPos pos; BType returnType; if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { pos = keyFunction.pos; returnType = ((BLangSimpleVarRef) keyFunction).type.getReturnType(); } else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) { BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction); pos = arrowFunction.body.expr.pos; returnType = arrowFunction.body.expr.type; if (returnType.tag == TypeTags.SEMANTIC_ERROR) { return; } } else { BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction; pos = keyLambdaFunction.function.pos; returnType = keyLambdaFunction.function.type.getReturnType(); } if (!types.isOrderedType(returnType)) { dlog.error(pos, DiagnosticCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType); } }
class TypeChecker extends BLangNodeVisitor { private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>(); private static Set<String> listLengthModifierFunctions = new HashSet<>(); private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>(); private static final String TABLE_TNAME = "table"; private static final String LIST_LANG_LIB = "lang.array"; private static final String MAP_LANG_LIB = "lang.map"; private static final String TABLE_LANG_LIB = "lang.table"; private static final String VALUE_LANG_LIB = "lang.value"; private static final String XML_LANG_LIB = "lang.xml"; private static final String FUNCTION_NAME_PUSH = "push"; private static final String FUNCTION_NAME_POP = "pop"; private static final String FUNCTION_NAME_SHIFT = "shift"; private static final String FUNCTION_NAME_UNSHIFT = "unshift"; private Names names; private SymbolTable symTable; private SymbolEnter symbolEnter; private SymbolResolver symResolver; private NodeCloner nodeCloner; private Types types; private BLangDiagnosticLogHelper dlog; private SymbolEnv env; private boolean isTypeChecked; private TypeNarrower typeNarrower; private TypeParamAnalyzer typeParamAnalyzer; private BLangAnonymousModelHelper anonymousModelHelper; private SemanticAnalyzer semanticAnalyzer; private ResolvedTypeBuilder typeBuilder; private boolean nonErrorLoggingCheck = false; private int letCount = 0; private Stack<SymbolEnv> queryEnvs, prevEnvs; private Stack<BLangSelectClause> selectClauses; private BLangMissingNodesHelper missingNodesHelper; /** * Expected types or inherited types. 
*/ private BType expType; private BType resultType; private DiagnosticCode diagCode; static { listLengthModifierFunctions.add(FUNCTION_NAME_PUSH); listLengthModifierFunctions.add(FUNCTION_NAME_POP); listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT); listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT); modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeAll"); add("setLength"); add("reverse"); add("sort"); add("pop"); add("push"); add("shift"); add("unshift"); }}); modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{ add("put"); add("add"); add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{ add("mergeJson"); }}); modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{ add("setName"); add("setChildren"); add("strip"); }}); } public static TypeChecker getInstance(CompilerContext context) { TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY); if (typeChecker == null) { typeChecker = new TypeChecker(context); } return typeChecker; } public TypeChecker(CompilerContext context) { context.put(TYPE_CHECKER_KEY, this); this.names = Names.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLogHelper.getInstance(context); this.typeNarrower = TypeNarrower.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); this.typeBuilder = new 
ResolvedTypeBuilder(); this.selectClauses = new Stack<>(); this.queryEnvs = new Stack<>(); this.prevEnvs = new Stack<>(); } public BType checkExpr(BLangExpression expr, SymbolEnv env) { return checkExpr(expr, env, symTable.noType); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) { return checkExpr(expr, env, expType, DiagnosticCode.INCOMPATIBLE_TYPES); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) { if (expr.typeChecked) { return expr.type; } if (expType.tag == TypeTags.INTERSECTION) { expType = ((BIntersectionType) expType).effectiveType; } SymbolEnv prevEnv = this.env; BType preExpType = this.expType; DiagnosticCode preDiagCode = this.diagCode; this.env = env; this.diagCode = diagCode; this.expType = expType; this.isTypeChecked = true; expr.expectedType = expType; expr.accept(this); if (resultType.tag == TypeTags.INTERSECTION) { resultType = ((BIntersectionType) resultType).effectiveType; } expr.type = resultType; expr.typeChecked = isTypeChecked; this.env = prevEnv; this.expType = preExpType; this.diagCode = preDiagCode; validateAndSetExprExpectedType(expr); return resultType; } private void validateAndSetExprExpectedType(BLangExpression expr) { if (resultType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null && expr.expectedType.tag == TypeTags.MAP && expr.type.tag == TypeTags.RECORD) { return; } expr.expectedType = resultType; } public void visit(BLangLiteral literalExpr) { BType literalType = setLiteralValueAndGetType(literalExpr, expType); if (literalType == symTable.semanticError || literalExpr.isFiniteContext) { return; } resultType = types.checkType(literalExpr, literalType, expType); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { checkXMLNamespacePrefixes(xmlElementAccess.filters); checkExpr(xmlElementAccess.expr, env, symTable.xmlType); resultType = new 
BXMLType(symTable.xmlElementType, null); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { if (xmlNavigation.lhsVar) { dlog.error(xmlNavigation.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE); } checkXMLNamespacePrefixes(xmlNavigation.filters); if (xmlNavigation.childIndex != null) { checkExpr(xmlNavigation.childIndex, env, symTable.intType); } BType actualType = checkExpr(xmlNavigation.expr, env, symTable.xmlType); types.checkType(xmlNavigation, actualType, expType); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { resultType = symTable.xmlType; } else { resultType = new BXMLType(symTable.xmlElementType, null); } } private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) { for (BLangXMLElementFilter filter : filters) { if (!filter.namespace.isEmpty()) { Name nsName = names.fromString(filter.namespace); BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName); filter.namespaceSymbol = nsSymbol; if (nsSymbol == symTable.notFoundSymbol) { dlog.error(filter.nsPos, DiagnosticCode.CANNOT_FIND_XML_NAMESPACE, nsName); } } } } private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) { BType literalType = symTable.getTypeFromTag(literalExpr.type.tag); Object literalValue = literalExpr.value; literalExpr.isJSONContext = types.isJSONContext(expType); if (literalType.tag == TypeTags.INT) { if (expType.tag == TypeTags.FLOAT) { literalType = symTable.floatType; literalExpr.value = ((Long) literalValue).doubleValue(); } else if (expType.tag == TypeTags.DECIMAL && !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) { literalType = symTable.decimalType; literalExpr.value = String.valueOf(literalValue); } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) { literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue); if (literalType == symTable.semanticError) { return symTable.semanticError; } } else if 
(expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned32IntType); 
setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); if (memberTypes.stream() .anyMatch(memType -> memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) { return setLiteralValueAndGetType(literalExpr, symTable.intType); } BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) { return setLiteralValueAndGetType(literalExpr, symTable.byteType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) { return setLiteralValueAndGetType(literalExpr, symTable.floatType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; 
} } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } } } else if (literalType.tag == TypeTags.FLOAT) { String literal = String.valueOf(literalValue); String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal); boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal); if (expType.tag == TypeTags.DECIMAL) { if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) { dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.floatType); resultType = symTable.semanticError; return resultType; } literalType = symTable.decimalType; literalExpr.value = numericLiteral; } else if (expType.tag == TypeTags.FLOAT) { literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral)); } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (!isDiscriminatedFloat && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType); if (unionMember != symTable.noType) { 
// --- continuation of setLiteralValueAndGetType: end of the FLOAT-literal / union branch ---
return unionMember;
}
}
} else if (literalType.tag == TypeTags.DECIMAL) {
    // Decimal literals get dedicated handling (discriminator stripping, finite/union matching).
    return decimalLiteral(literalValue, literalExpr, expType);
} else if (literalType.tag == TypeTags.STRING && this.expType.tag == TypeTags.CHAR_STRING &&
        types.isCharLiteralValue((String) literalValue)) {
    // A single-character string literal satisfies the char-string subtype directly.
    return symTable.charStringType;
} else {
    // Remaining literal kinds: try to match the literal against a finite expected type,
    // or against any finite member of an expected union.
    BType expected = getResolvedIntersectionType(this.expType);
    if (expected.tag == TypeTags.FINITE) {
        boolean foundMember = types.isAssignableToFiniteType(expected, literalExpr);
        if (foundMember) {
            setLiteralValueForFiniteType(literalExpr, literalType);
            return literalType;
        }
    } else if (expected.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) expected;
        boolean foundMember = unionType.getMemberTypes()
                .stream()
                .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
        if (foundMember) {
            setLiteralValueForFiniteType(literalExpr, literalType);
            return literalType;
        }
    }
}
if (literalExpr.type.tag == TypeTags.BYTE_ARRAY) {
    // Byte-array literal: its effective type is byte[].
    literalType = new BArrayType(symTable.byteType);
}
return literalType;
}

/**
 * Attempts to type the literal against a member of the expected union that is compatible
 * with {@code desiredType} (or a json/anydata/any member), falling back to finite-type
 * members of the union and finally to a decimal member.
 *
 * @param literalExpr the literal being typed
 * @param expType     the expected union type
 * @param desiredType the basic type the literal nominally belongs to
 * @return the chosen member type, or symTable.noType when no member matched
 */
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
    Set<BType> memberTypes = expType.getMemberTypes();
    if (memberTypes.stream()
            .anyMatch(memType -> memType.tag == desiredType.tag ||
                    memType.tag == TypeTags.JSON ||
                    memType.tag == TypeTags.ANYDATA ||
                    memType.tag == TypeTags.ANY)) {
        return setLiteralValueAndGetType(literalExpr, desiredType);
    }
    // NOTE(review): symTable.floatType is hard-coded here even though this method is also
    // called with desiredType = decimalType (see decimalLiteral). Presumably this should be
    // desiredType — confirm before changing, as it alters finite-member selection order.
    BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            return setType;
        }
    }
    if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
        return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
    }
    finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType);
    if (finiteType != symTable.semanticError) {
        BType setType =
// --- continuation of getAndSetAssignableUnionMember: second (decimal) finite-type attempt ---
setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
    return setType;
}
}
return symTable.noType;
}

/**
 * Returns true if the finite type's value space contains a member whose type tag is
 * {@code targetMemberTypeTag} and whose value the given literal is assignable to.
 */
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression valueExpr : finiteType.getValueSpace()) {
        if (valueExpr.type.tag == targetMemberTypeTag &&
                types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) {
            return true;
        }
    }
    return false;
}

/**
 * Types a decimal literal against the expected type: rejects a 'd'-discriminated literal
 * where a float is expected, matches finite and union expected types, and otherwise
 * defaults to the decimal type after stripping the numeric discriminator.
 */
private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) {
    String literal = String.valueOf(literalValue);
    if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) {
        // A decimal-discriminated literal can never be a float.
        dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType);
        resultType = symTable.semanticError;
        return resultType;
    }
    if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
        BFiniteType finiteType = (BFiniteType) expType;
        if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
            BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            setLiteralValueForFiniteType(literalExpr, valueType);
            return valueType;
        }
    } else if (expType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) expType;
        BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
        if (unionMember != symTable.noType) {
            return unionMember;
        }
    }
    literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal);
    resultType = symTable.decimalType;
    return symTable.decimalType;
}

/**
 * Records the type inferred for a literal in a finite-type context: installs the implicit
 * cast to the expected type, sets the checker's result type, and flags the finite context.
 */
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
    types.setImplicitCastExpr(literalExpr, type, this.expType);
    this.resultType = type;
    literalExpr.isFiniteContext = true;
}

/**
 * Builds a finite type from the values (of type {@code matchType}) found in the finite-type
 * members of the given union; returns symTable.semanticError when none exist.
 */
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
    List<BFiniteType> finiteTypeMembers =
// --- continuation of getFiniteTypeWithValuesOfSingleType: collect finite members of the union ---
unionType.getMemberTypes().stream()
        .filter(memType -> memType.tag == TypeTags.FINITE)
        .map(memFiniteType -> (BFiniteType) memFiniteType)
        .collect(Collectors.toList());
if (finiteTypeMembers.isEmpty()) {
    return symTable.semanticError;
}
int tag = matchType.tag;
// Preserve encounter order of matched values across all finite members.
Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
for (BFiniteType finiteType : finiteTypeMembers) {
    Set<BLangExpression> set = new HashSet<>();
    for (BLangExpression expression : finiteType.getValueSpace()) {
        if (expression.type.tag == tag) {
            set.add(expression);
        }
    }
    matchedValueSpace.addAll(set);
}
if (matchedValueSpace.isEmpty()) {
    return symTable.semanticError;
}
return new BFiniteType(null, matchedValueSpace);
}

/**
 * Resolves the effective type of an integer literal against the expected type, checking the
 * literal value fits the expected built-in integer subtype's range; logs an incompatible-types
 * error and returns symTable.semanticError when it does not.
 */
private BType getIntLiteralType(DiagnosticPos pos, BType expType, BType literalType, Object literalValue) {
    switch (expType.tag) {
        case TypeTags.INT:
            return symTable.intType;
        case TypeTags.BYTE:
            if (types.isByteLiteralValue((Long) literalValue)) {
                return symTable.byteType;
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (types.isSigned32LiteralValue((Long) literalValue)) {
                return symTable.signed32IntType;
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (types.isSigned16LiteralValue((Long) literalValue)) {
                return symTable.signed16IntType;
            }
            break;
        case TypeTags.SIGNED8_INT:
            if (types.isSigned8LiteralValue((Long) literalValue)) {
                return symTable.signed8IntType;
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (types.isUnsigned32LiteralValue((Long) literalValue)) {
                return symTable.unsigned32IntType;
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (types.isUnsigned16LiteralValue((Long) literalValue)) {
                return symTable.unsigned16IntType;
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (types.isUnsigned8LiteralValue((Long) literalValue)) {
                return symTable.unsigned8IntType;
            }
            break;
        default:
            // Any other expected type (and out-of-range values above) falls through to the error.
    }
    dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, literalType);
    resultType = symTable.semanticError;
    return resultType;
}

@Override
public void visit(BLangListConstructorExpr listConstructor) {
    if (expType.tag ==
TypeTags.NONE || expType.tag == TypeTags.READONLY) { BType inferredType = getInferredTupleType(listConstructor, expType); resultType = inferredType == symTable.semanticError ? symTable.semanticError : types.checkType(listConstructor, inferredType, expType); return; } resultType = checkListConstructorCompatibility(expType, listConstructor); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { if (expType.tag == TypeTags.NONE) { List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { resultType = symTable.semanticError; return; } } if (tableConstructorExpr.recordLiteralList.size() == 0) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE); resultType = symTable.semanticError; return; } BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr); BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { recordLiteral.type = inherentMemberType; } if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) { resultType = symTable.semanticError; return; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } resultType = tableType; return; } BType applicableExpType = expType.tag == TypeTags.INTERSECTION ? 
((BIntersectionType) expType).effectiveType : expType; if (applicableExpType.tag == TypeTags.TABLE) { List<BType> memTypes = new ArrayList<>(); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { BLangRecordLiteral clonedExpr = recordLiteral; if (this.nonErrorLoggingCheck) { clonedExpr.cloneAttempt++; clonedExpr = nodeCloner.clone(recordLiteral); } BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint); if (recordType == symTable.semanticError) { resultType = symTable.semanticError; return; } memTypes.add(recordType); } if (((BTableType) applicableExpType).constraint.tag == TypeTags.MAP) { validateMapConstraintTable(tableConstructorExpr, applicableExpType); return; } if (!(validateTableType((BTableType) applicableExpType, tableConstructorExpr.recordLiteralList) && validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) { resultType = symTable.semanticError; return; } BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType), null); if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) { tableType.flags |= Flags.READONLY; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } BTableType expectedTableType = (BTableType) applicableExpType; if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) { tableType.fieldNameList = expectedTableType.fieldNameList; } resultType = tableType; } else if (applicableExpType.tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; BLangDiagnosticLog prevDLog = this.dlog.getCurrentLog(); this.dlog.setNonConsoleDLog(); List<BType> matchingTypes = new ArrayList<>(); BUnionType expectedType = (BUnionType) applicableExpType; for (BType memType : expectedType.getMemberTypes()) { BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr; if (this.nonErrorLoggingCheck) { 
tableConstructorExpr.cloneAttempt++; clonedTableExpr = nodeCloner.clone(tableConstructorExpr); } BType resultType = checkExpr(clonedTableExpr, env, memType); if (resultType != symTable.semanticError && dlog.getErrorCount() == 0 && isUniqueType(matchingTypes, resultType)) { matchingTypes.add(resultType); } dlog.resetErrorCount(); } this.dlog.setCurrentLog(prevDLog); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; if (matchingTypes.isEmpty()) { BLangTableConstructorExpr exprToLog = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; exprToLog = nodeCloner.clone(tableConstructorExpr); } dlog.error(tableConstructorExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, getInferredTableType(exprToLog)); } else if (matchingTypes.size() != 1) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType); } else { resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0)); return; } resultType = symTable.semanticError; } else { resultType = symTable.semanticError; } } private BType getInferredTableType(BLangTableConstructorExpr exprToLog) { List<BType> memTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } return new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, exprToLog), null); } private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) { if (tableConstructorExpr.tableKeySpecifier != null) { if (!(validateTableConstructorRecordLiterals(getTableKeyNameList(tableConstructorExpr. 
tableKeySpecifier), tableConstructorExpr.recordLiteralList))) { resultType = symTable.semanticError; return true; } tableType.fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); } return false; } private BType inferTableMemberType(List<BType> memTypes, BType expType) { if (memTypes.isEmpty()) { return ((BTableType) expType).constraint; } LinkedHashSet<BType> result = new LinkedHashSet<>(); result.add(memTypes.get(0)); BUnionType unionType = BUnionType.create(null, result); for (int i = 1; i < memTypes.size(); i++) { BType source = memTypes.get(i); if (!types.isAssignable(source, unionType)) { result.add(source); unionType = BUnionType.create(null, result); } } if (unionType.getMemberTypes().size() == 1) { return memTypes.get(0); } return unionType; } private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) { BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier; List<String> keySpecifierFieldNames = new ArrayList<>(); Set<BField> allFieldSet = new LinkedHashSet<>(); for (BType memType : memTypes) { allFieldSet.addAll(((BRecordType) memType).fields.values()); } Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet); for (BType memType : memTypes) { commonFieldSet.retainAll(((BRecordType) memType).fields.values()); } List<String> requiredFieldNames = new ArrayList<>(); if (keySpecifier != null) { for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) { requiredFieldNames.add(((BLangIdentifier) identifierNode).value); keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value); } } List<String> fieldNames = new ArrayList<>(); for (BField field : allFieldSet) { String fieldName = field.name.value; if (fieldNames.contains(fieldName)) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY, fieldName); return symTable.semanticError; } fieldNames.add(fieldName); boolean isOptional = true; for 
// --- continuation of inferTableMemberType: a field present in every member record is required ---
(BField commonField : commonFieldSet) {
    if (commonField.name.value.equals(fieldName)) {
        isOptional = false;
        requiredFieldNames.add(commonField.name.value);
    }
}
if (isOptional) {
    field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL));
} else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) {
    // Key-specifier fields are both required and readonly.
    // NOTE(review): masks are combined with '+' rather than '|'; equivalent here only
    // because REQUIRED and READONLY are distinct bits — confirm before touching.
    field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) + Flags.asMask(EnumSet.of(Flag.READONLY));
} else if (requiredFieldNames.contains(fieldName)) {
    field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED));
}
}
return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos);
}

/**
 * Builds a sealed, anonymous record type from the given field set to serve as the inferred
 * table constraint, defining the fields in a fresh record symbol and registering a type
 * definition (with init function) for it in the current package.
 */
private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, DiagnosticPos pos) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
    for (BField field : allFieldSet) {
        recordSymbol.scope.define(field.name, field.symbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = allFieldSet.stream().collect(getFieldCollector());
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names,
            symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
    // The inferred constraint is closed: no rest fields allowed.
    recordType.sealed = true;
    recordType.restFieldType = symTable.noType;
    return recordType;
}

/**
 * Collector mapping fields by name into an insertion-ordered map; duplicate field names are
 * a programming error and raise IllegalStateException.
 */
private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
    BinaryOperator<BField> mergeFunc = (u, v) -> {
        throw new IllegalStateException(String.format("Duplicate key %s", u));
    };
    return Collectors.toMap(field -> field.name.value, Function.identity(), mergeFunc, LinkedHashMap::new);
}

/**
 * Validates a table type's constraint (must be a map-like type) and, when a key specifier
 * is present, validates the specified key fields against the constraint and the literals.
 */
private boolean validateTableType(BTableType tableType, List<BLangRecordLiteral> recordLiterals) {
    BType constraint =
// --- continuation of validateTableType ---
tableType.constraint;
if (!types.isAssignable(constraint, symTable.mapAllType)) {
    dlog.error(tableType.constraintPos, DiagnosticCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
    resultType = symTable.semanticError;
    return false;
}
List<String> fieldNameList = tableType.fieldNameList;
if (fieldNameList != null) {
    // For intersections, validate against the effective (resolved) constraint type.
    return validateKeySpecifier(fieldNameList,
            constraint.tag != TypeTags.INTERSECTION ? constraint :
                    ((BIntersectionType) constraint).effectiveType,
            tableType.keyPos) &&
            validateTableConstructorRecordLiterals(fieldNameList, recordLiterals);
}
return true;
}

/**
 * Ensures every key-specifier field in every member record literal is given a constant
 * (literal/constructor) value; logs an error and returns false otherwise.
 */
private boolean validateTableConstructorRecordLiterals(List<String> keySpecifierFieldNames,
                                                       List<BLangRecordLiteral> recordLiterals) {
    for (String fieldName : keySpecifierFieldNames) {
        for (BLangRecordLiteral recordLiteral : recordLiterals) {
            // NOTE(review): getRecordKeyValueField may return null when the field is absent
            // from the literal — the call below would then NPE; presumably presence is
            // guaranteed by earlier checks. Confirm.
            BLangRecordKeyValueField recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName);
            if (recordKeyValueField.getValue().getKind() == NodeKind.LITERAL ||
                    recordKeyValueField.getValue().getKind() == NodeKind.NUMERIC_LITERAL ||
                    recordKeyValueField.getValue().getKind() == NodeKind.RECORD_LITERAL_EXPR ||
                    recordKeyValueField.getValue().getKind() == NodeKind.ARRAY_LITERAL_EXPR ||
                    recordKeyValueField.getValue().getKind() == NodeKind.TUPLE_LITERAL_EXPR ||
                    recordKeyValueField.getValue().getKind() == NodeKind.XML_ELEMENT_LITERAL ||
                    recordKeyValueField.getValue().getKind() == NodeKind.XML_TEXT_LITERAL) {
                continue;
            }
            dlog.error(recordLiteral.pos,
                    DiagnosticCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT, fieldName);
            resultType = symTable.semanticError;
            return false;
        }
    }
    return true;
}

/**
 * Looks up the key-value field with the given name in a record literal; returns null when
 * no such field exists.
 * NOTE(review): every field is cast to BLangRecordKeyValueField unconditionally — a spread
 * or var-name field would throw ClassCastException; presumably excluded upstream. Confirm.
 */
private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral, String fieldName) {
    for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
        BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField;
        if (fieldName.equals(recordKeyValueField.key.toString())) {
            return recordKeyValueField;
        }
    }
    return null;
}

private
// ('private' modifier for this method sits at the end of the previous line)
/*
 * Checks each key-specifier field against the table constraint: the field must exist,
 * and must be readonly, required, and of an anydata-compatible type. Logs a specific
 * diagnostic and returns false on the first violation.
 */
boolean validateKeySpecifier(List<String> fieldNameList, BType constraint, DiagnosticPos pos) {
    for (String fieldName : fieldNameList) {
        BField field = types.getTableConstraintField(constraint, fieldName);
        if (field == null) {
            dlog.error(pos, DiagnosticCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint);
            resultType = symTable.semanticError;
            return false;
        }
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            dlog.error(pos, DiagnosticCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName);
            resultType = symTable.semanticError;
            return false;
        }
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName);
            resultType = symTable.semanticError;
            return false;
        }
        if (!types.isAssignable(field.type, symTable.anydataType)) {
            dlog.error(pos, DiagnosticCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint);
            resultType = symTable.semanticError;
            return false;
        }
    }
    return true;
}

/**
 * Validates a table constructor against the resolved table type: checks the constructor's
 * key specifier (when present) against the constraint and against any field-name list the
 * type already carries.
 */
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr,
                                             BTableType tableType) {
    BType constraintType = tableType.constraint;
    if (tableConstructorExpr.tableKeySpecifier != null) {
        List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
        if (tableType.fieldNameList == null &&
                !validateKeySpecifier(fieldNameList,
                        constraintType.tag != TypeTags.INTERSECTION ?
constraintType : ((BIntersectionType) constraintType).effectiveType, tableConstructorExpr.tableKeySpecifier.pos)) { return false; } if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticCode.TABLE_KEY_SPECIFIER_MISMATCH, tableType.fieldNameList.toString(), fieldNameList.toString()); resultType = symTable.semanticError; return false; } } BType keyTypeConstraint = tableType.keyTypeConstraint; if (keyTypeConstraint != null) { List<BType> memberTypes = new ArrayList<>(); if (keyTypeConstraint.tag == TypeTags.TUPLE) { for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) { memberTypes.add((BType) type); } } else { memberTypes.add(keyTypeConstraint); } if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) { return true; } if (tableConstructorExpr.tableKeySpecifier == null || tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT, memberTypes.size(), tableConstructorExpr.tableKeySpecifier == null ? 0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size()); resultType = symTable.semanticError; return false; } List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier. 
// --- continuation of validateTableConstructorExpr: match key-specifier fields against the
// key-type constraint members, positionally ---
fieldNameIdentifierList;
int index = 0;
for (IdentifierNode identifier : fieldNameIdentifierList) {
    BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
    if (!types.isAssignable(field.type, memberTypes.get(index))) {
        dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                DiagnosticCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
                fieldNameIdentifierList.toString(), memberTypes.toString());
        resultType = symTable.semanticError;
        return false;
    }
    index++;
}
}
return true;
}

/**
 * A table with a map constraint cannot carry any key machinery: rejects field-name lists,
 * key-type constraints, and key specifiers, then validates the table type itself.
 */
private void validateMapConstraintTable(BLangTableConstructorExpr tableConstructorExpr, BType expType) {
    if (((BTableType) expType).fieldNameList != null ||
            ((BTableType) expType).keyTypeConstraint != null) {
        dlog.error(((BTableType) expType).keyPos,
                DiagnosticCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        resultType = symTable.semanticError;
        return;
    }
    if (tableConstructorExpr.tableKeySpecifier != null) {
        dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                DiagnosticCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        resultType = symTable.semanticError;
        return;
    }
    if (!(validateTableType((BTableType) expType, tableConstructorExpr.recordLiteralList))) {
        resultType = symTable.semanticError;
        return;
    }
    resultType = expType;
}

/** Extracts the plain field names from a table key specifier, in declaration order. */
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
    List<String> fieldNamesList = new ArrayList<>();
    for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
        fieldNamesList.add(((BLangIdentifier) identifier).value);
    }
    return fieldNamesList;
}

/**
 * Derives the key type from the named constraint fields: a single field yields its type,
 * several fields yield a tuple of their types; symTable.semanticError when a field is
 * missing or no names were given.
 */
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
    if (fieldNames == null) {
        return symTable.semanticError;
    }
    List<BType> memTypes = new ArrayList<>();
    for (String fieldName : fieldNames) {
        BField tableConstraintField = types.getTableConstraintField(constraintType, fieldName);
        if (tableConstraintField == null) {
            return symTable.semanticError;
        }
        BType fieldType =
tableConstraintField.type; memTypes.add(fieldType); } if (memTypes.size() == 1) { return memTypes.get(0); } return new BTupleType(memTypes); } private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; BLangDiagnosticLog prevDLog = this.dlog.getCurrentLog(); this.dlog.setNonConsoleDLog(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.getErrorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } dlog.resetErrorCount(); } this.dlog.setCurrentLog(prevDLog); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; if (compatibleTypes.isEmpty()) { BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.clone(listConstructor); } BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType); if (!erroredExpType && inferredTupleType != symTable.semanticError) { dlog.error(listConstructor.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, inferredTupleType); } return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(listConstructor.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor); } if (tag 
== TypeTags.INTERSECTION) { return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor); } BType possibleType = getListConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.ARRAY: return checkArrayType(listConstructor, (BArrayType) possibleType); case TypeTags.TUPLE: return checkTupleType(listConstructor, (BTupleType) possibleType); case TypeTags.READONLY: return checkReadOnlyListType(listConstructor); case TypeTags.TYPEDESC: List<BType> results = new ArrayList<>(); listConstructor.isTypedescExpr = true; for (int i = 0; i < listConstructor.exprs.size(); i++) { results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType)); } List<BType> actualTypes = new ArrayList<>(); for (int i = 0; i < listConstructor.exprs.size(); i++) { final BLangExpression expr = listConstructor.exprs.get(i); if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) { actualTypes.add(((BLangTypedescExpr) expr).resolvedType); } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { actualTypes.add(((BLangSimpleVarRef) expr).symbol.type); } else { actualTypes.add(results.get(i)); } } if (actualTypes.size() == 1) { listConstructor.typedescType = actualTypes.get(0); } else { listConstructor.typedescType = new BTupleType(actualTypes); } return new BTypedescType(listConstructor.typedescType, null); } BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.clone(listConstructor); } if (bType == symTable.semanticError) { getInferredTupleType(exprToLog, symTable.semanticError); } else { dlog.error(listConstructor.pos, DiagnosticCode.INCOMPATIBLE_TYPES, bType, getInferredTupleType(exprToLog, symTable.noType)); } return symTable.semanticError; } private BType getListConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.ARRAY: case TypeTags.TUPLE: case TypeTags.READONLY: case TypeTags.TYPEDESC: return type; case 
TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) { BType eType = arrayType.eType; if (arrayType.state == BArrayState.OPEN_SEALED) { arrayType.size = listConstructor.exprs.size(); arrayType.state = BArrayState.CLOSED_SEALED; } else if ((arrayType.state != BArrayState.UNSEALED) && (arrayType.size != listConstructor.exprs.size())) { if (arrayType.size < listConstructor.exprs.size()) { dlog.error(listConstructor.pos, DiagnosticCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size()); return symTable.semanticError; } if (!types.hasFillerValue(eType)) { dlog.error(listConstructor.pos, DiagnosticCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType); return symTable.semanticError; } } boolean errored = false; for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(eType, expr) && !errored) { errored = true; } } return errored ? 
// --- continuation of checkArrayType: result of the member-compatibility loop ---
symTable.semanticError : arrayType;
}

/**
 * Checks a list constructor against a tuple type: fills missing trailing members only when
 * their types have filler values, rejects extra members unless a rest type exists, then
 * checks each expression against the corresponding member (or rest) type.
 */
private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) {
    List<BLangExpression> exprs = listConstructor.exprs;
    List<BType> memberTypes = tupleType.tupleTypes;
    BType restType = tupleType.restType;
    int listExprSize = exprs.size();
    int memberTypeSize = memberTypes.size();
    if (listExprSize < memberTypeSize) {
        // Short list: each omitted member must be fillable.
        for (int i = listExprSize; i < memberTypeSize; i++) {
            if (!types.hasFillerValue(memberTypes.get(i))) {
                dlog.error(listConstructor.pos, DiagnosticCode.SYNTAX_ERROR,
                        "tuple and expression size does not match");
                return symTable.semanticError;
            }
        }
    } else if (listExprSize > memberTypeSize && restType == null) {
        // Long list with no rest type to absorb the extras.
        dlog.error(listConstructor.pos, DiagnosticCode.SYNTAX_ERROR,
                "tuple and expression size does not match");
        return symTable.semanticError;
    }
    boolean errored = false;
    // Check fixed members positionally; every error is logged, but only the flag is kept.
    int nonRestCountToCheck = listExprSize < memberTypeSize ? listExprSize : memberTypeSize;
    for (int i = 0; i < nonRestCountToCheck; i++) {
        if (exprIncompatible(memberTypes.get(i), exprs.get(i)) && !errored) {
            errored = true;
        }
    }
    // Remaining expressions are checked against the rest type.
    for (int i = nonRestCountToCheck; i < exprs.size(); i++) {
        if (exprIncompatible(restType, exprs.get(i)) && !errored) {
            errored = true;
        }
    }
    return errored ?
symTable.semanticError : tupleType; } private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return types.checkType(listConstructor, inferredType, symTable.readonlyType); } for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(symTable.readonlyType, expr)) { return symTable.semanticError; } } return symTable.readonlyType; } private boolean exprIncompatible(BType eType, BLangExpression expr) { if (expr.typeChecked) { return expr.type == symTable.semanticError; } BLangExpression exprToCheck = expr; if (this.nonErrorLoggingCheck) { expr.cloneAttempt++; exprToCheck = nodeCloner.clone(expr); } return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError; } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) { return checkExprList(exprs, env, symTable.noType); } private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) { List<BType> types = new ArrayList<>(); SymbolEnv prevEnv = this.env; BType preExpType = this.expType; this.env = env; this.expType = expType; for (BLangExpression e : exprs) { checkExpr(e, this.env, expType); types.add(resultType); } this.env = prevEnv; this.expType = preExpType; return types; } private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) { List<BType> memTypes = checkExprList(listConstructor.exprs, env, expType); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } BTupleType tupleType = new BTupleType(memTypes); if (expType.tag != TypeTags.READONLY) { return tupleType; } tupleType.flags |= Flags.READONLY; return tupleType; } public void visit(BLangRecordLiteral recordLiteral) { int expTypeTag = expType.tag; if (expTypeTag == TypeTags.NONE || 
expTypeTag == TypeTags.READONLY) { expType = defineInferredRecordType(recordLiteral, expType); } else if (expTypeTag == TypeTags.OBJECT) { dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_RECORD_LITERAL, expType); resultType = symTable.semanticError; return; } resultType = getEffectiveMappingType(recordLiteral, checkMappingConstructorCompatibility(expType, recordLiteral)); }

    // Computes the effective type of a mapping constructor once compatibility is
    // established. When some fields are declared `readonly` in the constructor but
    // not in the applicable type, a fresh (anonymous) record type is synthesized
    // with those fields marked readonly; otherwise the applicable type is returned
    // unchanged. Also defines the new record type in the current package.
    private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) { if (applicableMappingType == symTable.semanticError || (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags, Flags.READONLY))) { return applicableMappingType; }
        // Collect fields declared `readonly` in the constructor that are not already
        // readonly in the applicable record type. Spread-op fields are skipped here.
        Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>(); LinkedHashMap<String, BField> applicableTypeFields = applicableMappingType.tag == TypeTags.RECORD ? ((BRecordType) applicableMappingType).fields : new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { continue; } String name; if (field.isKeyValueField()) { BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field; if (!keyValueField.readonly) { continue; } BLangExpression keyExpr = keyValueField.key.expr; if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { name = ((BLangSimpleVarRef) keyExpr).variableName.value; } else { name = (String) ((BLangLiteral) keyExpr).value; } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; if (!varNameField.readonly) { continue; } name = varNameField.variableName.value; } if (applicableTypeFields.containsKey(name) && Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) { continue; } readOnlyFields.put(name, field); } if (readOnlyFields.isEmpty()) { return applicableMappingType; }
        // Synthesize a record type symbol holding the readonly fields plus the
        // remaining fields of the applicable type.
        PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); LinkedHashMap<String, BField> newFields = new LinkedHashMap<>(); for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) { RecordLiteralNode.RecordField field = readOnlyEntry.getValue(); String key = readOnlyEntry.getKey(); Name fieldName = names.fromString(key); BType readOnlyFieldType; if (field.isKeyValueField()) { readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.type; } else { readOnlyFieldType = ((BLangRecordVarNameField) field).type; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(Flag.REQUIRED); add(Flag.READONLY); }}), fieldName, pkgID, readOnlyFieldType, recordSymbol, ((BLangNode) field).pos, VIRTUAL); newFields.put(key, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); if (applicableMappingType.tag == TypeTags.MAP) { recordType.sealed = false; recordType.restFieldType = ((BMapType) applicableMappingType).constraint; } else { BRecordType applicableRecordType = (BRecordType) applicableMappingType; boolean allReadOnlyFields = true;
        // Copy over fields not overridden as readonly; track whether every field
        // ends up readonly so a sealed, all-readonly record can itself be marked
        // readonly.
        for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) { String fieldName = origEntry.getKey(); BField field = origEntry.getValue(); if (readOnlyFields.containsKey(fieldName)) { continue; } BVarSymbol origFieldSymbol = field.symbol; int origFieldFlags = origFieldSymbol.flags; if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) { allReadOnlyFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID, origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL); newFields.put(fieldName, new BField(field.name, null, fieldSymbol)); recordSymbol.scope.define(field.name, fieldSymbol); } recordType.sealed = applicableRecordType.sealed; recordType.restFieldType = applicableRecordType.restFieldType; if (recordType.sealed && allReadOnlyFields) { recordType.flags |= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } recordType.fields = newFields; recordSymbol.type = recordType; recordType.tsymbol = recordSymbol;
        // Register the synthesized record as a type definition so later phases see it.
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); if (applicableMappingType.tag == TypeTags.MAP) { recordLiteral.expectedType = applicableMappingType; } return recordType; }

    // Determines which (non-union) type a mapping constructor is compatible with.
    // For a union expected type, each member is tried speculatively with console
    // diagnostics suppressed; exactly one compatible member must remain, otherwise
    // an "ambiguous types" or "no compatible type" error is reported.
    // (Continues on the following source line.)
    private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; BLangDiagnosticLog prevDLog = this.dlog.getCurrentLog(); this.dlog.setNonConsoleDLog(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType, mappingConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.getErrorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } dlog.resetErrorCount(); } this.dlog.setCurrentLog(prevDLog); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; if (compatibleTypes.isEmpty()) { if (!erroredExpType) { reportIncompatibleMappingConstructorError(mappingConstructor, bType); } validateSpecifiedFields(mappingConstructor, symTable.semanticError); return
symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(mappingConstructor.pos, DiagnosticCode.AMBIGUOUS_TYPES, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor); } if (tag == TypeTags.INTERSECTION) { return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor); }
        // Non-union expected type: map/record/readonly are handled; anything else
        // is an incompatible-mapping-constructor error.
        BType possibleType = getMappingConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.MAP: return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType : symTable.semanticError; case TypeTags.RECORD: boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType); boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType, mappingConstructor.fields, mappingConstructor.pos); return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError; case TypeTags.READONLY: return checkReadOnlyMappingType(mappingConstructor); } reportIncompatibleMappingConstructorError(mappingConstructor, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; }

    // Checks a mapping constructor against the `readonly` type. Outside the
    // speculative mode a record type is inferred and re-checked; in speculative
    // mode each field's value expression is individually checked against
    // `readonly` (cloning via exprIncompatible avoids mutating the AST).
    private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return checkMappingConstructorCompatibility(inferredType, mappingConstructor); } for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BLangExpression exprToCheck; if (field.isKeyValueField()) { exprToCheck = ((BLangRecordKeyValueField) field).valueExpr; } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } else { exprToCheck = (BLangRecordVarNameField) field; } if (exprIncompatible(symTable.readonlyType, exprToCheck)) { return symTable.semanticError; } } return symTable.readonlyType; }

    // Maps an expected type to the concrete mapping-compatible type to check the
    // constructor against: json/anydata/any become the corresponding map types
    // (immutable variants when the expected type is readonly); intersections use
    // their effective type. (Continues on the following source line.)
    private BType getMappingConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.MAP: case TypeTags.RECORD: case TypeTags.READONLY: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ?
symTable.mapType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; }

    // True if the type can directly accept a mapping constructor (record or map).
    private boolean isMappingConstructorCompatibleType(BType type) { return type.tag == TypeTags.RECORD || type.tag == TypeTags.MAP; }

    // Reports the most specific diagnostic for a mapping constructor that matched
    // no expected type. For a `record|()`-style union the record's own field
    // errors are reported instead of a generic incompatibility message.
    private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) { if (expType == symTable.semanticError) { return; } if (expType.tag != TypeTags.UNION) { dlog.error(mappingConstructorExpr.pos, DiagnosticCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType); return; } BUnionType unionType = (BUnionType) expType; BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]); if (memberTypes.length == 2) { BRecordType recType = null; if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[0]; } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[1]; } if (recType != null) { validateSpecifiedFields(mappingConstructorExpr, recType); validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos); return; } } for (BType bType : memberTypes) { if (isMappingConstructorCompatibleType(bType)) { dlog.error(mappingConstructorExpr.pos, DiagnosticCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR, unionType); return; } } dlog.error(mappingConstructorExpr.pos, DiagnosticCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType); }

    // Checks every specified field of the constructor against `possibleType`;
    // all fields are checked (not short-circuited) so each gets its diagnostics.
    private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) { boolean isFieldsValid = true; for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BType checkedType = checkMappingField(field, possibleType); if (isFieldsValid && checkedType == symTable.semanticError) { isFieldsValid = false; } } return isFieldsValid; }

    // Verifies that every required field of the record type is present among the
    // specified fields (including names contributed by spread operators).
    private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields, DiagnosticPos pos) { HashSet<String> specFieldNames = getFieldNames(specifiedFields); boolean hasAllRequiredFields = true; for (BField field : type.fields.values()) { String fieldName = field.name.value; if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name); if (hasAllRequiredFields) { hasAllRequiredFields = false; } } } return hasAllRequiredFields; }

    // Collects the names of all fields specified in the constructor: key-value
    // fields, var-name fields, and required fields pulled in via spread-ops.
    private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) { HashSet<String> fieldNames = new HashSet<>(); for (RecordLiteralNode.RecordField specifiedField : specifiedFields) { if (specifiedField.isKeyValueField()) { String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField); if (name == null) { continue; } fieldNames.add(name); } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField)); } else { fieldNames.addAll(getSpreadOpFieldRequiredFieldNames( (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField)); } } return fieldNames; }

    // Returns the statically-known name of a key-value field, or null for
    // computed keys (whose name cannot be resolved at compile time).
    private String getKeyValueFieldName(BLangRecordKeyValueField field) { BLangRecordKey key = field.key; if (key.computedKey) { return null; } BLangExpression keyExpr = key.expr; if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { return ((BLangSimpleVarRef) keyExpr).variableName.value; } else if (keyExpr.getKind() == NodeKind.LITERAL) { return (String) ((BLangLiteral) keyExpr).value; } return null; }

    // Name of a shorthand `{name}` field.
    private String getVarNameFieldName(BLangRecordVarNameField field) { return field.variableName.value; }

    // Names of the non-optional fields contributed by a spread-op expression;
    // only record-typed spread expressions contribute names.
    // (Continues on the following source line.)
    private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) { BType spreadType = checkExpr(field.expr, env); if (spreadType.tag !=
TypeTags.RECORD) { return Collections.emptyList(); } List<String> fieldNames = new ArrayList<>(); for (BField bField : ((BRecordType) spreadType).getFields().values()) { if (!Symbols.isOptional(bField.symbol)) { fieldNames.add(bField.name.value); } } return fieldNames; }

    // Type-checks a worker flush expression. The referenced worker (if named)
    // must exist in the current scope; the expression's type is `error?`.
    @Override public void visit(BLangWorkerFlushExpr workerFlushExpr) { if (workerFlushExpr.workerIdentifier != null) { String workerName = workerFlushExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(workerFlushExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName); } } BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(workerFlushExpr, actualType, expType); }

    // Type-checks a synchronous send (`->>`): the sent value must be anydata and
    // the target worker must exist in the current scope.
    @Override public void visit(BLangWorkerSyncSendExpr syncSendExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier)); if (symTable.notFoundSymbol.equals(symbol)) { syncSendExpr.workerType = symTable.semanticError; } else { syncSendExpr.workerType = symbol.type; } syncSendExpr.env = this.env; checkExpr(syncSendExpr.expr, this.env); if (!syncSendExpr.expr.type.isAnydata()) { this.dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.type); } String workerName = syncSendExpr.workerIdentifier.getValue(); if (!this.workerExists(this.env, workerName)) { this.dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName); } syncSendExpr.expectedType = expType; resultType = expType == symTable.noType ? symTable.nilType : expType; }

    // Type-checks a worker receive (`<-`). Receive is only valid where an
    // expected type is known; channel-style receives are rejected.
    @Override public void visit(BLangWorkerReceive workerReceiveExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier)); if (workerReceiveExpr.isChannel) { this.dlog.error(workerReceiveExpr.pos, DiagnosticCode.UNDEFINED_ACTION); return; } workerReceiveExpr.env = this.env; if (symTable.notFoundSymbol.equals(symbol)) { workerReceiveExpr.workerType = symTable.semanticError; } else { workerReceiveExpr.workerType = symbol.type; } if (symTable.noType == this.expType) { this.dlog.error(workerReceiveExpr.pos, DiagnosticCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION); } workerReceiveExpr.type = this.expType; resultType = this.expType; }

    // A worker exists if it is the default worker or resolves to a
    // worker-derived future-typed symbol in scope.
    private boolean workerExists(SymbolEnv env, String workerName) { if (workerName.equals(DEFAULT_WORKER_NAME)) { return true; } BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName)); return symbol != this.symTable.notFoundSymbol && symbol.type.tag == TypeTags.FUTURE && ((BFutureType) symbol.type).workerDerivative; }

    // Resolves a constant reference and applies any implicit cast to the
    // expected type.
    @Override public void visit(BLangConstRef constRef) { constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env, names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName)); types.setImplicitCastExpr(constRef, constRef.type, expType); resultType = constRef.type; }

    // Type-checks a simple variable reference: `_` is only legal on the LHS;
    // otherwise the name is resolved through its (possibly module-qualified)
    // symbol. (Continues on the following source line.)
    public void visit(BLangSimpleVarRef varRefExpr) { BType actualType = symTable.semanticError; Name varName = names.fromIdNode(varRefExpr.variableName); if (varName == Names.IGNORE) { if (varRefExpr.lhsVar) { varRefExpr.type = this.symTable.anyType; } else { varRefExpr.type = this.symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED); } varRefExpr.symbol = new BVarSymbol(0, varName, env.enclPkg.symbol.pkgID, varRefExpr.type, env.scope.owner, varRefExpr.pos, VIRTUAL); resultType = varRefExpr.type; return; } Name compUnitName = getCurrentCompUnit(varRefExpr); varRefExpr.pkgSymbol =
symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName); if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) { dlog.error(varRefExpr.pos, DiagnosticCode.UNDEFINED_MODULE, varRefExpr.pkgAlias); } if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { actualType = symTable.stringType; } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env, names.fromIdNode(varRefExpr.pkgAlias), varName);
        // Fall back to an attached-function lookup on the enclosing type when the
        // plain name is not found inside a type definition.
        if (symbol == symTable.notFoundSymbol && env.enclType != null) { Name objFuncName = names.fromString(Symbols .getAttachedFuncSymbolName(env.enclType.type.tsymbol.name.value, varName.value)); symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName, env.enclType.type.tsymbol); } if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) { BVarSymbol varSym = (BVarSymbol) symbol; checkSelfReferences(varRefExpr.pos, env, varSym); varRefExpr.symbol = varSym; actualType = varSym.type; markAndRegisterClosureVariable(symbol, varRefExpr.pos); } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) { actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null); varRefExpr.symbol = symbol; } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) symbol; varRefExpr.symbol = constSymbol; BType symbolType = symbol.type;
        // A constant keeps its declared type when the expected type is (or
        // contains) a matching finite type; otherwise its literal type is used.
        if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE || (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream() .anyMatch(memType -> memType.tag == TypeTags.FINITE && types.isAssignable(symbolType, memType)))) { actualType = symbolType; } else { actualType = constSymbol.literalType; } if (varRefExpr.lhsVar || varRefExpr.compoundAssignmentLhsVar) { actualType = symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticCode.CANNOT_UPDATE_CONSTANT_VALUE); } } else { logUndefinedSymbolError(varRefExpr.pos, varName.value); } } if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) { dlog.error(varRefExpr.pos, DiagnosticCode.SEALED_ARRAY_TYPE_CAN_NOT_INFER_SIZE); return; } resultType = types.checkType(varRefExpr, actualType, expType); }

    // Type-checks a record binding-pattern reference (destructuring LHS). An
    // anonymous record type is synthesized from the referenced fields; the rest
    // parameter (if any) determines sealed-ness and the rest field type.
    @Override public void visit(BLangRecordVarRef varRefExpr) { LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString(recordName), env.enclPkg.symbol.pkgID, null, env.scope.owner, varRefExpr.pos, SOURCE); symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env); boolean unresolvedReference = false; for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { ((BLangVariableReference) recordRefField.variableReference).lhsVar = true; checkExpr(recordRefField.variableReference, env); if (((BLangVariableReference) recordRefField.variableReference).symbol == null || !isValidVariableReference(recordRefField.variableReference)) { unresolvedReference = true; continue; } BVarSymbol bVarSymbol = (BVarSymbol) ((BLangVariableReference) recordRefField.variableReference).symbol; BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos, new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol, varRefExpr.pos, SOURCE)); fields.put(field.name.value, field); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { checkExpr(restParam, env); unresolvedReference = !isValidVariableReference(restParam); } if (unresolvedReference) { resultType = symTable.semanticError; return; } BRecordType bRecordType = new BRecordType(recordSymbol); bRecordType.fields = fields; recordSymbol.type = bRecordType; varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos, SOURCE); if (restParam == null) { bRecordType.sealed = true; bRecordType.restFieldType = symTable.noType; } else if (restParam.type == symTable.semanticError) { bRecordType.restFieldType = symTable.mapType; } else { BMapType restParamType = (BMapType) restParam.type; bRecordType.restFieldType = restParamType.constraint; } resultType = bRecordType; }

    // Type-checks an error binding-pattern reference: message must accept a
    // string, cause must accept error?, each detail entry must be a valid plain
    // variable reference. (Continues on the following source line.)
    @Override public void visit(BLangErrorVarRef varRefExpr) { if (varRefExpr.typeNode != null) { BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env); varRefExpr.type = bType; checkIndirectErrorVarRef(varRefExpr); resultType = bType; return; } if (varRefExpr.message != null) { varRefExpr.message.lhsVar = true; checkExpr(varRefExpr.message, env); if (!types.isAssignable(symTable.stringType, varRefExpr.message.type)) { dlog.error(varRefExpr.message.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType, varRefExpr.message.type); } } if (varRefExpr.cause != null) { varRefExpr.cause.lhsVar = true; checkExpr(varRefExpr.cause, env); if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.type)) { dlog.error(varRefExpr.cause.pos,
DiagnosticCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType, varRefExpr.cause.type); } } boolean unresolvedReference = false; for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { BLangVariableReference refItem = (BLangVariableReference) detailItem.expr; refItem.lhsVar = true; checkExpr(refItem, env); if (!isValidVariableReference(refItem)) { unresolvedReference = true; continue; } if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { dlog.error(refItem.pos, DiagnosticCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN, refItem); unresolvedReference = true; continue; } if (refItem.symbol == null) { unresolvedReference = true; } } if (varRefExpr.restVar != null) { varRefExpr.restVar.lhsVar = true; if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { checkExpr(varRefExpr.restVar, env); unresolvedReference = unresolvedReference || varRefExpr.restVar.symbol == null || !isValidVariableReference(varRefExpr.restVar); } else if (varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { unresolvedReference = checkErrorRestParamVarRef(varRefExpr, unresolvedReference); } } if (unresolvedReference) { resultType = symTable.semanticError; return; }
        // Derive the error detail type from the rest binding (or the default
        // error detail type when there is no usable rest binding).
        BType errorRefRestFieldType; if (varRefExpr.restVar == null) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { errorRefRestFieldType = varRefExpr.restVar.type; } else if (varRefExpr.restVar.type.tag == TypeTags.MAP) { errorRefRestFieldType = ((BMapType) varRefExpr.restVar.type).constraint; } else { dlog.error(varRefExpr.restVar.pos, DiagnosticCode.INCOMPATIBLE_TYPES, varRefExpr.restVar.type, symTable.detailType); resultType = symTable.semanticError; return; } BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly ? symTable.errorType.detailType : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC); resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType); }

    // Checks an error var-ref that carries an explicit (indirect) error type
    // node: each detail expression, the rest binding, message, and cause are
    // type-checked in the current environment.
    private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) { for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { checkExpr(detailItem.expr, env); checkExpr(detailItem, env, detailItem.expr.type); } if (varRefExpr.restVar != null) { checkExpr(varRefExpr.restVar, env); } if (varRefExpr.message != null) { varRefExpr.message.lhsVar = true; checkExpr(varRefExpr.message, env); } if (varRefExpr.cause != null) { varRefExpr.cause.lhsVar = true; checkExpr(varRefExpr.cause, env); } }

    // Resolves the rest binding of an error var-ref when it is an access
    // expression over a map; the rest type is the map constraint when it is
    // assignable to anydata|readonly. Returns the updated unresolved flag.
    // NOTE(review): non-map base access is deliberately unsupported here and
    // throws UnsupportedOperationException.
    private boolean checkErrorRestParamVarRef(BLangErrorVarRef varRefExpr, boolean unresolvedReference) { BLangAccessExpression accessExpression = (BLangAccessExpression) varRefExpr.restVar; Name exprName = names.fromIdNode(((BLangSimpleVarRef) accessExpression.expr).variableName); BSymbol fSym = symResolver.lookupSymbolInMainSpace(env, exprName); if (fSym != null) { if (fSym.type.getKind() == TypeKind.MAP) { BType constraint = ((BMapType) fSym.type).constraint; if (types.isAssignable(constraint, symTable.anydataOrReadonly)) { varRefExpr.restVar.type = constraint; } else { varRefExpr.restVar.type = symTable.anydataOrReadonly; } } else { throw new UnsupportedOperationException("rec field base access"); } } else { unresolvedReference = true; } return unresolvedReference; }

    // Type-checks a tuple binding-pattern reference: each member is checked as an
    // LHS expression and the member types form a tuple type.
    // (Continues on the following source line.)
    @Override public void visit(BLangTupleVarRef varRefExpr) { List<BType> results = new ArrayList<>(); for (int i = 0; i < varRefExpr.expressions.size(); i++) { ((BLangVariableReference) varRefExpr.expressions.get(i)).lhsVar = true; results.add(checkExpr(varRefExpr.expressions.get(i), env,
symTable.noType)); } BTupleType actualType = new BTupleType(results); if (varRefExpr.restParam != null) { BLangExpression restExpr = (BLangExpression) varRefExpr.restParam; ((BLangVariableReference) restExpr).lhsVar = true; BType checkedType = checkExpr(restExpr, env, symTable.noType); if (checkedType.tag != TypeTags.ARRAY) { dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType); resultType = symTable.semanticError; return; } actualType.restType = ((BArrayType) checkedType).eType; } resultType = types.checkType(varRefExpr, actualType, expType); }

    /**
     * This method will recursively check if a multidimensional array has at least one open sealed dimension.
     *
     * @param arrayType array to check if open sealed
     * @return true if at least one dimension is open sealed
     */
    public boolean isArrayOpenSealedType(BArrayType arrayType) { if (arrayType.state == BArrayState.OPEN_SEALED) { return true; } if (arrayType.eType.tag == TypeTags.ARRAY) { return isArrayOpenSealedType((BArrayType) arrayType.eType); } return false; }

    /**
     * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
     * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
     * enclosing invokable node's environment, which are outside of the scope of a lambda function.
     */
    private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && ((env.enclEnv.node.getKind() == NodeKind.TRANSACTION) || (env.enclEnv.node.getKind() == NodeKind.RETRY) || (env.enclEnv.node.getKind() == NodeKind.ON_FAIL))) { return env.enclEnv; } if (env.enclInvokable != null && env.enclInvokable == encInvokable) { return findEnclosingInvokableEnv(env.enclEnv, encInvokable); } return env; }

    // Overload of the above for record type nodes: walks outward past
    // environments belonging to the given record type definition.
    private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) { if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) { return env.enclEnv; } if (env.enclEnv.node != null && ((env.enclEnv.node.getKind() == NodeKind.TRANSACTION) || (env.enclEnv.node.getKind() == NodeKind.RETRY) || (env.enclEnv.node.getKind() == NodeKind.ON_FAIL))) { return env.enclEnv; } if (env.enclType != null && env.enclType == recordTypeNode) { return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode); } return env; }

    // True if the symbol matches one of the given parameters by name and type tag.
    private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) { return params.stream().anyMatch(param -> (param.symbol.name.equals(symbol.name) && param.type.tag == symbol.type.tag)); }

    // Type-checks a field access expression (`a.b`, `a?.b`, `a.*`), including
    // readonly-update validation on the LHS. (Continues on the following line.)
    public void visit(BLangFieldBasedAccess fieldAccessExpr) { ((BLangVariableReference) fieldAccessExpr.expr).lhsVar = fieldAccessExpr.lhsVar; ((BLangVariableReference) fieldAccessExpr.expr).compoundAssignmentLhsVar = fieldAccessExpr.compoundAssignmentLhsVar; BType varRefType = getTypeOfExprInFieldAccess(fieldAccessExpr.expr); if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess && !isXmlAccess(fieldAccessExpr)) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.INVALID_FIELD_ACCESS_EXPRESSION); resultType = symTable.semanticError; return; } BType actualType; if (fieldAccessExpr.fieldKind == FieldKind.ALL && varRefType.tag !=
TypeTags.XML) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_GET_ALL_FIELDS, varRefType); actualType = symTable.semanticError; } else { if (fieldAccessExpr.optionalFieldAccess) { if (fieldAccessExpr.lhsVar || fieldAccessExpr.compoundAssignmentLhsVar) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS); resultType = symTable.semanticError; return; } actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); } else { actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
        // LHS updates of readonly values are rejected, except for field
        // initialization inside an object's init method.
        if (actualType != symTable.semanticError && (fieldAccessExpr.lhsVar || fieldAccessExpr.compoundAssignmentLhsVar)) { if (isAllReadonlyTypes(varRefType)) { if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return; } } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) && isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, fieldAccessExpr.field.value, varRefType); resultType = symTable.semanticError; return; } } } } resultType = types.checkType(fieldAccessExpr, actualType, this.expType); }

    // True if the type (or every member of a union) carries the READONLY flag.
    private boolean isAllReadonlyTypes(BType type) { if (type.tag != TypeTags.UNION) { return Symbols.isFlagOn(type.flags, Flags.READONLY); } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isAllReadonlyTypes(memberType)) { return false; } } return true; }

    // True when the current enclosing invokable is the object type's initializer,
    // i.e. the readonly field is being assigned for the first time in init().
    private boolean isInitializationInInit(BType type) { BObjectType objectType = (BObjectType) type; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; return env.enclInvokable != null && initializerFunc != null &&
indexBasedAccessExpr.expr.type; BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; if (actualType != symTable.semanticError && (indexBasedAccessExpr.lhsVar || indexBasedAccessExpr.compoundAssignmentLhsVar)) { if (isAllReadonlyTypes(exprType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, exprType); resultType = symTable.semanticError; return; } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) && isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, getConstFieldName(indexExpr), exprType); resultType = symTable.semanticError; return; } } if (indexBasedAccessExpr.lhsVar) { indexBasedAccessExpr.originalType = actualType; indexBasedAccessExpr.type = actualType; resultType = actualType; return; } this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType); } public void visit(BLangInvocation iExpr) { if (iExpr.expr == null) { checkFunctionInvocationExpr(iExpr); return; } if (invalidModuleAliasUsage(iExpr)) { return; } checkExpr(iExpr.expr, this.env, symTable.noType); BType varRefType = iExpr.expr.type; switch (varRefType.tag) { case TypeTags.OBJECT: checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType); break; case TypeTags.RECORD: checkFieldFunctionPointer(iExpr, this.env); break; case TypeTags.NONE: dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name); break; case TypeTags.SEMANTIC_ERROR: break; default: checkInLangLib(iExpr, varRefType); } } public void visit(BLangInvocation.BLangActionInvocation aInv) { if (aInv.expr == null) { checkFunctionInvocationExpr(aInv); return; } if (invalidModuleAliasUsage(aInv)) { return; } checkExpr(aInv.expr, this.env, symTable.noType); BLangExpression varRef = aInv.expr; switch (varRef.type.tag) { case TypeTags.OBJECT: 
// (tail of visit(BLangActionInvocation) switch) dispatch on the receiver's type.
checkActionInvocation(aInv, (BObjectType) varRef.type);
break;
case TypeTags.RECORD:
    // The "action" may actually be a function-pointer field of a record.
    checkFieldFunctionPointer(aInv, this.env);
    break;
case TypeTags.NONE:
    dlog.error(aInv.pos, DiagnosticCode.UNDEFINED_FUNCTION, aInv.name);
    resultType = symTable.semanticError;
    break;
case TypeTags.SEMANTIC_ERROR:
default:
    // NOTE(review): SEMANTIC_ERROR falls through to the generic invalid-action error here,
    // whereas visit(BLangInvocation) stays silent for SEMANTIC_ERROR — confirm intended.
    dlog.error(aInv.pos, DiagnosticCode.INVALID_ACTION_INVOCATION, varRef.type);
    resultType = symTable.semanticError;
    break;
}
}

/**
 * Reports an error if the invocation carries a module (package) alias, which is not allowed on
 * method/action invocations made through an expression.
 *
 * @return true when an alias was present (an error has been logged)
 */
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
    Name pkgAlias = names.fromIdNode(invocation.pkgAlias);
    if (pkgAlias != Names.EMPTY) {
        dlog.error(invocation.pos, DiagnosticCode.PKG_ALIAS_NOT_ALLOWED_HERE);
        return true;
    }
    return false;
}

/**
 * Type-checks a let expression: creates a synthetic let scope symbol, analyzes each let variable
 * declaration inside that scope, then checks the result expression against the expected type.
 */
public void visit(BLangLetExpression letExpression) {
    BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())),
            new Name(String.format("$let_symbol_%d$", letCount++)),
            env.enclPkg.symbol.pkgID, letExpression.type, env.scope.owner, letExpression.pos);
    letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env);
    }
    BType exprType = checkExpr(letExpression.expr, letExpression.env);
    types.checkType(letExpression, exprType, this.expType);
}

/**
 * Resolves an invocation against the lang-lib methods of the receiver type, then verifies it
 * neither mutates an immutable value nor changes a fixed-length list's size.
 */
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.name.pos, DiagnosticCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                   iExpr.expr.type);
        resultType = symTable.semanticError;
        return;
    }
    if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) {
        return;
    }
    checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType);
}

/**
 * Reports an error when a known lang-lib mutator function is invoked on a readonly value.
 * Returns true when the invocation was rejected. (Body continues on the next source line.)
 */
private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                 BSymbol langLibMethodSymbol) {
    if (!Symbols.isFlagOn(varRefType.flags,
// (tail of checkInvalidImmutableValueUpdate) reject only when the receiver is readonly AND the
// resolved lang-lib function is a registered mutator for its module.
Flags.READONLY)) {
    return false;
}
String packageId = langLibMethodSymbol.pkgID.name.value;
if (!modifierFunctions.containsKey(packageId)) {
    return false;
}
String funcName = langLibMethodSymbol.name.value;
if (!modifierFunctions.get(packageId).contains(funcName)) {
    return false;
}
// NOTE(review): mergeJson appears to be treated as mutating only for map receivers — confirm
// against lang.value:mergeJson semantics.
if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) {
    return false;
}
dlog.error(iExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
resultType = symTable.semanticError;
return true;
}

/**
 * Whether {@code type} is a fixed-length list: a sealed array, a tuple without a rest type, or a
 * union whose members are all fixed-length lists.
 */
private boolean isFixedLengthList(BType type) {
    switch(type.tag) {
        case TypeTags.ARRAY:
            return (((BArrayType) type).state != BArrayState.UNSEALED);
        case TypeTags.TUPLE:
            return (((BTupleType) type).restType == null);
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) type;
            for (BType member : unionType.getMemberTypes()) {
                if (!isFixedLengthList(member)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}

/**
 * Rejects list-length-modifying lang-lib calls (push/pop/shift/...) that the receiver's shape
 * forbids: fixed-length lists cannot grow or shrink, and shift() on certain tuples would change
 * the tuple's shape.
 */
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
    String invocationName = iExpr.name.getValue();
    if (!listLengthModifierFunctions.contains(invocationName)) {
        return;
    }
    if (isFixedLengthList(varRefType)) {
        dlog.error(iExpr.name.pos, DiagnosticCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE,
                   invocationName, varRefType);
        resultType = symTable.semanticError;
        return;
    }
    if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
        dlog.error(iExpr.name.pos, DiagnosticCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE,
                   invocationName, varRefType);
        resultType = symTable.semanticError;
        return;
    }
}

/**
 * Whether the invocation is shift() on a tuple (or a union of such tuples) whose member types
 * differ from its rest type, so removing the first member would change the tuple's shape.
 * (Body continues on the next source line.)
 */
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
    if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0) &&
            hasDifferentTypeThanRest((BTupleType) varRefType)) {
        return true;
    }
    if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) {
        BUnionType unionVarRef = (BUnionType) varRefType;
        boolean allMemberAreFixedShapeTuples = true;
for (BType member : unionVarRef.getMemberTypes()) { if (member.tag != TypeTags.TUPLE) { allMemberAreFixedShapeTuples = false; break; } if (!hasDifferentTypeThanRest((BTupleType) member)) { allMemberAreFixedShapeTuples = false; break; } } return allMemberAreFixedShapeTuples; } return false; } private boolean hasDifferentTypeThanRest(BTupleType tupleType) { if (tupleType.restType == null) { return false; } for (BType member : tupleType.getTupleTypes()) { if (!types.isSameType(tupleType.restType, member)) { return true; } } return false; } private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) { BType type = checkExpr(iExpr.expr, env); BLangIdentifier invocationIdentifier = iExpr.name; if (type == symTable.semanticError) { return false; } BSymbol funcSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier), type.tsymbol); if (funcSymbol == symTable.notFoundSymbol) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, type); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.name.pos, DiagnosticCode.UNDEFINED_FIELD_IN_RECORD, invocationIdentifier, type); resultType = symTable.semanticError; } else { checkInvalidImmutableValueUpdate(iExpr, type, langLibMethodSymbol); } return false; } iExpr.symbol = funcSymbol; iExpr.type = ((BInvokableSymbol) funcSymbol).retType; checkInvocationParamAndReturnType(iExpr); iExpr.functionPointerInvocation = true; return true; } public void visit(BLangTypeInit cIExpr) { if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) { dlog.error(cIExpr.pos, DiagnosticCode.INVALID_TYPE_NEW_LITERAL, expType); resultType = symTable.semanticError; return; } BType actualType; if (cIExpr.userDefinedType != null) { actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env); } else { actualType = expType; } if (actualType == symTable.semanticError) { resultType = symTable.semanticError; return; } if (actualType.tag 
== TypeTags.INTERSECTION) { actualType = ((BIntersectionType) actualType).effectiveType; } switch (actualType.tag) { case TypeTags.OBJECT: if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return; } if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) { return; } } break; case TypeTags.STREAM: if (cIExpr.initInvocation.argExprs.size() != 1) { dlog.error(cIExpr.pos, DiagnosticCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation.name); resultType = symTable.semanticError; return; } BStreamType actualStreamType = (BStreamType) actualType; if (actualStreamType.error != null) { BType error = actualStreamType.error; if (error != symTable.neverType && !types.containsErrorType(error)) { dlog.error(cIExpr.pos, DiagnosticCode.ERROR_TYPE_EXPECTED, error.toString()); resultType = symTable.semanticError; return; } } BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0); BType constructType = checkExpr(iteratorExpr, env, symTable.noType); BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType); BUnionType expectedReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType); if (nextReturnType == null) { dlog.error(iteratorExpr.pos, DiagnosticCode.MISSING_REQUIRED_METHOD_NEXT, constructType, expectedReturnType); resultType = symTable.semanticError; return; } if (types.getErrorType(nextReturnType) == null && (types.getErrorType(expectedReturnType) != null)) { 
dlog.error(iteratorExpr.pos, DiagnosticCode.INVALID_STREAM_CONSTRUCTOR_EXP_TYPE, iteratorExpr); resultType = symTable.semanticError; return; } types.checkType(iteratorExpr.pos, nextReturnType, expectedReturnType, DiagnosticCode.INCOMPATIBLE_TYPES); resultType = actualType; return; case TypeTags.UNION: List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType); BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType); cIExpr.initInvocation.type = symTable.nilType; if (matchedType.tag == TypeTags.OBJECT) { if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType; actualType = matchedType; break; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) { return; } } } types.checkType(cIExpr, matchedType, expType); cIExpr.type = matchedType; resultType = matchedType; return; default: dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType); resultType = symTable.semanticError; return; } if (cIExpr.initInvocation.type == null) { cIExpr.initInvocation.type = symTable.nilType; } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.type); resultType = types.checkType(cIExpr, actualTypeInitType, expType); } private BUnionType createNextReturnType(DiagnosticPos pos, BStreamType streamType) { BRecordType recordType = new BRecordType(null); recordType.restFieldType = symTable.noType; recordType.sealed = true; Name fieldName = Names.VALUE; BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC, fieldName, env.enclPkg.packageID, streamType.constraint, env.scope.owner, pos, VIRTUAL)); field.type = streamType.constraint; recordType.fields.put(field.name.value, field); recordType.tsymbol = 
// (tail of createNextReturnType) create the symbol for the synthetic record {| T value; |}
// and register its single "value" field in a fresh scope.
Symbols.createRecordSymbol(0, Names.EMPTY, env.enclPkg.packageID,
        recordType, env.scope.owner, pos, VIRTUAL);
recordType.tsymbol.scope = new Scope(env.scope.owner);
recordType.tsymbol.scope.define(fieldName, field.symbol);
// Expected next() return type: record {| T value; |} | <stream error> | ().
LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
retTypeMembers.add(recordType);
if (streamType.error != symTable.neverType && streamType.error != null) {
    retTypeMembers.add(streamType.error);
}
retTypeMembers.add(symTable.nilType);
BUnionType unionType = BUnionType.create(null, retTypeMembers);
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
        env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);
return unionType;
}

/**
 * Validates a {@code new(...)} invocation for an object type that has no initializer function:
 * arguments are only legal when an init function exists. On failure logs TOO_MANY_ARGS_FUNC_CALL,
 * still type-checks the argument expressions, and returns false.
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
    if (!cIExpr.initInvocation.argExprs.isEmpty()
            && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) {
        dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL,
                   cIExpr.initInvocation.exprSymbol);
        cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}

/**
 * Computes the type of a {@code new T(...)} expression from the object type and the init
 * function's return type: for a union return, (T | members minus nil); for a nil return, T
 * itself; anything else is a semantic error.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.UNION) {
        LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
        retTypeMembers.add(objType);
        retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes());
        // Successful construction yields the object, never nil.
        retTypeMembers.remove(symTable.nilType);
        BUnionType unionType = BUnionType.create(null, retTypeMembers);
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
                env.enclPkg.symbol.pkgID, unionType, env.scope.owner, symTable.builtinPos,
                VIRTUAL);
        return unionType;
    } else if (initRetType.tag == TypeTags.NIL) {
        return objType;
    }
    return symTable.semanticError;
}

/**
 * Finds the members of an LHS union that a {@code new(...)} expression could be constructing,
 * based on whether the supplied arguments match each member's init function.
 * (Body continues on the next source line.)
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
    // Count object members (including object-typed intersections) for the single-candidate path.
    int objectCount = 0;
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        int
tag = memberType.tag; if (tag == TypeTags.OBJECT) { objectCount++; continue; } if (tag != TypeTags.INTERSECTION) { continue; } if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) { objectCount++; } } boolean containsSingleObject = objectCount == 1; List<BType> matchingLhsMemberTypes = new ArrayList<>(); for (BType memberType : lhsUnionType.getMemberTypes()) { if (memberType.tag != TypeTags.OBJECT) { continue; } if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol); } if (containsSingleObject) { return Collections.singletonList(memberType); } BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc; if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) { matchingLhsMemberTypes.add(memberType); } } return matchingLhsMemberTypes; } private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) { if (matchingLhsMembers.isEmpty()) { dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } else if (matchingLhsMembers.size() == 1) { return matchingLhsMembers.get(0).tsymbol.type; } else { dlog.error(cIExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, lhsUnion); resultType = symTable.semanticError; return symTable.semanticError; } } private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) { invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType)); if (function == null) { return invocationArguments.isEmpty(); } if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) { return true; } List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); List<BLangExpression> positionalArgs = new ArrayList<>(); for (BLangExpression argument : invocationArguments) { if (argument.getKind() == 
NodeKind.NAMED_ARGS_EXPR) { namedArgs.add((BLangNamedArgsExpression) argument); } else { positionalArgs.add(argument); } } List<BVarSymbol> requiredParams = function.symbol.params.stream() .filter(param -> !param.defaultableParam) .collect(Collectors.toList()); if (requiredParams.size() > invocationArguments.size()) { return false; } List<BVarSymbol> defaultableParams = function.symbol.params.stream() .filter(param -> param.defaultableParam) .collect(Collectors.toList()); int givenRequiredParamCount = 0; for (int i = 0; i < positionalArgs.size(); i++) { if (function.symbol.params.size() > i) { givenRequiredParamCount++; BVarSymbol functionParam = function.symbol.params.get(i); if (!types.isAssignable(positionalArgs.get(i).type, functionParam.type)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); continue; } if (function.symbol.restParam != null) { BType restParamType = ((BArrayType) function.symbol.restParam.type).eType; if (!types.isAssignable(positionalArgs.get(i).type, restParamType)) { return false; } continue; } return false; } for (BLangNamedArgsExpression namedArg : namedArgs) { boolean foundNamedArg = false; List<BVarSymbol> params = function.symbol.params; for (int i = givenRequiredParamCount; i < params.size(); i++) { BVarSymbol functionParam = params.get(i); if (!namedArg.name.value.equals(functionParam.name.value)) { continue; } foundNamedArg = true; BType namedArgExprType = checkExpr(namedArg.expr, env); if (!types.isAssignable(functionParam.type, namedArgExprType)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); } if (!foundNamedArg) { return false; } } return requiredParams.size() <= 0; } public void visit(BLangWaitForAllExpr waitForAllExpr) { switch (expType.tag) { case TypeTags.RECORD: checkTypesForRecords(waitForAllExpr); break; case TypeTags.MAP: checkTypesForMap(waitForAllExpr.keyValuePairs, ((BMapType) expType).constraint); LinkedHashSet<BType> 
memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypesForMap.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypesForMap.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap); resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol); break; case TypeTags.NONE: case TypeTags.ANY: checkTypesForMap(waitForAllExpr.keyValuePairs, expType); LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypes.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintType = BUnionType.create(null, memberTypes); resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol); break; default: dlog.error(waitForAllExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitForAllExpr.keyValuePairs, waitForAllExpr.pos)); resultType = symTable.semanticError; break; } waitForAllExpr.type = resultType; if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.type, expType); } } private BRecordType getWaitForAllExprReturnType(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals, DiagnosticPos pos) { BRecordType retType = new BRecordType(null); for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BLangIdentifier fieldName; if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { fieldName = keyVal.key; } else { fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName; } BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName)); BType fieldType = symbol.type.tag == TypeTags.FUTURE ? 
// (tail of getWaitForAllExprReturnType) a future-typed variable contributes its constraint;
// anything else contributes its own type.
((BFutureType) symbol.type).constraint : symbol.type;
BField field = new BField(names.fromIdNode(keyVal.key), null,
        new BVarSymbol(0, names.fromIdNode(keyVal.key), env.enclPkg.packageID,
                fieldType, null, keyVal.pos, VIRTUAL));
retType.fields.put(field.name.value, field);
}
// The inferred record is closed: exactly the waited-on keys, all required.
retType.restFieldType = symTable.noType;
retType.sealed = true;
retType.tsymbol = Symbols.createRecordSymbol(0, Names.EMPTY, env.enclPkg.packageID,
        retType, null, pos, VIRTUAL);
return retType;
}

/**
 * Collects the distinct result types produced by a wait-for-all expression's key-value pairs,
 * unwrapping future constraints.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.type : keyVal.valueExpr.type;
        if (bType.tag == TypeTags.FUTURE) {
            memberTypes.add(((BFutureType) bType).constraint);
        } else {
            memberTypes.add(bType);
        }
    }
    return memberTypes;
}

/**
 * Checks every wait key-value against the map constraint type.
 */
private void checkTypesForMap(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs,
                              BType expType) {
    keyValuePairs.forEach(keyVal -> checkWaitKeyValExpr(keyVal, expType));
}

/**
 * Checks a wait-for-all expression against an expected record type: field count for sealed
 * records, per-field types, and extra fields against the rest-field type; required-field
 * presence is verified further on.
 */
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
    List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
    Map<String, BField> lhsFields = ((BRecordType) expType).fields;
    // A sealed record cannot accept more fields than it declares.
    if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) {
        dlog.error(waitExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType,
                   getWaitForAllExprReturnType(waitExpr.keyValuePairs, waitExpr.pos));
        resultType = symTable.semanticError;
        return;
    }
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
        String key = keyVal.key.value;
        if (!lhsFields.containsKey(key)) {
            if (((BRecordType) expType).sealed) {
                dlog.error(waitExpr.pos, DiagnosticCode.INVALID_FIELD_NAME_RECORD_LITERAL, key,
                           expType);
                resultType = symTable.semanticError;
            } else {
                // Open record: the extra field must satisfy the rest-field type.
                BType restFieldType = ((BRecordType) expType).restFieldType;
                checkWaitKeyValExpr(keyVal, restFieldType);
            }
        } else
{ checkWaitKeyValExpr(keyVal, lhsFields.get(key).type); } } checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos); if (symTable.semanticError != resultType) { resultType = expType; } } private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs, DiagnosticPos pos) { type.fields.values().forEach(field -> { boolean hasField = keyValPairs.stream().anyMatch(keyVal -> field.name.value.equals(keyVal.key.value)); if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name); } }); } private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) { BLangExpression expr; if (keyVal.keyExpr != null) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode (((BLangSimpleVarRef) keyVal.keyExpr).variableName)); keyVal.keyExpr.type = symbol.type; expr = keyVal.keyExpr; } else { expr = keyVal.valueExpr; } BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null); checkExpr(expr, env, futureType); } public void visit(BLangTernaryExpr ternaryExpr) { BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType); SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env); BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType); SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env); BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType); if (condExprType == symTable.semanticError || thenType == symTable.semanticError || elseType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isAssignable(elseType, thenType)) { resultType = thenType; } else if (types.isAssignable(thenType, elseType)) { resultType = elseType; } else { dlog.error(ternaryExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, 
thenType, elseType); resultType = symTable.semanticError; } } else { resultType = expType; } } public void visit(BLangWaitExpr waitExpr) { expType = new BFutureType(TypeTags.FUTURE, expType, null); checkExpr(waitExpr.getExpression(), env, expType); if (resultType.tag == TypeTags.UNION) { LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>()); if (memberTypes.size() == 1) { resultType = memberTypes.toArray(new BType[0])[0]; } else { resultType = BUnionType.create(null, memberTypes); } } else if (resultType != symTable.semanticError) { resultType = ((BFutureType) resultType).constraint; } waitExpr.type = resultType; if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitExpr, waitExpr.type, ((BFutureType) expType).constraint); } } private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) { for (BType memberType : unionType.getMemberTypes()) { if (memberType.tag == TypeTags.FUTURE) { memberTypes.add(((BFutureType) memberType).constraint); } else { memberTypes.add(memberType); } } return memberTypes; } @Override public void visit(BLangTrapExpr trapExpr) { boolean firstVisit = trapExpr.expr.type == null; BType actualType; BType exprType = checkExpr(trapExpr.expr, env, expType); boolean definedWithVar = expType == symTable.noType; if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = trapExpr.type; exprType = trapExpr.expr.type; } } if (expType == symTable.semanticError || exprType == symTable.semanticError) { actualType = symTable.semanticError; } else { LinkedHashSet<BType> resultTypes = new LinkedHashSet<>(); if (exprType.tag == TypeTags.UNION) { resultTypes.addAll(((BUnionType) exprType).getMemberTypes()); } else { resultTypes.add(exprType); } resultTypes.add(symTable.errorType); actualType = BUnionType.create(null, resultTypes); } resultType 
= types.checkType(trapExpr, actualType, expType); if (definedWithVar && resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.type, resultType); } } public void visit(BLangBinaryExpr binaryExpr) { if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) { BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType); BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType); if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) { resultType = symTable.semanticError; return; } resultType = BUnionType.create(null, lhsResultType, rhsResultType); return; } checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr); SymbolEnv rhsExprEnv; BType lhsType = checkExpr(binaryExpr.lhsExpr, env); if (binaryExpr.opKind == OperatorKind.AND) { rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true); } else if (binaryExpr.opKind == OperatorKind.OR) { rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env); } else { rhsExprEnv = env; } BType rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv); BType actualType = symTable.semanticError; switch (binaryExpr.opKind) { case ADD: BType leftConstituent = getXMLConstituents(lhsType); BType rightConstituent = getXMLConstituents(rhsType); if (leftConstituent != null && rightConstituent != null) { actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null); break; } default: if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) { BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr); } if (opSymbol == symTable.notFoundSymbol) { dlog.error(binaryExpr.pos, DiagnosticCode.BINARY_OP_INCOMPATIBLE_TYPES, 
// (tail of visit(BLangBinaryExpr)) no binary operator was found for the operand types.
binaryExpr.opKind, lhsType, rhsType);
} else {
    // Equality over values that may hold tables is not yet supported.
    if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL)
            && (couldHoldTableValues(lhsType, new ArrayList<>())
            && couldHoldTableValues(rhsType, new ArrayList<>()))) {
        dlog.error(binaryExpr.pos, DiagnosticCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME);
    }
    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    actualType = opSymbol.type.getReturnType();
}
}
}
resultType = types.checkType(binaryExpr, actualType, expType);
}

/**
 * Walks the enclosing-environment chain up to {@code node} and returns a clone of the
 * environment just before it, or a fresh env for {@code node} when none is found.
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    while (env != null && env.node != node) {
        env = env.enclEnv;
    }
    return env != null && env.enclEnv != null ? env.enclEnv.createClone() : new SymbolEnv(node, null);
}

/**
 * Builds the symbol environment visible after a query join clause: clones the chain up to
 * {@code node} and splices its enclosing env to just before the last input (from/join) clause.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    SymbolEnv clone = env.createClone();
    while (clone != null && clone.node != node) {
        clone = clone.enclEnv;
    }
    if (clone != null) {
        clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
    } else {
        clone = new SymbolEnv(node, null);
    }
    return clone;
}

/**
 * Finds the nearest enclosing FROM or JOIN clause node, or null when there is none.
 * (Body continues on the next source line.)
 */
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    while (env != null && (env.node.getKind() != NodeKind.FROM && env.node.getKind() != NodeKind.JOIN)) {
        env = env.enclEnv;
    }
    return env != null ?
env.node : null; } public void visit(BLangTransactionalExpr transactionalExpr) { resultType = types.checkType(transactionalExpr, symTable.booleanType, expType); } public void visit(BLangCommitExpr commitExpr) { BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(commitExpr, actualType, expType); } private BType getXMLConstituents(BType type) { BType constituent = null; if (type.tag == TypeTags.XML) { constituent = ((BXMLType) type).constraint; } else if (TypeTags.isXMLNonSequenceType(type.tag)) { constituent = type; } return constituent; } private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) { if (expType.tag != TypeTags.DECIMAL) { return; } switch (binaryExpr.opKind) { case ADD: case SUB: case MUL: case DIV: checkExpr(binaryExpr.lhsExpr, env, expType); checkExpr(binaryExpr.rhsExpr, env, expType); break; default: break; } } public void visit(BLangElvisExpr elvisExpr) { BType lhsType = checkExpr(elvisExpr.lhsExpr, env); BType actualType = symTable.semanticError; if (lhsType != symTable.semanticError) { if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) { BUnionType unionType = (BUnionType) lhsType; LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream() .filter(type -> type.tag != TypeTags.NIL) .collect(Collectors.toCollection(LinkedHashSet::new)); if (memberTypes.size() == 1) { actualType = memberTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, memberTypes); } } else { dlog.error(elvisExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS, lhsType); } } BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType); BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES); if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == 
symTable.noType) { if (types.isSameType(rhsReturnType, lhsReturnType)) { resultType = lhsReturnType; } else { dlog.error(elvisExpr.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType); resultType = symTable.semanticError; } } else { resultType = expType; } } @Override public void visit(BLangGroupExpr groupExpr) { resultType = checkExpr(groupExpr.expression, env, expType); } public void visit(BLangTypedescExpr accessExpr) { accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env); int resolveTypeTag = accessExpr.resolvedType.tag; final BType actualType; if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) { actualType = new BTypedescType(accessExpr.resolvedType, null); } else { actualType = accessExpr.resolvedType; } resultType = types.checkType(accessExpr, actualType, expType); } public void visit(BLangUnaryExpr unaryExpr) { BType exprType; BType actualType = symTable.semanticError; if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = exprType; } } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = new BTypedescType(exprType, null); } } else { exprType = OperatorKind.ADD.equals(unaryExpr.operator) ? 
checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType); if (symbol == symTable.notFoundSymbol) { dlog.error(unaryExpr.pos, DiagnosticCode.UNARY_OP_INCOMPATIBLE_TYPES, unaryExpr.operator, exprType); } else { unaryExpr.opSymbol = (BOperatorSymbol) symbol; actualType = symbol.type.getReturnType(); } } } resultType = types.checkType(unaryExpr, actualType, expType); } public void visit(BLangTypeConversionExpr conversionExpr) { BType actualType = symTable.semanticError; for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) { annAttachment.attachPoints.add(AttachPoint.Point.TYPE); semanticAnalyzer.analyzeNode(annAttachment, this.env); } BLangExpression expr = conversionExpr.expr; if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) { resultType = checkExpr(expr, env, this.expType); return; } BType targetType = symResolver.resolveTypeNode(conversionExpr.typeNode, env); boolean requiresTypeInference = requireTypeInference(expr, false); if (requiresTypeInference) { targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos, targetType); } conversionExpr.targetType = targetType; BType expType = requiresTypeInference ? 
targetType : symTable.noType; BType sourceType = checkExpr(expr, env, expType); if (types.isTypeCastable(expr, sourceType, targetType)) { actualType = targetType; } else { dlog.error(conversionExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CAST, sourceType, targetType); } resultType = types.checkType(conversionExpr, actualType, this.expType); } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { bLangLambdaFunction.type = bLangLambdaFunction.function.symbol.type; bLangLambdaFunction.capturedClosureEnv = env.createClone(); env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.type, expType); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BType expectedType = expType; if (expectedType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expectedType; BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE) .collect(Collectors.collectingAndThen(Collectors.toList(), list -> { if (list.size() != 1) { return null; } return list.get(0); } )); if (invokableType != null) { expectedType = invokableType; } } if (expectedType.tag != TypeTags.INVOKABLE) { dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS); resultType = symTable.semanticError; return; } BInvokableType expectedInvocation = (BInvokableType) expectedType; populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes); bLangArrowFunction.body.expr.type = populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType); if (expectedInvocation.retType.tag == TypeTags.NONE) { expectedInvocation.retType = bLangArrowFunction.body.expr.type; } resultType = bLangArrowFunction.funcType = expectedInvocation; } public void visit(BLangXMLQName bLangXMLQName) { String prefix = bLangXMLQName.prefix.value; resultType = types.checkType(bLangXMLQName, symTable.stringType, expType); if 
(env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty() && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { dlog.error(bLangXMLQName.pos, DiagnosticCode.INVALID_NAMESPACE_PREFIX, prefix); bLangXMLQName.type = symTable.semanticError; return; } if (bLangXMLQName.prefix.value.isEmpty()) { return; } BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix)); if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { return; } if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { logUndefinedSymbolError(bLangXMLQName.pos, prefix); bLangXMLQName.type = symTable.semanticError; return; } if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) { xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value, (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos); } if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) { resultType = symTable.semanticError; return; } bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol; bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI; } private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix, BPackageSymbol pkgSymbol, DiagnosticPos pos) { BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env, names.fromString(localname), SymTag.CONSTANT); if (constSymbol == symTable.notFoundSymbol) { if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) { dlog.error(pos, DiagnosticCode.UNDEFINED_SYMBOL, prefix + ":" + localname); } return null; } BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol; if 
// --- continuation of findXMLNamespaceFromPackageConst: the referenced constant must be a
// string constant for it to be usable as a namespace declaration. ---
(constantSymbol.literalType.tag != TypeTags.STRING) {
    dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
    return null;
}
String constVal = (String) constantSymbol.value.value;
// The constant value is expected in the expanded-QName form "{namespaceURI}localName".
int s = constVal.indexOf('{');
int e = constVal.lastIndexOf('}');
if (e > s + 1) {
    pkgSymbol.isUsed = true;
    String nsURI = constVal.substring(s + 1, e);
    // NOTE(review): substring(e) keeps the closing '}' in the local part — verify
    // whether substring(e + 1) was intended here.
    String local = constVal.substring(e);
    return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner,
            pos, SOURCE);
}
// Constant did not contain a "{...}" namespace section.
dlog.error(pos, DiagnosticCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
return null;
}

/**
 * Type-checks an XML attribute: the attribute name is checked as a string QName in an
 * XML-attribute environment, the value is checked as a string, and the attribute symbol
 * is defined in the enclosing scope.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);

    // Check the attribute name.
    BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name;
    checkExpr(name, xmlAttributeEnv, symTable.stringType);
    if (name.prefix.value.isEmpty()) {
        // No prefix: this attribute is not bound to any declared namespace.
        name.namespaceURI = null;
    }

    // Check the attribute value.
    checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType);

    symbolEnter.defineNode(bLangXMLAttribute, env);
}

public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);

    // Collect the namespace prefixes actually used by the element tag and its attributes,
    // so that only referenced in-scope namespaces get attached to the literal below.
    Set<String> usedPrefixes = new HashSet<>();
    BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
    if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
        usedPrefixes.add(elemNamePrefix.value);
    }

    // Visit in-line namespace declarations first; their values must not contain interpolations.
    for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
        if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
            BLangXMLQuotedString value = attribute.value;
            if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
                dlog.error(value.pos, DiagnosticCode.INVALID_XML_NS_INTERPOLATION);
            }
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
        BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
        if (prefix != null &&
!prefix.value.isEmpty()) { usedPrefixes.add(prefix.value); } } bLangXMLElementLiteral.attributes.forEach(attribute -> { if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) { checkExpr(attribute, xmlElementEnv, symTable.noType); } }); Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv); Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX); if (namespaces.containsKey(defaultNs)) { bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs); } for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) { if (usedPrefixes.contains(nsEntry.getKey().value)) { bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue()); } } validateTags(bLangXMLElementLiteral, xmlElementEnv); bLangXMLElementLiteral.modifiedChildren = concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType, this.expType); if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) { markChildrenAsImmutable(bLangXMLElementLiteral); } } private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) { BLangXMLQName attrName = (BLangXMLQName) attribute.name; return (attrName.prefix.value.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) || attrName.prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE); } public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) { checkStringTemplateExprs(bLangXMLTextLiteral.textFragments, false); resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType); } public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) { checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments, false); if (expType == symTable.noType) { resultType = 
// --- continuation of visit(BLangXMLCommentLiteral): with no expected type the literal
// is checked directly against the xml comment type. ---
types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType,
            this.expType);
}

/**
 * Type-checks an XML processing-instruction literal: the target must be a string, the
 * data fragments are checked as string-template expressions, and the result is the xml
 * PI type (or its compatibility with the expected type).
 */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments, false);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType,
            this.expType);
}

// An XML quoted string (attribute value) is a string template; its result type is string.
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments, false);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}

// XML attribute access expressions are deprecated; checking one always reports an error.
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    dlog.error(xmlAttributeAccessExpr.pos, DiagnosticCode.DEPRECATED_XML_ATTRIBUTE_ACCESS);
    resultType = symTable.semanticError;
}

// A string template literal's interpolations are checked; the literal's type is string.
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs, false);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}

/**
 * Type-checks a raw template literal: determines the (possibly inferred) object type and
 * validates the `strings` and `insertions` expressions against the corresponding field
 * types.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType);

    if (type == symTable.semanticError) {
        resultType = type;
        return;
    }

    // determineRawTemplateLiteralType only returns object types past this point.
    BObjectType literalType = (BObjectType) type;
    BType stringsType = literalType.fields.get("strings").type;

    if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }

    BType insertionsType = literalType.fields.get("insertions").type;

    if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }

    resultType = type;
}
/**
 * Determines the object type a raw template literal should be checked against.
 * Falls back to the base RawTemplate type when there is no expected type (or it contains
 * `any`); otherwise validates that the expected type is a non-class object with at most
 * two fields and no attached methods.
 */
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    // No usable contextually-expected type: infer using the base RawTemplate type.
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }

    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticCode.INVALID_RAW_TEMPLATE_TYPE);

    if (type == symTable.semanticError) {
        return type;
    }

    // Raw template literals cannot be assigned to class types.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }

    BObjectType litObjType = (BObjectType) type;
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;

    // At most two fields are allowed on the target object type.
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }

    // The target object type must not declare any methods.
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }

    return type;
}

/**
 * Type-checks the expressions of a raw template literal against a list-typed (array or
 * tuple) field type.
 *
 * @return true if any expression failed type checking
 */
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
                                         DiagnosticCode code, DiagnosticPos pos) {
    BType listType = getResolvedIntersectionType(fieldType);
    boolean errored = false;

    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;

        // A fixed-length (sealed) array must match the expression count exactly.
        // NOTE(review): returns false ("not errored") after logging the size mismatch —
        // presumably because the diagnostic is already reported; confirm intent.
        if (arrayType.state == BArrayState.CLOSED_SEALED && (exprs.size() != arrayType.size)) {
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }

        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();

        // Too few expressions, or too many when there is no rest type, is a size mismatch.
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }

        int i;
        List<BType> memberTypes = tupleType.tupleTypes;

        // Check each expression against the corresponding tuple member type.
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError)
                    || errored;
        }

        // Any surplus expressions are checked against the rest type.
        if (size > requiredItems) {
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError)
                        || errored;
            }
        }
    } else {
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }

    return errored;
}

// Unwraps an intersection type to its effective type; other types pass through unchanged.
private BType getResolvedIntersectionType(BType type) {
    return type.tag != TypeTags.INTERSECTION ?
// --- continuation of getResolvedIntersectionType ---
type : ((BIntersectionType) type).effectiveType;
}

// Returns true if the type is `any` itself, or a union with `any` as a direct member.
private boolean containsAnyType(BType type) {
    if (type == symTable.anyType) {
        return true;
    }

    if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
    }

    return false;
}

/**
 * From a union expected type, picks the single member assignable to RawTemplate.
 * Non-union types pass through; zero matches returns the expected type unchanged (the
 * caller's type check reports the mismatch), and multiple matches is an ambiguity error.
 */
private BType getCompatibleRawTemplateType(BType expType, DiagnosticPos pos) {
    if (expType.tag != TypeTags.UNION) {
        return expType;
    }

    BUnionType unionType = (BUnionType) expType;
    List<BType> compatibleTypes = new ArrayList<>();

    for (BType type : unionType.getMemberTypes()) {
        if (types.isAssignable(type, symTable.rawTemplateType)) {
            compatibleTypes.add(type);
        }
    }

    if (compatibleTypes.size() == 0) {
        return expType;
    }

    if (compatibleTypes.size() > 1) {
        dlog.error(pos, DiagnosticCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
                expType);
        return symTable.semanticError;
    }

    return compatibleTypes.get(0);
}

// An int range expression's bounds are both ints; the result is an int array.
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    checkExpr(intRangeExpression.startExpr, env, symTable.intType);
    checkExpr(intRangeExpression.endExpr, env, symTable.intType);
    resultType = new BArrayType(symTable.intType);
}

// A rest-args expression takes the type of its inner expression.
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}

// A named-arg expression takes the type of its inner expression.
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.type = bLangNamedArgsExpression.expr.type;
}

/**
 * Type-checks a match expression: the matched expression and each pattern clause are
 * checked in a fresh block environment, with pattern variables (except `_`-suffixed
 * ignores) defined into that environment.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);

    // Type-check and resolve patterns and their expressions.
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, expType);
        pattern.variable.type =
symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv); }); LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression); BType actualType; if (matchExprTypes.contains(symTable.semanticError)) { actualType = symTable.semanticError; } else if (matchExprTypes.size() == 1) { actualType = matchExprTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, matchExprTypes); } resultType = types.checkType(bLangMatchExpression, actualType, expType); } @Override public void visit(BLangCheckedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangQueryExpr queryExpr) { if (prevEnvs.empty()) { prevEnvs.push(env.createClone()); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek().createClone()); selectClauses.push(queryExpr.getSelectClause()); List<BLangNode> clauses = queryExpr.getQueryClauses(); BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection(); clauses.forEach(clause -> clause.accept(this)); BType actualType = findAssignableType(queryEnvs.peek(), selectClauses.peek().expression, collectionNode.type, expType, queryExpr); resultType = (actualType == symTable.semanticError) ? 
actualType : types.checkType(queryExpr.pos, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangQueryAction queryAction) { if (prevEnvs.empty()) { prevEnvs.push(env.createClone()); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek().createClone()); selectClauses.push(null); BLangDoClause doClause = queryAction.getDoClause(); List<BLangNode> clauses = queryAction.getQueryClauses(); clauses.forEach(clause -> clause.accept(this)); semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek())); BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangFromClause fromClause) { queryEnvs.push(SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop())); checkExpr(fromClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(fromClause); handleInputClauseVariables(fromClause, queryEnvs.peek()); } @Override public void visit(BLangJoinClause joinClause) { queryEnvs.push(SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop())); checkExpr(joinClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(joinClause); handleInputClauseVariables(joinClause, queryEnvs.peek()); if (joinClause.onClause != null) { ((BLangOnClause) joinClause.onClause).accept(this); } } @Override public void visit(BLangLetClause letClause) { queryEnvs.push(SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop())); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, queryEnvs.peek()); } } @Override public void visit(BLangWhereClause whereClause) { handleFilterClauses(whereClause.expression); } @Override 
// No work here: the select expression is checked as part of the enclosing query
// expression (it is passed to findAssignableType by visit(BLangQueryExpr)).
public void visit(BLangSelectClause selectClause) {
}

// No work here: the do clause body is analyzed by visit(BLangQueryAction).
@Override
public void visit(BLangDoClause doClause) {
}

// The on-conflict expression must be assignable to the error type.
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
    if (!types.isAssignable(exprType, symTable.errorType)) {
        dlog.error(onConflictClause.expression.pos, DiagnosticCode.ERROR_TYPE_EXPECTED,
                symTable.errorType, exprType);
    }
}

// The limit expression must be assignable to int.
@Override
public void visit(BLangLimitClause limitClause) {
    BType exprType = checkExpr(limitClause.expression, queryEnvs.peek());
    if (!types.isAssignable(exprType, symTable.intType)) {
        dlog.error(limitClause.expression.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                symTable.intType, exprType);
    }
}

/**
 * Type-checks a join's `on` condition: the LHS expression is checked in the environment
 * preceding the join's input node, the RHS in the environment following it, and the LHS
 * type must be assignable to the RHS type.
 */
@Override
public void visit(BLangOnClause onClause) {
    SymbolEnv lhsExprEnv, rhsExprEnv;
    BType lhsType, rhsType;
    BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
    // Visit the LHS before resolving the RHS environment.
    lhsExprEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
    lhsType = checkExpr(onClause.lhsExpr, lhsExprEnv);
    rhsExprEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
    rhsType = checkExpr(onClause.rhsExpr, rhsExprEnv);
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}

// Every order-by key expression must be of an ordered type.
@Override
public void visit(BLangOrderByClause orderByClause) {
    for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
        BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), queryEnvs.peek());
        if (!types.isOrderedType(exprType)) {
            dlog.error(((BLangOrderKey) orderKeyNode).expression.pos,
                    DiagnosticCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}

/**
 * Resolves the result type of a query expression from its select expression, the iterated
 * collection's type, and the contextually expected type.
 */
private BType findAssignableType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
                                 BType targetType, BLangQueryExpr queryExpr) {
    List<BType> assignableSelectTypes = new ArrayList<>();
    BType actualType = symTable.semanticError;

    // Partition the expected type's members: error/nil-assignable vs. the rest.
    Map<Boolean, List<BType>> resultTypeMap = types.getAllTypes(targetType).stream()
.collect(Collectors.groupingBy(memberType -> (types.isAssignable(memberType, symTable.errorType) || (types.isAssignable(memberType, symTable.nilType))))); final boolean containsXmlOrStr = types.getAllTypes(targetType).stream() .anyMatch(t -> t.tag == TypeTags.STRING || t.tag == TypeTags.XML); for (BType type : resultTypeMap.get(false)) { BType selectType; switch (type.tag) { case TypeTags.ARRAY: selectType = checkExpr(selectExp, env, ((BArrayType) type).eType); break; case TypeTags.TABLE: selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint, true, true)); break; case TypeTags.STREAM: selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint, true, true)); break; default: selectType = checkExpr(selectExp, env, type); } if (selectType != symTable.semanticError) { assignableSelectTypes.add(selectType); } } if (assignableSelectTypes.size() == 1) { actualType = assignableSelectTypes.get(0); if (!queryExpr.isStream && !queryExpr.isTable && !containsXmlOrStr) { actualType = new BArrayType(actualType); } } else if (assignableSelectTypes.size() > 1) { dlog.error(selectExp.pos, DiagnosticCode.AMBIGUOUS_TYPES, assignableSelectTypes); return actualType; } else { return actualType; } BType nextMethodReturnType = null; BType errorType = null; if (collectionType.tag != TypeTags.SEMANTIC_ERROR) { switch (collectionType.tag) { case TypeTags.STREAM: errorType = ((BStreamType) collectionType).error; break; case TypeTags.OBJECT: nextMethodReturnType = types.getVarTypeFromIterableObject((BObjectType) collectionType); break; default: BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); nextMethodReturnType = types.getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType); } } if (nextMethodReturnType != null) { Map<Boolean, List<BType>> collectionTypeMap = 
types.getAllTypes(nextMethodReturnType).stream() .collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errorType))); List<BType> errorTypes = collectionTypeMap.get(true); if (errorTypes != null && !errorTypes.isEmpty()) { if (errorTypes.size() == 1) { errorType = errorTypes.get(0); } else { errorType = BUnionType.create(null, errorTypes.toArray(new BType[errorTypes.size()])); } } } if (queryExpr.isStream) { return new BStreamType(TypeTags.STREAM, actualType, errorType, symTable.streamType.tsymbol); } else if (queryExpr.isTable) { final BTableType tableType = new BTableType(TypeTags.TABLE, actualType, symTable.tableType.tsymbol); if (!queryExpr.fieldNameIdentifierList.isEmpty()) { tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream() .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); return BUnionType.create(null, tableType, symTable.errorType); } return tableType; } else if (errorType != null) { return BUnionType.create(null, actualType, errorType); } return actualType; } @Override public void visit(BLangDo doNode) { if (doNode.onFailClause != null) { doNode.onFailClause.accept(this); } } public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> stmt.accept(this)); } private void handleFilterClauses (BLangExpression filterExpression) { checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType); BType actualType = filterExpression.type; if (TypeTags.TUPLE == actualType.tag) { dlog.error(filterExpression.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.booleanType, actualType); } queryEnvs.push(typeNarrower.evaluateTruth(filterExpression, selectClauses.peek(), queryEnvs.pop())); } private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) { if (bLangInputClause.variableDefinitionNode == null) { return; } BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable(); if 
(bLangInputClause.isDeclaredWithVar) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv); if (types.isAssignable(bLangInputClause.varType, typeNodeType)) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } if (typeNodeType != symTable.semanticError) { dlog.error(variableNode.typeNode.pos, DiagnosticCode.INCOMPATIBLE_TYPES, bLangInputClause.varType, typeNodeType); } semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) { String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic"; boolean firstVisit = checkedExpr.expr.type == null; BType exprExpType; if (expType == symTable.noType) { exprExpType = symTable.noType; } else { exprExpType = BUnionType.create(null, expType, symTable.errorType); } BType exprType = checkExpr(checkedExpr.expr, env, exprExpType); if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = checkedExpr.type; exprType = checkedExpr.expr.type; } } if (exprType.tag != TypeTags.UNION) { if (types.isAssignable(exprType, symTable.errorType)) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType); } else if (exprType != symTable.semanticError) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); } checkedExpr.type = symTable.semanticError; return; } BUnionType unionType = (BUnionType) exprType; Map<Boolean, List<BType>> resultTypeMap = unionType.getMemberTypes().stream() .collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errorType))); checkedExpr.equivalentErrorTypeList = 
resultTypeMap.get(true); if (checkedExpr.equivalentErrorTypeList == null || checkedExpr.equivalentErrorTypeList.size() == 0) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); checkedExpr.type = symTable.semanticError; return; } List<BType> nonErrorTypeList = resultTypeMap.get(false); if (nonErrorTypeList == null || nonErrorTypeList.size() == 0) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType); checkedExpr.type = symTable.semanticError; return; } BType actualType; if (nonErrorTypeList.size() == 1) { actualType = nonErrorTypeList.get(0); } else { actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypeList)); } resultType = types.checkType(checkedExpr, actualType, expType); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { resultType = serviceConstructorExpr.serviceNode.symbol.type; } @Override public void visit(BLangTypeTestExpr typeTestExpr) { typeTestExpr.typeNode.type = symResolver.resolveTypeNode(typeTestExpr.typeNode, env); checkExpr(typeTestExpr.expr, env); resultType = types.checkType(typeTestExpr, symTable.booleanType, expType); } public void visit(BLangAnnotAccessExpr annotAccessExpr) { checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc); BType actualType = symTable.semanticError; BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env, names.fromString(annotAccessExpr.pkgAlias.getValue()), names.fromString(annotAccessExpr.annotationName.getValue())); if (symbol == this.symTable.notFoundSymbol) { this.dlog.error(annotAccessExpr.pos, DiagnosticCode.UNDEFINED_ANNOTATION, annotAccessExpr.annotationName.getValue()); } else { annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol; BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? 
symTable.trueType : ((BAnnotationSymbol) symbol).attachedType.type; actualType = BUnionType.create(null, annotType, symTable.nilType); } this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType); } private boolean isValidVariableReference(BLangExpression varRef) { switch (varRef.getKind()) { case SIMPLE_VARIABLE_REF: case RECORD_VARIABLE_REF: case TUPLE_VARIABLE_REF: case ERROR_VARIABLE_REF: case FIELD_BASED_ACCESS_EXPR: case INDEX_BASED_ACCESS_EXPR: case XML_ATTRIBUTE_ACCESS_EXPR: return true; default: dlog.error(varRef.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, varRef.type); return false; } } private BType getEffectiveReadOnlyType(DiagnosticPos pos, BType origTargetType) { if (origTargetType == symTable.readonlyType) { if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } return ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } if (origTargetType.tag != TypeTags.UNION) { return origTargetType; } boolean hasReadOnlyType = false; LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>(); for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) { if (memberType == symTable.readonlyType) { hasReadOnlyType = true; continue; } nonReadOnlyTypes.add(memberType); } if (!hasReadOnlyType) { return origTargetType; } if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) { return origTargetType; } BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes); nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names, new HashSet<>())); return nonReadOnlyUnion; } private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) { SymbolEnv 
arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env); bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv)); return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType); } private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) { if (paramTypes.size() != bLangArrowFunction.params.size()) { dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH, paramTypes.size(), bLangArrowFunction.params.size()); resultType = symTable.semanticError; bLangArrowFunction.params.forEach(param -> param.type = symTable.semanticError); return; } for (int i = 0; i < bLangArrowFunction.params.size(); i++) { BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i); BType bType = paramTypes.get(i); BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); valueTypeNode.setTypeKind(bType.getKind()); paramIdentifier.setTypeNode(valueTypeNode); paramIdentifier.type = bType; } } private void checkSelfReferences(DiagnosticPos pos, SymbolEnv env, BVarSymbol varSymbol) { if (env.enclVarSym == varSymbol) { dlog.error(pos, DiagnosticCode.SELF_REFERENCE_VAR, varSymbol.name); } } public List<BType> getListWithErrorTypes(int count) { List<BType> list = new ArrayList<>(count); for (int i = 0; i < count; i++) { list.add(symTable.semanticError); } return list; } private void checkFunctionInvocationExpr(BLangInvocation iExpr) { Name funcName = names.fromIdNode(iExpr.name); Name pkgAlias = names.fromIdNode(iExpr.pkgAlias); BSymbol funcSymbol = symTable.notFoundSymbol; BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr)); if (pkgSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_MODULE, pkgAlias); } else { if (funcSymbol == symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, 
env, pkgAlias, funcName); if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { funcSymbol = symbol; } if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) && (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) { funcSymbol = symbol; } } if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) { BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol; } } if ((funcSymbol.tag & SymTag.ERROR) == SymTag.ERROR || ((funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR && funcSymbol.type.tag == TypeTags.ERROR)) { iExpr.symbol = funcSymbol; iExpr.type = funcSymbol.type; checkErrorConstructorInvocation(iExpr); return; } else if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) { if (!missingNodesHelper.isMissingNode(funcName)) { dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName); } iExpr.argExprs.forEach(arg -> checkExpr(arg, env)); resultType = symTable.semanticError; return; } if (isFunctionPointer(funcSymbol)) { iExpr.functionPointerInvocation = true; markAndRegisterClosureVariable(funcSymbol, iExpr.pos); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID); if (langLibPackageID) { this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); } iExpr.symbol = funcSymbol; checkInvocationParamAndReturnType(iExpr); if (langLibPackageID && !iExpr.argExprs.isEmpty()) { checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).type, funcSymbol); } } private void markAndRegisterClosureVariable(BSymbol symbol, DiagnosticPos pos) { BLangInvokableNode encInvokable = 
env.enclInvokable; if (symbol.owner instanceof BPackageSymbol) { return; } if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) && !isFunctionArgument(symbol, encInvokable.requiredParams)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.node.getKind() == NodeKind.ARROW_EXPR && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; ((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } BLangNode node = env.node; SymbolEnv cEnv = env; while (node != null && node.getKind() != NodeKind.FUNCTION) { if (node.getKind() == NodeKind.TRANSACTION || node.getKind() == NodeKind.RETRY || node.getKind() == NodeKind.ON_FAIL) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, 
symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; } break; } else { SymbolEnv enclEnv = cEnv.enclEnv; if (enclEnv == null) { break; } cEnv = enclEnv; node = cEnv.node; } } } private boolean isNotFunction(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) { return false; } if (isFunctionPointer(funcSymbol)) { return false; } return true; } private boolean isFunctionPointer(BSymbol funcSymbol) { if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) { return false; } return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE && funcSymbol.kind == SymbolKind.FUNCTION && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE; } private void checkErrorConstructorInvocation(BLangInvocation iExpr) { BErrorType errorType = (BErrorType) iExpr.symbol.type; if (this.expType == symTable.noType) { this.expType = errorType; } if (!types.isAssignable(errorType, this.expType)) { dlog.error(iExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, this.expType, errorType); resultType = symTable.semanticError; } if (iExpr.argExprs.isEmpty() && !iExpr.requiredArgs.isEmpty()) { resultType = iExpr.type; return; } if (iExpr.argExprs.isEmpty()) { dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_ARG_ERROR_MESSAGE); return; } BLangExpression errorMessageArg = iExpr.argExprs.get(0); if (errorMessageArg.getKind() == NodeKind.NAMED_ARGS_EXPR) { dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_ARG_ERROR_MESSAGE); return; } checkExpr(errorMessageArg, this.env, symTable.stringType); iExpr.requiredArgs.add(0, errorMessageArg); iExpr.argExprs.remove(0); if (!iExpr.argExprs.isEmpty()) { BLangExpression secondArg = iExpr.argExprs.get(0); if (secondArg.getKind() != NodeKind.NAMED_ARGS_EXPR) { checkExpr(secondArg, this.env, symTable.errorType); iExpr.requiredArgs.add(1, secondArg); iExpr.argExprs.remove(0); } } if (errorType.detailType.tag == 
TypeTags.MAP) { BMapType detailMapType = (BMapType) errorType.detailType; List<BLangNamedArgsExpression> namedArgs = getProvidedErrorDetails(iExpr); if (namedArgs == null) { resultType = symTable.semanticError; return; } for (BLangNamedArgsExpression namedArg : namedArgs) { if (!types.isAssignable(namedArg.expr.type, detailMapType.constraint)) { dlog.error(namedArg.pos, DiagnosticCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArg.name, detailMapType.constraint, namedArg.expr.type); resultType = symTable.semanticError; } } if (resultType == symTable.semanticError) { return; } } else if (errorType.detailType.tag == TypeTags.RECORD) { BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType; BRecordType recordType = createErrorDetailRecordType(iExpr, targetErrorDetailRec); if (resultType == symTable.semanticError || targetErrorDetailRec == null) { return; } if (!types.isAssignable(recordType, targetErrorDetailRec)) { reportErrorDetailMissmatchError(iExpr, targetErrorDetailRec, recordType); resultType = symTable.semanticError; return; } } else { resultType = symTable.semanticError; } setErrorDetailArgsToNamedArgsList(iExpr); resultType = errorType; if (iExpr.symbol == symTable.errorType.tsymbol) { iExpr.symbol = ((BErrorTypeSymbol) errorType.tsymbol).ctorSymbol; } } private void reportErrorDetailMissmatchError(BLangInvocation iExpr, BRecordType targetErrorDetailRec, BRecordType recordType) { boolean detailedErrorReported = false; Set<String> checkedFieldNames = new HashSet<>(); for (Map.Entry<String, BField> fieldEntry : targetErrorDetailRec.fields.entrySet()) { checkedFieldNames.add(fieldEntry.getKey()); BField argField = recordType.fields.get(fieldEntry.getKey()); if (argField == null && !Symbols.isOptional(fieldEntry.getValue().symbol)) { dlog.error(iExpr.pos, DiagnosticCode.MISSING_ERROR_DETAIL_ARG, fieldEntry.getKey()); detailedErrorReported = true; } else if (!types.isAssignable(argField.type, fieldEntry.getValue().type)) { dlog.error(iExpr.pos, 
DiagnosticCode.INVALID_ERROR_DETAIL_ARG_TYPE, fieldEntry.getKey(), fieldEntry.getValue().type, argField.type); } } if (recordType.fields.size() > checkedFieldNames.size()) { for (Map.Entry<String, BField> fieldEntry : recordType.fields.entrySet()) { if (!checkedFieldNames.contains(fieldEntry.getKey())) { BField field = fieldEntry.getValue(); if (targetErrorDetailRec.sealed) { dlog.error(iExpr.pos, DiagnosticCode.UNKNOWN_DETAIL_ARG_TO_SEALED_ERROR_DETAIL_REC, fieldEntry.getKey(), targetErrorDetailRec); detailedErrorReported = true; } else if (!types.isAssignable(field.type, targetErrorDetailRec.restFieldType)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE, fieldEntry.getKey(), targetErrorDetailRec); detailedErrorReported = true; } } } } if (!detailedErrorReported) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ERROR_CONSTRUCTOR_DETAIL, iExpr); } } private void setErrorDetailArgsToNamedArgsList(BLangInvocation iExpr) { List<BLangExpression> namedArgPositions = new ArrayList<>(iExpr.argExprs.size()); for (int i = 0; i < iExpr.argExprs.size(); i++) { BLangExpression argExpr = iExpr.argExprs.get(i); if (argExpr.getKind() == NodeKind.NAMED_ARGS_EXPR) { iExpr.requiredArgs.add(argExpr); namedArgPositions.add(argExpr); } else { dlog.error(argExpr.pos, DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG); resultType = symTable.semanticError; } } for (BLangExpression expr : namedArgPositions) { iExpr.argExprs.remove(expr); } } /** * Create a error detail record using all metadata from {@code targetErrorDetailsType} and put actual error details * from {@code iExpr} expression. 
* * @param iExpr error constructor invocation * @param targetErrorDetailsType target error details type to extract metadata such as pkgId from * @return error detail record */ private BRecordType createErrorDetailRecordType(BLangInvocation iExpr, BRecordType targetErrorDetailsType) { List<BLangNamedArgsExpression> namedArgs = getProvidedErrorDetails(iExpr); if (namedArgs == null) { return null; } BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol( SymTag.RECORD, targetErrorDetailsType.tsymbol.flags, Names.EMPTY, targetErrorDetailsType.tsymbol.pkgID, symTable.recordType, null, targetErrorDetailsType.tsymbol.pos, VIRTUAL); BRecordType recordType = new BRecordType(recordTypeSymbol); recordType.sealed = targetErrorDetailsType.sealed; recordType.restFieldType = targetErrorDetailsType.restFieldType; Set<Name> availableErrorDetailFields = new HashSet<>(); for (BLangNamedArgsExpression arg : namedArgs) { Name fieldName = names.fromIdNode(arg.name); BField field = new BField(fieldName, arg.pos, new BVarSymbol(0, fieldName, null, arg.type, null, arg.pos, VIRTUAL)); recordType.fields.put(field.name.value, field); availableErrorDetailFields.add(fieldName); } for (BField field : targetErrorDetailsType.fields.values()) { boolean notRequired = (field.symbol.flags & Flags.REQUIRED) != Flags.REQUIRED; if (notRequired && !availableErrorDetailFields.contains(field.name)) { BField defaultableField = new BField(field.name, iExpr.pos, new BVarSymbol(field.symbol.flags, field.name, null, field.type, null, iExpr.pos, VIRTUAL)); recordType.fields.put(defaultableField.name.value, defaultableField); } } return recordType; } private List<BLangNamedArgsExpression> getProvidedErrorDetails(BLangInvocation iExpr) { List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); for (int i = 0; i < iExpr.argExprs.size(); i++) { BLangExpression argExpr = iExpr.argExprs.get(i); checkExpr(argExpr, env); if (argExpr.getKind() != NodeKind.NAMED_ARGS_EXPR) { dlog.error(argExpr.pos, 
DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG); resultType = symTable.semanticError; return null; } namedArgs.add((BLangNamedArgsExpression) argExpr); } return namedArgs; } private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) { if (objectType.getKind() == TypeKind.SERVICE && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticCode.SERVICE_FUNCTION_INVALID_INVOCATION); return; } Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value)); BSymbol funcSymbol = symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol); if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) { if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) { dlog.error(iExpr.name.pos, DiagnosticCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value, objectType); resultType = symTable.semanticError; return; } } else { iExpr.symbol = funcSymbol; } if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_INIT_INVOCATION); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } checkInvocationParamAndReturnType(iExpr); } private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) { BLangVariableReference varRef = (BLangVariableReference) aInv.expr; if (((varRef.symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async) { dlog.error(aInv.pos, 
DiagnosticCode.INVALID_ACTION_INVOCATION, varRef.type); this.resultType = symTable.semanticError; aInv.symbol = symTable.notFoundSymbol; return; } BVarSymbol epSymbol = (BVarSymbol) varRef.symbol; Name remoteMethodQName = names .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value)); Name actionName = names.fromIdNode(aInv.name); BSymbol remoteFuncSymbol = symResolver .lookupMemberSymbol(aInv.pos, ((BObjectTypeSymbol) epSymbol.type.tsymbol).methodScope, env, remoteMethodQName, SymTag.FUNCTION); if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) { dlog.error(aInv.name.pos, DiagnosticCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType); resultType = symTable.semanticError; return; } if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && aInv.remoteMethodCall) { dlog.error(aInv.pos, DiagnosticCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName); this.resultType = symTable.semanticError; return; } aInv.symbol = remoteFuncSymbol; checkInvocationParamAndReturnType(aInv); } private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) { return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol; } private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) { Name funcName = names.fromString(iExpr.name.value); BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName); if (funcSymbol == symTable.notFoundSymbol) { return symTable.notFoundSymbol; } iExpr.symbol = funcSymbol; iExpr.langLibInvocation = true; SymbolEnv enclEnv = this.env; this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); if (iExpr.argExprs.isEmpty() || !iExpr.argExprs.get(0).equals(iExpr.expr)) { iExpr.argExprs.add(0, iExpr.expr); } checkInvocationParamAndReturnType(iExpr); this.env = enclEnv; return funcSymbol; } private void checkInvocationParamAndReturnType(BLangInvocation iExpr) { BType actualType = checkInvocationParam(iExpr); resultType = 
types.checkType(iExpr, actualType, this.expType); } private BType checkInvocationParam(BLangInvocation iExpr) { if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type); return symTable.noType; } List<BType> paramTypes = ((BInvokableType) iExpr.symbol.type).getParameterTypes(); int parameterCount = paramTypes.size(); iExpr.requiredArgs = new ArrayList<>(); int i = 0; BLangExpression vararg = null; boolean foundNamedArg = false; for (BLangExpression expr : iExpr.argExprs) { switch (expr.getKind()) { case NAMED_ARGS_EXPR: foundNamedArg = true; if (i < parameterCount) { iExpr.requiredArgs.add(expr); } else { dlog.error(expr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); } i++; break; case REST_ARGS_EXPR: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticCode.REST_ARG_DEFINED_AFTER_NAMED_ARG); continue; } vararg = expr; break; default: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG); } if (i < parameterCount) { iExpr.requiredArgs.add(expr); } else { iExpr.restArgs.add(expr); } i++; break; } } return checkInvocationArgs(iExpr, paramTypes, vararg); } private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg) { BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol; BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type; BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol; List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params); List<BLangExpression> nonRestArgs = iExpr.requiredArgs; List<BVarSymbol> valueProvidedParams = new ArrayList<>(); List<BVarSymbol> requiredParams = new ArrayList<>(); for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.defaultableParam) { continue; } requiredParams.add(nonRestParam); } int i = 0; for (; i < nonRestArgs.size(); i++) { BLangExpression arg = 
nonRestArgs.get(i); BType expectedType = paramTypes.get(i); if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) { types.checkType(arg.pos, arg.type, expectedType, DiagnosticCode.INCOMPATIBLE_TYPES); types.setImplicitCastExpr(arg, arg.type, expectedType); } if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) { if (i < nonRestParams.size()) { BVarSymbol param = nonRestParams.get(i); checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation); valueProvidedParams.add(param); requiredParams.remove(param); continue; } break; } if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) { BLangIdentifier argName = ((NamedArgNode) arg).getName(); BVarSymbol varSym = null; for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.getName().value.equals(argName.value)) { varSym = nonRestParam; } } if (varSym == null) { dlog.error(arg.pos, DiagnosticCode.UNDEFINED_PARAMETER, argName); break; } requiredParams.remove(varSym); if (valueProvidedParams.contains(varSym)) { dlog.error(arg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, varSym.name.value); continue; } checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation); valueProvidedParams.add(varSym); } } BVarSymbol restParam = invokableTypeSymbol.restParam; boolean errored = false; if (!requiredParams.isEmpty() && vararg == null) { for (BVarSymbol requiredParam : requiredParams) { dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_PARAMETER, requiredParam.name, iExpr.name.value); } errored = true; } if (restParam == null && (!iExpr.restArgs.isEmpty() || (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) { dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); errored = true; } if (errored) { return symTable.semanticError; } BType restType = restParam == null ? 
null : restParam.type; if (nonRestArgs.size() < nonRestParams.size() && vararg != null) { List<BType> tupleMemberTypes = new ArrayList<>(); BType tupleRestType = null; for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) { tupleMemberTypes.add(paramTypes.get(j)); } if (restType != null) { if (restType.tag == TypeTags.ARRAY) { tupleRestType = ((BArrayType) restType).eType; } else if (restType.tag == TypeTags.TUPLE) { BTupleType restTupleType = (BTupleType) restType; tupleMemberTypes.addAll(restTupleType.tupleTypes); if (restTupleType.restType != null) { tupleRestType = restTupleType.restType; } } } BTupleType tupleType = new BTupleType(tupleMemberTypes); tupleType.restType = tupleRestType; restType = tupleType; } if (restType == null && (vararg != null || !iExpr.restArgs.isEmpty())) { dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); return symTable.semanticError; } if (vararg != null && !iExpr.restArgs.isEmpty()) { BType elementType = ((BArrayType) restType).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } checkTypeParamExpr(vararg, this.env, restType, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); } else if (vararg != null) { checkTypeParamExpr(vararg, this.env, restType, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); } else if (!iExpr.restArgs.isEmpty()) { if (restType.tag == TypeTags.ARRAY) { BType elementType = ((BArrayType) restType).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } } else { BTupleType tupleType = (BTupleType) restType; List<BType> tupleMemberTypes = tupleType.tupleTypes; BType tupleRestType = tupleType.restType; int tupleMemCount = tupleMemberTypes.size(); for (int j = 0; j < iExpr.restArgs.size(); j++) { BLangExpression restArg = iExpr.restArgs.get(j); BType memType = j < tupleMemCount ? 
tupleMemberTypes.get(j) : tupleRestType; checkTypeParamExpr(restArg, this.env, memType, true); } } } BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType()); if (Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) && Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) { retType = typeBuilder.build(retType, iExpr); } boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID); String sortFuncName = "sort"; if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) { checkArrayLibSortFuncArgs(iExpr); } if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) { return this.generateFutureType(invokableSymbol, retType); } else { return retType; } } private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) { boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX); return new BFutureType(TypeTags.FUTURE, retType, null, isWorkerStart); } private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral); } private void checkTypeParamExpr(DiagnosticPos pos, BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { if (typeParamAnalyzer.notRequireTypeParams(env)) { checkExpr(arg, env, expectedType); return; } if (requireTypeInference(arg, inferTypeForNumericLiteral)) { BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env); BType inferredType = checkExpr(arg, env, expType); typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType); return; } checkExpr(arg, env, expectedType); typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.type, this.env, expectedType); } private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) { switch (expr.getKind()) { case GROUP_EXPR: return 
requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral); case ARROW_EXPR: case LIST_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: return true; case NUMERIC_LITERAL: return inferTypeForNumericLiteral; default: return false; } } private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) { BType fieldType = symTable.semanticError; boolean keyValueField = field.isKeyValueField(); boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP; boolean readOnlyConstructorField = false; String fieldName = null; DiagnosticPos pos = null; BLangExpression valueExpr = null; if (keyValueField) { valueExpr = ((BLangRecordKeyValueField) field).valueExpr; } else if (!spreadOpField) { valueExpr = (BLangRecordVarNameField) field; } switch (mappingType.tag) { case TypeTags.RECORD: if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; fieldType = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType); readOnlyConstructorField = keyValField.readonly; pos = key.expr.pos; fieldName = getKeyValueFieldName(keyValField); } else if (spreadOpField) { BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; checkExpr(spreadExpr, this.env); BType spreadExprType = spreadExpr.type; if (spreadExprType.tag == TypeTags.MAP) { return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint, getAllFieldType((BRecordType) mappingType), DiagnosticCode.INCOMPATIBLE_TYPES); } if (spreadExprType.tag != TypeTags.RECORD) { dlog.error(spreadExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadExprType); return symTable.semanticError; } boolean errored = false; for (BField bField : ((BRecordType) spreadExprType).fields.values()) { BType specFieldType = bField.type; BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, this.env, bField.name, (BRecordType) 
mappingType); if (expectedFieldType != symTable.semanticError && !types.isAssignable(specFieldType, expectedFieldType)) { dlog.error(spreadExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_FIELD, expectedFieldType, bField.name, specFieldType); if (!errored) { errored = true; } } } return errored ? symTable.semanticError : symTable.noType; } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; fieldType = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } break; case TypeTags.MAP: if (spreadOpField) { BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; BType spreadOpType = checkExpr(spreadExp, this.env); BType spreadOpMemberType; switch (spreadOpType.tag) { case TypeTags.RECORD: List<BType> types = new ArrayList<>(); BRecordType recordType = (BRecordType) spreadOpType; for (BField recField : recordType.fields.values()) { types.add(recField.type); } if (!recordType.sealed) { types.add(recordType.restFieldType); } spreadOpMemberType = getRepresentativeBroadType(types); break; case TypeTags.MAP: spreadOpMemberType = ((BMapType) spreadOpType).constraint; break; default: dlog.error(spreadExp.pos, DiagnosticCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadOpType); return symTable.semanticError; } return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint, DiagnosticCode.INCOMPATIBLE_TYPES); } boolean validMapKey; if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey); readOnlyConstructorField = keyValField.readonly; pos = key.pos; fieldName = getKeyValueFieldName(keyValField); } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; validMapKey = 
checkValidJsonOrMapLiteralKeyExpr(varNameField, false); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError; break; } if (readOnlyConstructorField) { if (types.isSelectivelyImmutableType(fieldType)) { fieldType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) fieldType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } else if (!types.isInherentlyImmutableType(fieldType)) { dlog.error(pos, DiagnosticCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType); fieldType = symTable.semanticError; } } if (spreadOpField) { valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } BLangExpression exprToCheck = valueExpr; if (this.nonErrorLoggingCheck) { valueExpr.cloneAttempt++; exprToCheck = nodeCloner.clone(valueExpr); } else { ((BLangNode) field).type = fieldType; } return checkExpr(exprToCheck, this.env, fieldType); } private BType checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) { Name fieldName; if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.type == symTable.semanticError) { return symTable.semanticError; } LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (recordType.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(recordType.restFieldType); } return BUnionType.create(null, fieldTypes); } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr; fieldName = names.fromIdNode(varRef.variableName); } else if (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING) { fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value); } else { 
dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY); return symTable.semanticError; } return checkRecordLiteralKeyByName(keyExpr.pos, this.env, fieldName, recordType); } private BType checkRecordLiteralKeyByName(DiagnosticPos pos, SymbolEnv env, Name key, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(pos, env, key, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return fieldSymbol.type; } if (recordType.sealed) { dlog.error(pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key, recordType.tsymbol.type.getKind().typeName(), recordType); return symTable.semanticError; } return recordType.restFieldType; } private BType getAllFieldType(BRecordType recordType) { LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BField field : recordType.fields.values()) { possibleTypes.add(field.type); } BType restFieldType = recordType.restFieldType; if (restFieldType != null && restFieldType != symTable.noType) { possibleTypes.add(restFieldType); } return BUnionType.create(null, possibleTypes); } private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) { if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.type == symTable.semanticError) { return false; } return true; } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF || (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING)) { return true; } dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY); return false; } private BType addNilForNillableAccessType(BType actualType) { if (actualType.isNullable()) { return actualType; } return BUnionType.create(null, actualType, symTable.nilType); } private BType checkRecordRequiredFieldAccess(BLangVariableReference varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, 
recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordOptionalFieldAccess(BLangVariableReference varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol)) { return symTable.semanticError; } varReferExpr.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkRecordRestFieldAccess(BLangVariableReference varReferExpr, Name fieldName, BRecordType recordType) { BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { return symTable.semanticError; } if (recordType.sealed) { return symTable.semanticError; } return recordType.restFieldType; } private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess, Name fieldName, BObjectType objectType) { BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos, this.env, fieldName, objectType.tsymbol); if (fieldSymbol != symTable.notFoundSymbol) { bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, fieldName.value)); fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol); if (fieldSymbol == symTable.notFoundSymbol) { dlog.error(bLangFieldBasedAccess.field.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol); return symTable.semanticError; } bLangFieldBasedAccess.symbol = fieldSymbol; return fieldSymbol.type; } private BType checkTupleFieldType(BType tupleType, int 
indexValue) {
        BTupleType bTupleType = (BTupleType) tupleType;
        if (bTupleType.tupleTypes.size() <= indexValue && bTupleType.restType != null) {
            return bTupleType.restType;
        } else if (indexValue < 0 || bTupleType.tupleTypes.size() <= indexValue) {
            return symTable.semanticError;
        }
        return bTupleType.tupleTypes.get(indexValue);
    }

    // Validates that an XML element literal's start and end tag names agree.
    // A missing end tag (self-closing element) is accepted as-is; a mismatch
    // between two qualified names, or a qname paired with a non-qname, is an
    // error.
    private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
        BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
        checkExpr(startTagName, xmlElementEnv, symTable.stringType);
        BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
        if (endTagName == null) {
            return;
        }

        checkExpr(endTagName, xmlElementEnv, symTable.stringType);
        if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME &&
                startTagName.equals(endTagName)) {
            return;
        }

        if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
            return;
        }

        dlog.error(bLangXMLElementLiteral.pos, DiagnosticCode.XML_TAGS_MISMATCH);
    }

    // Type-checks the interpolated expressions of a string template. Only the
    // simple basic types (and xml, when allowPimitive... allowXml is set) may
    // appear; anything else is reported as incompatible.
    // NOTE(review): the `type.tag >= TypeTags.JSON` guard relies on the
    // numeric ordering of type tags (simple-value tags sort below JSON) —
    // confirm against the TypeTags declaration if that ordering changes.
    private void checkStringTemplateExprs(List<? extends BLangExpression> exprs, boolean allowXml) {
        for (BLangExpression expr : exprs) {
            checkExpr(expr, env);

            BType type = expr.type;

            if (type == symTable.semanticError) {
                continue;
            }

            if (type.tag >= TypeTags.JSON) {
                if (allowXml) {
                    if (type.tag != TypeTags.XML) {
                        dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                                BUnionType.create(null, symTable.intType, symTable.floatType,
                                        symTable.decimalType, symTable.stringType,
                                        symTable.booleanType, symTable.xmlType), type);
                    }
                    continue;
                }

                dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                        BUnionType.create(null, symTable.intType, symTable.floatType,
                                symTable.decimalType, symTable.stringType,
                                symTable.booleanType), type);
            }
        }
    }

    /**
     * Concatenate the consecutive text type nodes, and get the reduced set of children.
     *
     * @param exprs         Child nodes
     * @param xmlElementEnv the symbol environment used to type-check each child
     * @return Reduced set of children
     */
    private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
        List<BLangExpression> newChildren = new ArrayList<>();
        // Buffer of consecutive non-xml fragments to be folded into one text literal.
        List<BLangExpression> tempConcatExpressions = new ArrayList<>();

        for (BLangExpression expr : exprs) {
            BType exprType = checkExpr(expr, xmlElementEnv);
            if (TypeTags.isXMLTypeTag(exprType.tag)) {
                // An xml child flushes the pending text run before being added itself.
                if (!tempConcatExpressions.isEmpty()) {
                    newChildren.add(getXMLTextLiteral(tempConcatExpressions));
                    tempConcatExpressions = new ArrayList<>();
                }
                newChildren.add(expr);
                continue;
            }

            BType type = expr.type;
            if (type.tag >= TypeTags.JSON) {
                // Same tag-ordering guard as checkStringTemplateExprs: only simple
                // values (or xml) may be interpolated.
                if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
                    dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                            BUnionType.create(null, symTable.intType, symTable.floatType,
                                    symTable.decimalType, symTable.stringType,
                                    symTable.booleanType, symTable.xmlType), type);
                }
                continue;
            }

            tempConcatExpressions.add(expr);
        }

        // Add remaining concatenated text nodes as children.
        if (!tempConcatExpressions.isEmpty()) {
            newChildren.add(getXMLTextLiteral(tempConcatExpressions));
        }

        return newChildren;
    }

    // Wraps a run of non-xml fragments into a single xml text literal node,
    // positioned at the first fragment.
    private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
        BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
        xmlTextLiteral.textFragments = exprs;
        xmlTextLiteral.pos = exprs.get(0).pos;
        xmlTextLiteral.type = symTable.xmlType;
        return xmlTextLiteral;
    }

    // Type-checks the container expression of a field access with no expected
    // type, and returns its inferred type.
    private BType getTypeOfExprInFieldAccess(BLangExpression expr) {
        checkExpr(expr, this.env, symTable.noType);
        return expr.type;
    }

    // Computes the final type of an access expression: widens with nil when
    // the navigation can produce nil, and with error when error-safe
    // navigation applies and the parent can carry an error.
    private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {

        // Cache the actual type of the field access expression, to be used by
        // later phases (per the originalType field's use elsewhere in this file).
        accessExpr.originalType = actualType;

        BUnionType unionType = BUnionType.create(null, actualType);

        if (returnsNull(accessExpr)) {
            unionType.add(symTable.nilType);
        }

        BType parentType = accessExpr.expr.type;
        if (accessExpr.errorSafeNavigation
                && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION
&& ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) {
            unionType.add(symTable.errorType);
        }

        // Collapse a single-member union back to the member itself.
        if (unionType.getMemberTypes().size() == 1) {
            return unionType.getMemberTypes().toArray(new BType[0])[0];
        }

        return unionType;
    }

    // Whether navigating this access expression can yield nil: a nullable
    // (non-json) parent, or index access on a map whose constraint is not an
    // inherently nil-bearing type (any/json).
    private boolean returnsNull(BLangAccessExpression accessExpr) {
        BType parentType = accessExpr.expr.type;
        if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
            return true;
        }

        // Check whether this is a map access by index. If not, null is not a possible return type.
        if (parentType.tag != TypeTags.MAP) {
            return false;
        }

        // A map access with index returns nullable, unless the constraint is
        // a type that already contains nil (any/json).
        if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
                && accessExpr.expr.type.tag == TypeTags.MAP) {
            BType constraintType = ((BMapType) accessExpr.expr.type).constraint;

            return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
        }

        return false;
    }

    // Field access on an object or a union of objects. For a union, every
    // member must expose the field; the member field types are unioned.
    private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
        if (varRefType.tag == TypeTags.OBJECT) {
            return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
        }

        // If the type is a union of objects, check every member.
        Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : memberTypes) {
            BType individualFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memType);

            if (individualFieldType == symTable.semanticError) {
                return individualFieldType;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.size() == 1) {
            return fieldTypeMembers.iterator().next();
        }

        return BUnionType.create(null, fieldTypeMembers);
    }

    // Required-field access on a record or a union of records; each member
    // must provide the field as a required field.
    private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
        if (varRefType.tag == TypeTags.RECORD) {
            return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        }

        // If the type is a union of records, check every member.
        Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : memberTypes) {
            BType individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);

            if (individualFieldType == symTable.semanticError) {
                return individualFieldType;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.size() == 1) {
            return fieldTypeMembers.iterator().next();
        }

        return BUnionType.create(null, fieldTypeMembers);
    }

    // lvalue field access on a record (or union of records): a required field
    // is preferred, falling back to an optional field of the same name. For a
    // union, every member must resolve the field.
    private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                Name fieldName) {
        if (varRefType.tag == TypeTags.RECORD) {
            BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
            if (fieldType != symTable.semanticError) {
                return fieldType;
            }

            // For the LHS, an optional field of the same name is also acceptable.
            return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        }

        Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : memberTypes) {
            BType individualFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memType, fieldName);

            if (individualFieldType == symTable.semanticError) {
                return symTable.semanticError;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.size() == 1) {
            return fieldTypeMembers.iterator().next();
        }

        return BUnionType.create(null, fieldTypeMembers);
    }

    // Optional field access (x?.f) on a record or a union of records. For a
    // single record, an optional field's type is widened with nil; for a
    // union, members lacking the field make the overall result nilable, and
    // at least one member must provide the field.
    private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                     Name fieldName) {
        if (varRefType.tag == TypeTags.RECORD) {
            BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
            if (fieldType != symTable.semanticError) {
                return fieldType;
            }

            fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
            if (fieldType == symTable.semanticError) {
                return fieldType;
            }
            // An optional field may be absent, hence the nil widening.
            return BUnionType.create(null, fieldType, symTable.nilType);
        }

        Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();

        BType fieldType;

        // Tracks whether some union member does not provide the field at all.
        boolean nonMatchedRecordExists = false;

        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : memberTypes) {
            BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);

            if (individualFieldType == symTable.semanticError) {
                nonMatchedRecordExists = true;
                continue;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.isEmpty()) {
            return symTable.semanticError;
        }

        if (fieldTypeMembers.size() == 1) {
            fieldType = fieldTypeMembers.iterator().next();
        } else {
            fieldType = BUnionType.create(null, fieldTypeMembers);
        }

        return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
    }

    // Dispatches a field access over the receiver's type: objects, records,
    // lax types (per types.isLax — presumably json-like types; confirm in
    // Types.isLax), and xml sequences. Reports the appropriate diagnostic for
    // unsupported receivers.
    private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
        BType actualType = symTable.semanticError;

        if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
            actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
            fieldAccessExpr.originalType = actualType;
        } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
            actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);

            if (actualType != symTable.semanticError) {
                fieldAccessExpr.originalType = actualType;
                return actualType;
            }

            if (!fieldAccessExpr.lhsVar) {
                dlog.error(fieldAccessExpr.pos,
                        DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD, varRefType,
                        fieldName);
                return actualType;
            }

            // On the LHS, optional fields are also assignable via field access.
            actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
            fieldAccessExpr.originalType = actualType;
            if (actualType == symTable.semanticError) {
                dlog.error(fieldAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
                        fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
            }
        } else if (types.isLax(varRefType)) {
            if (fieldAccessExpr.lhsVar) {
                dlog.error(fieldAccessExpr.pos,
                        DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType);
                return symTable.semanticError;
            }
            if
(fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
            // Lax field access may fail at runtime, hence the error widening.
            actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
            fieldAccessExpr.originalType = laxFieldAccessType;
        } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
            // Chained access where the previous step had a lax original type.
            BType laxFieldAccessType =
                    getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
            if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
            fieldAccessExpr.errorSafeNavigation = true;
            fieldAccessExpr.originalType = laxFieldAccessType;
        } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
            if (fieldAccessExpr.lhsVar) {
                dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
            }
            actualType = symTable.xmlType;
            fieldAccessExpr.originalType = actualType;
        } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
                    varRefType);
        }

        return actualType;
    }

    // Resolves the namespace prefix of an ns-prefixed field access against the
    // prefix symbol space; a prefix bound to a package is resolved through the
    // package's XML namespace constants.
    private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldAccess = fieldAccessExpr;
        String nsPrefix = nsPrefixedFieldAccess.nsPrefix.value;
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix));

        if (nsSymbol == symTable.notFoundSymbol) {
            dlog.error(nsPrefixedFieldAccess.nsPrefix.pos, DiagnosticCode.CANNOT_FIND_XML_NAMESPACE,
                    nsPrefixedFieldAccess.nsPrefix);
        } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
            nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
                    nsPrefixedFieldAccess.field.value, nsPrefixedFieldAccess.nsPrefix.value,
                    (BPackageSymbol) nsSymbol, fieldAccessExpr.pos);
        } else {
            nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) nsSymbol;
        }
    }

    // True when a previous navigation step recorded a lax original type.
    private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
        return fieldBasedAccess.originalType != null && types.isLax(fieldBasedAccess.originalType);
    }

    // The member type produced by a lax field access on the given type:
    // json -> json, xml -> string, map -> its constraint; for a union the
    // member results are unioned. Other types yield semanticError.
    private BType getLaxFieldAccessType(BType exprType) {
        switch (exprType.tag) {
            case TypeTags.JSON:
                return symTable.jsonType;
            case TypeTags.XML:
            case TypeTags.XML_ELEMENT:
                return symTable.stringType;
            case TypeTags.MAP:
                return ((BMapType) exprType).constraint;
            case TypeTags.UNION:
                BUnionType unionType = (BUnionType) exprType;
                LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
                unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType)));
                return memberTypes.size() == 1 ? memberTypes.iterator().next() : BUnionType.create(null, memberTypes);
        }
        return symTable.semanticError;
    }

    // Optional field access (x?.f) dispatch. A nil member in a union receiver
    // is stripped before checking and re-added to the result; lax receivers
    // additionally widen with error when the access can fail at runtime.
    private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                               Name fieldName) {
        BType actualType = symTable.semanticError;

        boolean nillableExprType = false;
        BType effectiveType = varRefType;

        if (varRefType.tag == TypeTags.UNION) {
            Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

            if (memTypes.contains(symTable.nilType)) {
                // Strip nil from the receiver; remember to re-add it to the result.
                LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
                for (BType bType : memTypes) {
                    if (bType != symTable.nilType) {
                        nilRemovedSet.add(bType);
                    } else {
                        nillableExprType = true;
                    }
                }

                effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                        BUnionType.create(null, nilRemovedSet);
            }
        }

        if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
            actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
            if (actualType == symTable.semanticError) {
                dlog.error(fieldAccessExpr.pos,
                        DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD, varRefType,
                        fieldName);
            }
            fieldAccessExpr.nilSafeNavigation = nillableExprType;
            fieldAccessExpr.originalType = getSafeType(actualType, fieldAccessExpr);
        } else if (types.isLax(effectiveType)) {
            BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
            actualType = accessCouldResultInError(effectiveType) ?
                    BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
            if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            fieldAccessExpr.originalType = laxFieldAccessType;
            fieldAccessExpr.nilSafeNavigation = true;
            nillableExprType = true;
        } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
            // Chained optional access where the previous step was lax.
            BType laxFieldAccessType =
                    getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
            actualType = accessCouldResultInError(effectiveType) ?
                    BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
            if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
                resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
            }
            fieldAccessExpr.errorSafeNavigation = true;
            fieldAccessExpr.originalType = laxFieldAccessType;
            fieldAccessExpr.nilSafeNavigation = true;
            nillableExprType = true;
        } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS,
                    varRefType);
        }

        // Re-add nil stripped from the receiver (or implied by lax access).
        if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
            actualType = BUnionType.create(null, actualType, symTable.nilType);
        }

        return actualType;
    }

    // Whether a lax access on this type can fail at runtime: json and xml can,
    // plain maps cannot, and a union can if any member can.
    private boolean accessCouldResultInError(BType type) {
        if (type.tag == TypeTags.JSON) {
            return true;
        }

        if (type.tag == TypeTags.MAP) {
            return false;
        }

        if (type.tag == TypeTags.XML) {
            return true;
        }

        if (type.tag == TypeTags.UNION) {
            return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError);
        } else {
            return false;
        }
    }

    // Type-checks an index-based access (x[i]) over mappings, lists, strings,
    // xml, and tables, producing the member type and recording originalType
    // and nil-safe-navigation state on the access node.
    private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
        BType varRefType = indexBasedAccessExpr.expr.type;

        boolean nillableExprType = false;

        if (varRefType.tag == TypeTags.UNION) {
            Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

            if (memTypes.contains(symTable.nilType)) {
                // Strip nil from the receiver; only mappings support nil-lifted
                // member access, and not on the LHS.
                LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
                for (BType bType : memTypes) {
                    if (bType != symTable.nilType) {
                        nilRemovedSet.add(bType);
                    } else {
                        nillableExprType = true;
                    }
                }

                if (nillableExprType) {
                    varRefType = nilRemovedSet.size() == 1 ?
nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet);

                    if (!types.isSubTypeOfMapping(varRefType)) {
                        dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
                                indexBasedAccessExpr.expr.type);
                        return symTable.semanticError;
                    }

                    if (indexBasedAccessExpr.lhsVar) {
                        dlog.error(indexBasedAccessExpr.pos,
                                DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT,
                                indexBasedAccessExpr.expr.type);
                        return symTable.semanticError;
                    }
                }
            }
        }

        BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
        BType actualType = symTable.semanticError;

        if (types.isSubTypeOfMapping(varRefType)) {
            // Mapping access: index must be a string.
            checkExpr(indexExpr, this.env, symTable.stringType);

            if (indexExpr.type == symTable.semanticError) {
                return symTable.semanticError;
            }

            actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);

            if (actualType == symTable.semanticError) {
                // A constant string index that matched no field gets a more
                // specific diagnostic than a generally invalid index type.
                if (indexExpr.type.tag == TypeTags.STRING && isConst(indexExpr)) {
                    String fieldName = getConstFieldName(indexExpr);
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD,
                            fieldName, indexBasedAccessExpr.expr.type);
                    return actualType;
                }

                dlog.error(indexExpr.pos, DiagnosticCode.INVALID_RECORD_INDEX_EXPR, indexExpr.type);
                return actualType;
            }

            indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
            indexBasedAccessExpr.originalType = getSafeType(actualType, indexBasedAccessExpr);
        } else if (types.isSubTypeOfList(varRefType)) {
            // List access: index must be an int.
            checkExpr(indexExpr, this.env, symTable.intType);

            if (indexExpr.type == symTable.semanticError) {
                return symTable.semanticError;
            }

            actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
            indexBasedAccessExpr.originalType = actualType;

            if (actualType == symTable.semanticError) {
                // A constant out-of-range index gets a dedicated diagnostic.
                if (indexExpr.type.tag == TypeTags.INT && isConst(indexExpr)) {
                    dlog.error(indexBasedAccessExpr.indexExpr.pos, DiagnosticCode.LIST_INDEX_OUT_OF_RANGE,
                            getConstIndex(indexExpr));
                    return actualType;
                }
                dlog.error(indexExpr.pos, DiagnosticCode.INVALID_LIST_INDEX_EXPR, indexExpr.type);
                return actualType;
            }
        } else if (types.isAssignable(varRefType, symTable.stringType)) {
            // String access: read-only int indexing; member type is string.
            if (indexBasedAccessExpr.lhsVar) {
                dlog.error(indexBasedAccessExpr.pos,
                        DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT,
                        indexBasedAccessExpr.expr.type);
                return symTable.semanticError;
            }

            checkExpr(indexExpr, this.env, symTable.intType);

            if (indexExpr.type == symTable.semanticError) {
                return symTable.semanticError;
            }

            indexBasedAccessExpr.originalType = symTable.stringType;
            actualType = symTable.stringType;
        } else if (varRefType.tag == TypeTags.XML) {
            // XML sequences are read-only through member access.
            if (indexBasedAccessExpr.lhsVar) {
                indexExpr.type = symTable.semanticError;
                dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
                return actualType;
            }

            BType type = checkExpr(indexExpr, this.env, symTable.intType);
            if (type == symTable.semanticError) {
                return type;
            }
            // Note: member access on an xml sequence yields the sequence type itself.
            actualType = varRefType;
            indexBasedAccessExpr.originalType = actualType;
        } else if (varRefType.tag == TypeTags.TABLE) {
            BTableType tableType = (BTableType) indexBasedAccessExpr.expr.type;
            BType keyTypeConstraint = tableType.keyTypeConstraint;
            if (tableType.keyTypeConstraint == null) {
                // Derive the key constraint from the table's key field names;
                // a keyless table does not support member access.
                keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.type).
                        fieldNameList, ((BTableType) indexBasedAccessExpr.expr.type).constraint);

                if (keyTypeConstraint == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                            indexBasedAccessExpr.expr);
                    return symTable.semanticError;
                }
            }

            if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
                checkExpr(indexExpr, this.env, keyTypeConstraint);
                if (indexExpr.type == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }
            } else {
                // Multi-key access: each key expression is checked against the
                // corresponding member of the (tuple) key constraint.
                List<BLangExpression> multiKeyExpressionList =
                        ((BLangTableMultiKeyExpr) indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
                List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
                if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }

                for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                    BLangExpression keyExpr = multiKeyExpressionList.get(i);
                    checkExpr(keyExpr, this.env, keyConstraintTypes.get(i));
                    if (keyExpr.type == symTable.semanticError) {
                        dlog.error(indexBasedAccessExpr.pos,
                                DiagnosticCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint);
                        return symTable.semanticError;
                    }
                }
            }

            if (expType.tag != TypeTags.NONE) {
                BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType);
                if (resultType == symTable.semanticError) {
                    return symTable.semanticError;
                }
            }

            indexBasedAccessExpr.originalType = tableType.constraint;
            actualType = tableType.constraint;
        } else if (varRefType == symTable.semanticError) {
            indexBasedAccessExpr.indexExpr.type = symTable.semanticError;
            return symTable.semanticError;
        } else {
            indexBasedAccessExpr.indexExpr.type = symTable.semanticError;
            dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
                    indexBasedAccessExpr.expr.type);
            return symTable.semanticError;
        }

        // Re-add nil stripped from a nilable union receiver.
        if (nillableExprType && !actualType.isNullable()) {
            actualType = BUnionType.create(null, actualType, symTable.nilType);
        }

        return actualType;
    }

    // Constant value of an int index: either a numeric literal or a constant
    // symbol reference.
    private Long getConstIndex(BLangExpression indexExpr) {
        return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value :
                (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
    }

    // Constant value of a string index: either a string literal or a constant
    // symbol reference.
    private String getConstFieldName(BLangExpression indexExpr) {
        return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value :
                (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
    }

    // Member type of an array access given the index expression's type. A
    // constant index on a sealed array is bounds-checked at compile time; a
    // finite/union index type must contain at least one valid index value.
    private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                             BArrayType arrayType) {
        BType actualType = symTable.semanticError;
        switch (indexExprType.tag) {
            case TypeTags.INT:
                BLangExpression indexExpr = indexBasedAccess.indexExpr;
                if (!isConst(indexExpr) || arrayType.state == BArrayState.UNSEALED) {
                    actualType = arrayType.eType;
                    break;
                }
                actualType = getConstIndex(indexExpr) >= arrayType.size ?
symTable.semanticError : arrayType.eType;
                break;
            case TypeTags.FINITE:
                BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
                boolean validIndexExists = false;
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    if (indexValue >= 0 &&
                            (arrayType.state == BArrayState.UNSEALED || indexValue < arrayType.size)) {
                        validIndexExists = true;
                        break;
                    }
                }
                if (!validIndexExists) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
                break;
            case TypeTags.UNION:
                // Fold all finite members of the union into one finite type
                // and validate it as a whole.
                List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                        .filter(memType -> memType.tag == TypeTags.FINITE)
                        .map(matchedType -> (BFiniteType) matchedType)
                        .collect(Collectors.toList());

                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }

                BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
                if (elementType == symTable.semanticError) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
        }
        return actualType;
    }

    // Member type of a list (array/tuple/union-of-lists) access; for a union,
    // members that cannot be indexed are skipped and the rest are unioned.
    private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
        if (type.tag == TypeTags.ARRAY) {
            return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.type, (BArrayType) type);
        }

        if (type.tag == TypeTags.TUPLE) {
            return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.type);
        }

        LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();

        for (BType memType : ((BUnionType) type).getMemberTypes()) {
            BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType);

            if (individualFieldType == symTable.semanticError) {
                continue;
            }

            fieldTypeMembers.add(individualFieldType);
        }

        if (fieldTypeMembers.size() == 0) {
            return symTable.semanticError;
        }

        if (fieldTypeMembers.size() == 1) {
            return fieldTypeMembers.iterator().next();
        }

        return BUnionType.create(null, fieldTypeMembers);
    }

    // Member type of a tuple access, by the index expression's type: a
    // constant int picks one member, a non-constant int unions all members,
    // and finite/union index types union the members they can reach.
    private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.INT:
                if (isConst(indexExpr)) {
                    actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
                } else {
                    // Unknown index: result may be any member type of the tuple.
                    BTupleType tupleExpr = (BTupleType) accessExpr.expr.type;
                    LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                    actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() :
                            BUnionType.create(null, tupleTypes);
                }
                break;
            case TypeTags.FINITE:
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    BType fieldType = checkTupleFieldType(tuple, indexValue);
                    if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                        possibleTypes.add(fieldType);
                    }
                }
                if (possibleTypes.size() == 0) {
                    return symTable.semanticError;
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });

                // Merge all finite members into one finite type and check it once.
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }

                BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }

                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
        }
        return actualType;
    }

    // Accumulates all member types of a tuple into the given set, flattening
    // union members (via collectMemberTypes, defined elsewhere in this class).
    private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
        tupleType.tupleTypes
                .forEach(memberType -> {
                    if (memberType.tag == TypeTags.UNION) {
                        collectMemberTypes((BUnionType) memberType, memberTypes);
                    } else {
                        memberTypes.add(memberType);
                    }
                });
        return memberTypes;
    }

    // Member type of a mapping (map/record/union-of-mappings) access. Map
    // read access is nil-lifted; lvalue access is not.
    private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
        if (type.tag == TypeTags.MAP) {
            BType constraint = ((BMapType) type).constraint;
            return accessExpr.lhsVar ?
constraint : addNilForNillableAccessType(constraint); } if (type.tag == TypeTags.RECORD) { return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.type); } BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType; } private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.STRING: if (isConst(indexExpr)) { String fieldName = getConstFieldName(indexExpr); actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType != symTable.semanticError) { return actualType; } actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { return actualType; } if (actualType == symTable.neverType) { return actualType; } return addNilForNillableAccessType(actualType); } if (accessExpr.lhsVar) { return actualType; } return addNilForNillableAccessType(actualType); } LinkedHashSet<BType> fieldTypes = record.fields.values().stream() .map(field -> field.type) 
.collect(Collectors.toCollection(LinkedHashSet::new)); if (record.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(record.restFieldType); } if (fieldTypes.stream().noneMatch(BType::isNullable)) { fieldTypes.add(symTable.nilType); } actualType = BUnionType.create(null, fieldTypes); break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { String fieldName = (String) ((BLangLiteral) finiteMember).value; BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); } if (fieldType != symTable.semanticError) { fieldType = addNilForNillableAccessType(fieldType); } } if (fieldType.tag == TypeTags.SEMANTIC_ERROR) { continue; } possibleTypes.add(fieldType); } if (possibleTypes.isEmpty()) { return symTable.semanticError; } if (possibleTypes.stream().noneMatch(BType::isNullable)) { possibleTypes.add(symTable.nilType); } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? 
possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember);
    }
    return actualType;
}

/**
 * Derives the effective LHS type of a safe-navigation access by removing the members the
 * navigation operator handles from a union type.
 *
 * <p>For error-lifting access ({@code errorSafeNavigation}) the {@code error} member is removed;
 * it is an error ({@code SAFE_NAVIGATION_NOT_REQUIRED}) if the union has no {@code error} member,
 * or nothing but {@code error} members. For nil-lifting access ({@code nilSafeNavigation}) the
 * {@code nil} member is removed. Non-union types are returned unchanged.
 */
private BType getSafeType(BType type, BLangAccessExpression accessExpr) {
    if (type.tag != TypeTags.UNION) {
        return type;
    }
    List<BType> lhsTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
    if (accessExpr.errorSafeNavigation) {
        // Error-lifting is only meaningful when the LHS can actually be an error.
        if (!lhsTypes.contains(symTable.errorType)) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.semanticError;
        }
        lhsTypes = lhsTypes.stream()
                .filter(memberType -> memberType != symTable.errorType)
                .collect(Collectors.toList());
        // If removing errors empties the union, the access could never produce a value.
        if (lhsTypes.isEmpty()) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.semanticError;
        }
    }
    if (accessExpr.nilSafeNavigation) {
        lhsTypes = lhsTypes.stream()
                .filter(memberType -> memberType != symTable.nilType)
                .collect(Collectors.toList());
    }
    if (lhsTypes.size() == 1) {
        return lhsTypes.get(0);
    }
    return BUnionType.create(null, new LinkedHashSet<>(lhsTypes));
}

// Flattens a union into its member list; any other type becomes a singleton list.
private List<BType> getTypesList(BType type) {
    if (type.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) type;
        return new ArrayList<>(unionType.getMemberTypes());
    } else {
        return Lists.of(type);
    }
}

/**
 * Collects the possible static types of a match expression: the types of every pattern
 * expression, plus (per the continuation below) any input member type that no pattern
 * variable can accept. A semantic error anywhere collapses the result to a singleton
 * {@code semanticError} set.
 */
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.type);
    LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.type;
            matchExprTypes.addAll(getTypesList(patternExprType));
            if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
                return new LinkedHashSet<BType>() {
                    {
                        add(symTable.semanticError);
                    }
                };
            }
            assignable = this.types.isAssignable(type, pattern.variable.type);
            if (assignable) {
                break;
            }
        }
        if (!assignable) {
matchExprTypes.add(type);
        }
    }
    return matchExprTypes;
}

/**
 * Checks whether a value of the given type could (transitively) contain table values,
 * by recursing through unions, map constraints, record fields (and the rest field of
 * open records), array element types, and tuple member types.
 *
 * @param encounteredTypes types already visited; used to break cycles in recursive
 *                         type definitions (a revisited type reports {@code false})
 */
private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
    if (encounteredTypes.contains(type)) {
        // Already visited on this path: stop recursion on cyclic type definitions.
        return false;
    }
    encounteredTypes.add(type);
    switch (type.tag) {
        case TypeTags.UNION:
            for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
                if (couldHoldTableValues(bType1, encounteredTypes)) {
                    return true;
                }
            }
            return false;
        case TypeTags.MAP:
            return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields.values()) {
                if (couldHoldTableValues(field.type, encounteredTypes)) {
                    return true;
                }
            }
            // Open records can also hold tables via their rest field.
            return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
        case TypeTags.ARRAY:
            return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
        case TypeTags.TUPLE:
            for (BType bType : ((BTupleType) type).getTupleTypes()) {
                if (couldHoldTableValues(bType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
    }
    // Any other type tag (simple types, etc.) cannot hold table values.
    return false;
}

/**
 * Returns true if the expression is a constant: either a valid constant expression node
 * itself, or a simple variable reference whose symbol carries the CONSTANT tag.
 */
private boolean isConst(BLangExpression expression) {
    if (ConstantAnalyzer.isValidConstantExpressionNode(expression)) {
        return true;
    }
    if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
}

// Name of the compilation unit the given node's source position belongs to.
private Name getCurrentCompUnit(BLangNode node) {
    return names.fromString(node.pos.getSource().getCompilationUnitName());
}

/**
 * Reduces a list of inferred types to a single representative type: any type assignable
 * to another in the list is dropped in favour of the broader one, a semantic error
 * short-circuits, a single survivor is returned as-is, and otherwise the survivors are
 * wrapped in a union (continued on the next line).
 */
private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
    for (int i = 0; i < inferredTypeList.size(); i++) {
        BType type = inferredTypeList.get(i);
        if (type.tag == TypeTags.SEMANTIC_ERROR) {
            return type;
        }
        for (int j = i + 1; j < inferredTypeList.size(); j++) {
            BType otherType = inferredTypeList.get(j);
            if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                return otherType;
            }
            if (types.isAssignable(otherType, type)) {
                // otherType is narrower: remove it and re-check the shifted index.
                inferredTypeList.remove(j);
                j -= 1;
                continue;
            }
            if (types.isAssignable(type,
otherType)) { inferredTypeList.remove(i); i -= 1; break; } } } if (inferredTypeList.size() == 1) { return inferredTypeList.get(0); } return BUnionType.create(null, inferredTypeList.toArray(new BType[0])); } private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>(); List<BType> restFieldTypes = new ArrayList<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValue.key; BLangExpression expression = keyValue.valueExpr; BLangExpression keyExpr = key.expr; if (key.computedKey) { checkExpr(keyExpr, env, symTable.stringType); BType exprType = checkExpr(expression, env, expType); if (isUniqueType(restFieldTypes, exprType)) { restFieldTypes.add(exprType); } } else { addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr), keyValue.readonly ? 
checkExpr(expression, env, symTable.readonlyType) : checkExpr(expression, env, expType), true, keyValue.readonly); } } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType); int typeTag = type.tag; if (typeTag == TypeTags.MAP) { BType constraintType = ((BMapType) type).constraint; if (isUniqueType(restFieldTypes, constraintType)) { restFieldTypes.add(constraintType); } } if (type.tag != TypeTags.RECORD) { continue; } BRecordType recordType = (BRecordType) type; for (BField recField : recordType.fields.values()) { addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type, !Symbols.isOptional(recField.symbol), false); } if (!recordType.sealed) { BType restFieldType = recordType.restFieldType; if (isUniqueType(restFieldTypes, restFieldType)) { restFieldTypes.add(restFieldType); } } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField), varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) : checkExpr(varNameField, env, expType), true, varNameField.readonly); } } LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); boolean allReadOnlyNonRestFields = true; for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) { FieldInfo fieldInfo = entry.getValue(); List<BType> types = fieldInfo.types; if (types.contains(symTable.semanticError)) { return symTable.semanticError; } String key = entry.getKey(); Name fieldName = names.fromString(key); BType type = types.size() == 1 ? 
types.get(0) : BUnionType.create(null, types.toArray(new BType[0])); Set<Flag> flags = new HashSet<>(); if (fieldInfo.required) { flags.add(Flag.REQUIRED); } else { flags.add(Flag.OPTIONAL); } if (fieldInfo.readonly) { flags.add(Flag.READONLY); } else if (allReadOnlyNonRestFields) { allReadOnlyNonRestFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = fields; if (restFieldTypes.contains(symTable.semanticError)) { return symTable.semanticError; } if (restFieldTypes.isEmpty()) { recordType.sealed = true; recordType.restFieldType = symTable.noType; } else if (restFieldTypes.size() == 1) { recordType.restFieldType = restFieldTypes.get(0); } else { recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0])); } recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) { recordType.flags |= Flags.READONLY; recordSymbol.flags |= Flags.READONLY; } BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); return recordType; } private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, DiagnosticPos pos, SymbolOrigin origin) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)), pkgID, null, env.scope.owner, pos, origin); BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), 
symTable.nilType, null);
    // Wire a default `init` function (nil-returning, public) into the record symbol's scope.
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false,
            symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
            bInvokableType, pos);
    recordSymbol.scope = new Scope(recordSymbol);
    recordSymbol.scope.define(
            names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
            recordSymbol.initializerFunc.symbol);
    return recordSymbol;
}

// Field name of a record key: the identifier for a simple var-ref key, the literal value otherwise.
private String getKeyName(BLangExpression key) {
    return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
            ((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value;
}

/**
 * Accumulates per-field type information while inferring a record type: a first
 * occurrence of a key creates a FieldInfo; later occurrences append the type (if not
 * already present per isUniqueType) and may upgrade {@code required} to true.
 *
 * NOTE(review): only {@code required} is merged for repeated keys; {@code readonly}
 * keeps the first occurrence's value — confirm this is intended.
 */
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
                                    BType exprType, boolean required, boolean readonly) {
    if (!nonRestFieldTypes.containsKey(keyString)) {
        nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }},
                required, readonly));
        return;
    }
    FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
    List<BType> typeList = fieldInfo.types;
    if (isUniqueType(typeList, exprType)) {
        typeList.add(exprType);
    }
    if (required && !fieldInfo.required) {
        fieldInfo.required = true;
    }
}

/**
 * Returns true if {@code type} is not already in {@code typeList}: record types are
 * compared by reference identity, all other types structurally via types.isSameType.
 */
private boolean isUniqueType(List<BType> typeList, BType type) {
    boolean isRecord = type.tag == TypeTags.RECORD;
    for (BType bType : typeList) {
        if (isRecord) {
            if (type == bType) {
                return false;
            }
        } else if (types.isSameType(type, bType)) {
            return false;
        }
    }
    return true;
}

/**
 * Checks a mutable xml subtype literal against the expected type, trying the mutable
 * subtype first and (in the continuation) its immutable counterpart, with special
 * handling for union expected types. Begins here; body continues on the next line.
 */
private BType checkXmlSubTypeLiteralCompatibility(DiagnosticPos pos, BXMLSubType mutableXmlSubType,
                                                  BType expType) {
    if (expType == symTable.semanticError) {
        return expType;
    }
    boolean unionExpType = expType.tag == TypeTags.UNION;
    if (expType == mutableXmlSubType) {
        return expType;
    }
    if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
        return
mutableXmlSubType; } BXMLSubType immutableXmlSubType = (BXMLSubType) ImmutableTypeCloner.getEffectiveImmutableType(pos, types, mutableXmlSubType, env, symTable, anonymousModelHelper, names); if (expType == immutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) { return immutableXmlSubType; } if (!unionExpType) { dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } List<BType> compatibleTypes = new ArrayList<>(); for (BType memberType : ((BUnionType) expType).getMemberTypes()) { if (compatibleTypes.contains(memberType)) { continue; } if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) { compatibleTypes.add(memberType); continue; } if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) { compatibleTypes.add(mutableXmlSubType); continue; } if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) { compatibleTypes.add(immutableXmlSubType); } } if (compatibleTypes.isEmpty()) { dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } if (compatibleTypes.size() == 1) { return compatibleTypes.get(0); } dlog.error(pos, DiagnosticCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) { for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) { BType childType = modifiedChild.type; if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) { continue; } modifiedChild.type = ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, (SelectivelyImmutableReferenceType) childType, env, symTable, anonymousModelHelper, names); if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) { 
markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild);
        }
    }
}

/**
 * Logs an undefined-symbol error for the given position, unless the name corresponds to
 * a missing (recovered) node — those already carry their own diagnostics.
 */
private void logUndefinedSymbolError(DiagnosticPos pos, String name) {
    if (!missingNodesHelper.isMissingNode(name)) {
        dlog.error(pos, DiagnosticCode.UNDEFINED_SYMBOL, name);
    }
}

/**
 * Mutable holder for the information gathered per field while inferring a record type
 * (see addToNonRestFieldTypes / defineInferredRecordType).
 */
private static class FieldInfo {
    // Candidate types observed for the field (deduplicated via isUniqueType).
    List<BType> types;
    // True if at least one occurrence made the field required.
    boolean required;
    // True if the first occurrence was declared readonly.
    boolean readonly;

    private FieldInfo(List<BType> types, boolean required, boolean readonly) {
        this.types = types;
        this.required = required;
        this.readonly = readonly;
    }
}
}
class TypeChecker extends BLangNodeVisitor { private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>(); private static Set<String> listLengthModifierFunctions = new HashSet<>(); private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>(); private static final String TABLE_TNAME = "table"; private static final String LIST_LANG_LIB = "lang.array"; private static final String MAP_LANG_LIB = "lang.map"; private static final String TABLE_LANG_LIB = "lang.table"; private static final String VALUE_LANG_LIB = "lang.value"; private static final String XML_LANG_LIB = "lang.xml"; private static final String FUNCTION_NAME_PUSH = "push"; private static final String FUNCTION_NAME_POP = "pop"; private static final String FUNCTION_NAME_SHIFT = "shift"; private static final String FUNCTION_NAME_UNSHIFT = "unshift"; private Names names; private SymbolTable symTable; private SymbolEnter symbolEnter; private SymbolResolver symResolver; private NodeCloner nodeCloner; private Types types; private BLangDiagnosticLogHelper dlog; private SymbolEnv env; private boolean isTypeChecked; private TypeNarrower typeNarrower; private TypeParamAnalyzer typeParamAnalyzer; private BLangAnonymousModelHelper anonymousModelHelper; private SemanticAnalyzer semanticAnalyzer; private ResolvedTypeBuilder typeBuilder; private boolean nonErrorLoggingCheck = false; private int letCount = 0; private Stack<SymbolEnv> queryEnvs, prevEnvs; private Stack<BLangSelectClause> selectClauses; private BLangMissingNodesHelper missingNodesHelper; /** * Expected types or inherited types. 
*/ private BType expType; private BType resultType; private DiagnosticCode diagCode; static { listLengthModifierFunctions.add(FUNCTION_NAME_PUSH); listLengthModifierFunctions.add(FUNCTION_NAME_POP); listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT); listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT); modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeAll"); add("setLength"); add("reverse"); add("sort"); add("pop"); add("push"); add("shift"); add("unshift"); }}); modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{ add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{ add("put"); add("add"); add("remove"); add("removeIfHasKey"); add("removeAll"); }}); modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{ add("mergeJson"); }}); modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{ add("setName"); add("setChildren"); add("strip"); }}); } public static TypeChecker getInstance(CompilerContext context) { TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY); if (typeChecker == null) { typeChecker = new TypeChecker(context); } return typeChecker; } public TypeChecker(CompilerContext context) { context.put(TYPE_CHECKER_KEY, this); this.names = Names.getInstance(context); this.symTable = SymbolTable.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.nodeCloner = NodeCloner.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLogHelper.getInstance(context); this.typeNarrower = TypeNarrower.getInstance(context); this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context); this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context); this.semanticAnalyzer = SemanticAnalyzer.getInstance(context); this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context); this.typeBuilder = new 
ResolvedTypeBuilder(); this.selectClauses = new Stack<>(); this.queryEnvs = new Stack<>(); this.prevEnvs = new Stack<>(); } public BType checkExpr(BLangExpression expr, SymbolEnv env) { return checkExpr(expr, env, symTable.noType); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) { return checkExpr(expr, env, expType, DiagnosticCode.INCOMPATIBLE_TYPES); } public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) { if (expr.typeChecked) { return expr.type; } if (expType.tag == TypeTags.INTERSECTION) { expType = ((BIntersectionType) expType).effectiveType; } SymbolEnv prevEnv = this.env; BType preExpType = this.expType; DiagnosticCode preDiagCode = this.diagCode; this.env = env; this.diagCode = diagCode; this.expType = expType; this.isTypeChecked = true; expr.expectedType = expType; expr.accept(this); if (resultType.tag == TypeTags.INTERSECTION) { resultType = ((BIntersectionType) resultType).effectiveType; } expr.type = resultType; expr.typeChecked = isTypeChecked; this.env = prevEnv; this.expType = preExpType; this.diagCode = preDiagCode; validateAndSetExprExpectedType(expr); return resultType; } private void validateAndSetExprExpectedType(BLangExpression expr) { if (resultType.tag == TypeTags.SEMANTIC_ERROR) { return; } if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR && expr.expectedType != null && expr.expectedType.tag == TypeTags.MAP && expr.type.tag == TypeTags.RECORD) { return; } expr.expectedType = resultType; } public void visit(BLangLiteral literalExpr) { BType literalType = setLiteralValueAndGetType(literalExpr, expType); if (literalType == symTable.semanticError || literalExpr.isFiniteContext) { return; } resultType = types.checkType(literalExpr, literalType, expType); } @Override public void visit(BLangXMLElementAccess xmlElementAccess) { checkXMLNamespacePrefixes(xmlElementAccess.filters); checkExpr(xmlElementAccess.expr, env, symTable.xmlType); resultType = new 
BXMLType(symTable.xmlElementType, null); } @Override public void visit(BLangXMLNavigationAccess xmlNavigation) { if (xmlNavigation.lhsVar) { dlog.error(xmlNavigation.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE); } checkXMLNamespacePrefixes(xmlNavigation.filters); if (xmlNavigation.childIndex != null) { checkExpr(xmlNavigation.childIndex, env, symTable.intType); } BType actualType = checkExpr(xmlNavigation.expr, env, symTable.xmlType); types.checkType(xmlNavigation, actualType, expType); if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) { resultType = symTable.xmlType; } else { resultType = new BXMLType(symTable.xmlElementType, null); } } private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) { for (BLangXMLElementFilter filter : filters) { if (!filter.namespace.isEmpty()) { Name nsName = names.fromString(filter.namespace); BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName); filter.namespaceSymbol = nsSymbol; if (nsSymbol == symTable.notFoundSymbol) { dlog.error(filter.nsPos, DiagnosticCode.CANNOT_FIND_XML_NAMESPACE, nsName); } } } } private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) { BType literalType = symTable.getTypeFromTag(literalExpr.type.tag); Object literalValue = literalExpr.value; literalExpr.isJSONContext = types.isJSONContext(expType); if (literalType.tag == TypeTags.INT) { if (expType.tag == TypeTags.FLOAT) { literalType = symTable.floatType; literalExpr.value = ((Long) literalValue).doubleValue(); } else if (expType.tag == TypeTags.DECIMAL && !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) { literalType = symTable.decimalType; literalExpr.value = String.valueOf(literalValue); } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) { literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue); if (literalType == symTable.semanticError) { return symTable.semanticError; } } else if 
(expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned32IntType); 
setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes(); if (memberTypes.stream() .anyMatch(memType -> memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) { return setLiteralValueAndGetType(literalExpr, symTable.intType); } BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) { return setLiteralValueAndGetType(literalExpr, symTable.byteType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) { return setLiteralValueAndGetType(literalExpr, symTable.floatType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; 
} } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } } } else if (literalType.tag == TypeTags.FLOAT) { String literal = String.valueOf(literalValue); String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal); boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal); if (expType.tag == TypeTags.DECIMAL) { if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) { dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.floatType); resultType = symTable.semanticError; return resultType; } literalType = symTable.decimalType; literalExpr.value = numericLiteral; } else if (expType.tag == TypeTags.FLOAT) { literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral)); } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } else if (!isDiscriminatedFloat && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType); if (unionMember != symTable.noType) { 
return unionMember; } } } else if (literalType.tag == TypeTags.DECIMAL) { return decimalLiteral(literalValue, literalExpr, expType); } else if (literalType.tag == TypeTags.STRING && this.expType.tag == TypeTags.CHAR_STRING && types.isCharLiteralValue((String) literalValue)) { return symTable.charStringType; } else { BType expected = getResolvedIntersectionType(this.expType); if (expected.tag == TypeTags.FINITE) { boolean foundMember = types.isAssignableToFiniteType(expected, literalExpr); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } else if (expected.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expected; boolean foundMember = unionType.getMemberTypes() .stream() .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr)); if (foundMember) { setLiteralValueForFiniteType(literalExpr, literalType); return literalType; } } } if (literalExpr.type.tag == TypeTags.BYTE_ARRAY) { literalType = new BArrayType(symTable.byteType); } return literalType; } private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) { Set<BType> memberTypes = expType.getMemberTypes(); if (memberTypes.stream() .anyMatch(memType -> memType.tag == desiredType.tag || memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) { return setLiteralValueAndGetType(literalExpr, desiredType); } BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType); if (finiteType != symTable.semanticError) { BType setType = setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) { return setLiteralValueAndGetType(literalExpr, symTable.decimalType); } finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType); if (finiteType != symTable.semanticError) { BType setType = 
setLiteralValueAndGetType(literalExpr, finiteType); if (literalExpr.isFiniteContext) { return setType; } } return symTable.noType; } private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType, int targetMemberTypeTag) { for (BLangExpression valueExpr : finiteType.getValueSpace()) { if (valueExpr.type.tag == targetMemberTypeTag && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) { return true; } } return false; } private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) { String literal = String.valueOf(literalValue); if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) { dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType); resultType = symTable.semanticError; return resultType; } if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) { BFiniteType finiteType = (BFiniteType) expType; if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) { BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType); setLiteralValueForFiniteType(literalExpr, valueType); return valueType; } } else if (expType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expType; BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType); if (unionMember != symTable.noType) { return unionMember; } } literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal); resultType = symTable.decimalType; return symTable.decimalType; } private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) { types.setImplicitCastExpr(literalExpr, type, this.expType); this.resultType = type; literalExpr.isFiniteContext = true; } private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) { List<BFiniteType> finiteTypeMembers = 
unionType.getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(memFiniteType -> (BFiniteType) memFiniteType) .collect(Collectors.toList()); if (finiteTypeMembers.isEmpty()) { return symTable.semanticError; } int tag = matchType.tag; Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>(); for (BFiniteType finiteType : finiteTypeMembers) { Set<BLangExpression> set = new HashSet<>(); for (BLangExpression expression : finiteType.getValueSpace()) { if (expression.type.tag == tag) { set.add(expression); } } matchedValueSpace.addAll(set); } if (matchedValueSpace.isEmpty()) { return symTable.semanticError; } return new BFiniteType(null, matchedValueSpace); } private BType getIntLiteralType(DiagnosticPos pos, BType expType, BType literalType, Object literalValue) { switch (expType.tag) { case TypeTags.INT: return symTable.intType; case TypeTags.BYTE: if (types.isByteLiteralValue((Long) literalValue)) { return symTable.byteType; } break; case TypeTags.SIGNED32_INT: if (types.isSigned32LiteralValue((Long) literalValue)) { return symTable.signed32IntType; } break; case TypeTags.SIGNED16_INT: if (types.isSigned16LiteralValue((Long) literalValue)) { return symTable.signed16IntType; } break; case TypeTags.SIGNED8_INT: if (types.isSigned8LiteralValue((Long) literalValue)) { return symTable.signed8IntType; } break; case TypeTags.UNSIGNED32_INT: if (types.isUnsigned32LiteralValue((Long) literalValue)) { return symTable.unsigned32IntType; } break; case TypeTags.UNSIGNED16_INT: if (types.isUnsigned16LiteralValue((Long) literalValue)) { return symTable.unsigned16IntType; } break; case TypeTags.UNSIGNED8_INT: if (types.isUnsigned8LiteralValue((Long) literalValue)) { return symTable.unsigned8IntType; } break; default: } dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, literalType); resultType = symTable.semanticError; return resultType; } @Override public void visit(BLangListConstructorExpr listConstructor) { if (expType.tag == 
TypeTags.NONE || expType.tag == TypeTags.READONLY) { BType inferredType = getInferredTupleType(listConstructor, expType); resultType = inferredType == symTable.semanticError ? symTable.semanticError : types.checkType(listConstructor, inferredType, expType); return; } resultType = checkListConstructorCompatibility(expType, listConstructor); } @Override public void visit(BLangTableConstructorExpr tableConstructorExpr) { if (expType.tag == TypeTags.NONE) { List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { resultType = symTable.semanticError; return; } } if (tableConstructorExpr.recordLiteralList.size() == 0) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE); resultType = symTable.semanticError; return; } BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr); BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { recordLiteral.type = inherentMemberType; } if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) { resultType = symTable.semanticError; return; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } resultType = tableType; return; } BType applicableExpType = expType.tag == TypeTags.INTERSECTION ? 
((BIntersectionType) expType).effectiveType : expType; if (applicableExpType.tag == TypeTags.TABLE) { List<BType> memTypes = new ArrayList<>(); for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) { BLangRecordLiteral clonedExpr = recordLiteral; if (this.nonErrorLoggingCheck) { clonedExpr.cloneAttempt++; clonedExpr = nodeCloner.clone(recordLiteral); } BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint); if (recordType == symTable.semanticError) { resultType = symTable.semanticError; return; } memTypes.add(recordType); } if (((BTableType) applicableExpType).constraint.tag == TypeTags.MAP) { validateMapConstraintTable(tableConstructorExpr, applicableExpType); return; } if (!(validateTableType((BTableType) applicableExpType, tableConstructorExpr.recordLiteralList) && validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) { resultType = symTable.semanticError; return; } BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType), null); if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) { tableType.flags |= Flags.READONLY; } if (checkKeySpecifier(tableConstructorExpr, tableType)) { return; } BTableType expectedTableType = (BTableType) applicableExpType; if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) { tableType.fieldNameList = expectedTableType.fieldNameList; } resultType = tableType; } else if (applicableExpType.tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; BLangDiagnosticLog prevDLog = this.dlog.getCurrentLog(); this.dlog.setNonConsoleDLog(); List<BType> matchingTypes = new ArrayList<>(); BUnionType expectedType = (BUnionType) applicableExpType; for (BType memType : expectedType.getMemberTypes()) { BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr; if (this.nonErrorLoggingCheck) { 
tableConstructorExpr.cloneAttempt++; clonedTableExpr = nodeCloner.clone(tableConstructorExpr); } BType resultType = checkExpr(clonedTableExpr, env, memType); if (resultType != symTable.semanticError && dlog.getErrorCount() == 0 && isUniqueType(matchingTypes, resultType)) { matchingTypes.add(resultType); } dlog.resetErrorCount(); } this.dlog.setCurrentLog(prevDLog); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; if (matchingTypes.isEmpty()) { BLangTableConstructorExpr exprToLog = tableConstructorExpr; if (this.nonErrorLoggingCheck) { tableConstructorExpr.cloneAttempt++; exprToLog = nodeCloner.clone(tableConstructorExpr); } dlog.error(tableConstructorExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, getInferredTableType(exprToLog)); } else if (matchingTypes.size() != 1) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType); } else { resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0)); return; } resultType = symTable.semanticError; } else { resultType = symTable.semanticError; } } private BType getInferredTableType(BLangTableConstructorExpr exprToLog) { List<BType> memTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env); for (BType memType : memTypes) { if (memType == symTable.semanticError) { return symTable.semanticError; } } return new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, exprToLog), null); } private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) { if (tableConstructorExpr.tableKeySpecifier != null) { if (!(validateTableConstructorRecordLiterals(getTableKeyNameList(tableConstructorExpr. 
tableKeySpecifier), tableConstructorExpr.recordLiteralList))) { resultType = symTable.semanticError; return true; } tableType.fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); } return false; } private BType inferTableMemberType(List<BType> memTypes, BType expType) { if (memTypes.isEmpty()) { return ((BTableType) expType).constraint; } LinkedHashSet<BType> result = new LinkedHashSet<>(); result.add(memTypes.get(0)); BUnionType unionType = BUnionType.create(null, result); for (int i = 1; i < memTypes.size(); i++) { BType source = memTypes.get(i); if (!types.isAssignable(source, unionType)) { result.add(source); unionType = BUnionType.create(null, result); } } if (unionType.getMemberTypes().size() == 1) { return memTypes.get(0); } return unionType; } private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) { BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier; List<String> keySpecifierFieldNames = new ArrayList<>(); Set<BField> allFieldSet = new LinkedHashSet<>(); for (BType memType : memTypes) { allFieldSet.addAll(((BRecordType) memType).fields.values()); } Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet); for (BType memType : memTypes) { commonFieldSet.retainAll(((BRecordType) memType).fields.values()); } List<String> requiredFieldNames = new ArrayList<>(); if (keySpecifier != null) { for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) { requiredFieldNames.add(((BLangIdentifier) identifierNode).value); keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value); } } List<String> fieldNames = new ArrayList<>(); for (BField field : allFieldSet) { String fieldName = field.name.value; if (fieldNames.contains(fieldName)) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY, fieldName); return symTable.semanticError; } fieldNames.add(fieldName); boolean isOptional = true; for 
(BField commonField : commonFieldSet) { if (commonField.name.value.equals(fieldName)) { isOptional = false; requiredFieldNames.add(commonField.name.value); } } if (isOptional) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL)); } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) + Flags.asMask(EnumSet.of(Flag.READONLY)); } else if (requiredFieldNames.contains(fieldName)) { field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)); } } return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos); } private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, DiagnosticPos pos) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL); for (BField field : allFieldSet) { recordSymbol.scope.define(field.name, field.symbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = allFieldSet.stream().collect(getFieldCollector()); recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); recordType.sealed = true; recordType.restFieldType = symTable.noType; return recordType; } private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() { BinaryOperator<BField> mergeFunc = (u, v) -> { throw new IllegalStateException(String.format("Duplicate key %s", u)); }; return Collectors.toMap(field -> field.name.value, Function.identity(), mergeFunc, LinkedHashMap::new); } private boolean validateTableType(BTableType tableType, List<BLangRecordLiteral> recordLiterals) { BType constraint = 
tableType.constraint;
        // The table row (constraint) type must be a subtype of map<any|error>.
        if (!types.isAssignable(constraint, symTable.mapAllType)) {
            dlog.error(tableType.constraintPos, DiagnosticCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
            resultType = symTable.semanticError;
            return false;
        }
        List<String> fieldNameList = tableType.fieldNameList;
        if (fieldNameList != null) {
            // For an intersection constraint, validate the key fields against its effective type.
            return validateKeySpecifier(fieldNameList,
                    constraint.tag != TypeTags.INTERSECTION ? constraint :
                            ((BIntersectionType) constraint).effectiveType,
                    tableType.keyPos)
                    && validateTableConstructorRecordLiterals(fieldNameList, recordLiterals);
        }
        return true;
    }

    /**
     * Verifies that every key-specifier field in each row literal is initialized with a
     * constant-like expression (literal, mapping/list/XML literal). Logs
     * {@code KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT} and returns false on the first
     * violation; also sets {@code resultType} to semantic-error in that case.
     *
     * <p>NOTE(review): {@link #getRecordKeyValueField} may return {@code null} when a row does
     * not spell out the key field (e.g. it arrives via a spread operator) — this dereferences
     * the result unconditionally; presumably earlier required-field validation rules that case
     * out. TODO confirm against callers.
     */
    private boolean validateTableConstructorRecordLiterals(List<String> keySpecifierFieldNames,
                                                           List<BLangRecordLiteral> recordLiterals) {
        for (String fieldName : keySpecifierFieldNames) {
            for (BLangRecordLiteral recordLiteral : recordLiterals) {
                BLangRecordKeyValueField recordKeyValueField = getRecordKeyValueField(recordLiteral, fieldName);
                if (recordKeyValueField.getValue().getKind() == NodeKind.LITERAL ||
                        recordKeyValueField.getValue().getKind() == NodeKind.NUMERIC_LITERAL ||
                        recordKeyValueField.getValue().getKind() == NodeKind.RECORD_LITERAL_EXPR ||
                        recordKeyValueField.getValue().getKind() == NodeKind.ARRAY_LITERAL_EXPR ||
                        recordKeyValueField.getValue().getKind() == NodeKind.TUPLE_LITERAL_EXPR ||
                        recordKeyValueField.getValue().getKind() == NodeKind.XML_ELEMENT_LITERAL ||
                        recordKeyValueField.getValue().getKind() == NodeKind.XML_TEXT_LITERAL) {
                    continue;
                }
                dlog.error(recordLiteral.pos, DiagnosticCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT,
                        fieldName);
                resultType = symTable.semanticError;
                return false;
            }
        }
        return true;
    }

    /**
     * Finds the key-value field named {@code fieldName} in the given record literal, or
     * {@code null} if absent.
     *
     * <p>NOTE(review): every field is cast to {@link BLangRecordKeyValueField}; a spread-op or
     * var-name field in the literal would raise a ClassCastException here — presumably callers
     * only reach this for plain key-value rows. TODO confirm.
     */
    private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral,
                                                            String fieldName) {
        for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
            BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField;
            if (fieldName.equals(recordKeyValueField.key.toString())) {
                return recordKeyValueField;
            }
        }
        return null;
    }

    private
boolean validateKeySpecifier(List<String> fieldNameList, BType constraint, DiagnosticPos pos) { for (String fieldName : fieldNameList) { BField field = types.getTableConstraintField(constraint, fieldName); if (field == null) { dlog.error(pos, DiagnosticCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) { dlog.error(pos, DiagnosticCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName); resultType = symTable.semanticError; return false; } if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName); resultType = symTable.semanticError; return false; } if (!types.isAssignable(field.type, symTable.anydataType)) { dlog.error(pos, DiagnosticCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint); resultType = symTable.semanticError; return false; } } return true; } private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) { BType constraintType = tableType.constraint; if (tableConstructorExpr.tableKeySpecifier != null) { List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier); if (tableType.fieldNameList == null && !validateKeySpecifier(fieldNameList, constraintType.tag != TypeTags.INTERSECTION ? 
constraintType : ((BIntersectionType) constraintType).effectiveType, tableConstructorExpr.tableKeySpecifier.pos)) { return false; } if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticCode.TABLE_KEY_SPECIFIER_MISMATCH, tableType.fieldNameList.toString(), fieldNameList.toString()); resultType = symTable.semanticError; return false; } } BType keyTypeConstraint = tableType.keyTypeConstraint; if (keyTypeConstraint != null) { List<BType> memberTypes = new ArrayList<>(); if (keyTypeConstraint.tag == TypeTags.TUPLE) { for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) { memberTypes.add((BType) type); } } else { memberTypes.add(keyTypeConstraint); } if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) { return true; } if (tableConstructorExpr.tableKeySpecifier == null || tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) { dlog.error(tableConstructorExpr.pos, DiagnosticCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT, memberTypes.size(), tableConstructorExpr.tableKeySpecifier == null ? 0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size()); resultType = symTable.semanticError; return false; } List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier. 
fieldNameIdentifierList; int index = 0; for (IdentifierNode identifier : fieldNameIdentifierList) { BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value); if (!types.isAssignable(field.type, memberTypes.get(index))) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT, fieldNameIdentifierList.toString(), memberTypes.toString()); resultType = symTable.semanticError; return false; } index++; } } return true; } private void validateMapConstraintTable(BLangTableConstructorExpr tableConstructorExpr, BType expType) { if (((BTableType) expType).fieldNameList != null || ((BTableType) expType).keyTypeConstraint != null) { dlog.error(((BTableType) expType).keyPos, DiagnosticCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT); resultType = symTable.semanticError; return; } if (tableConstructorExpr.tableKeySpecifier != null) { dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT); resultType = symTable.semanticError; return; } if (!(validateTableType((BTableType) expType, tableConstructorExpr.recordLiteralList))) { resultType = symTable.semanticError; return; } resultType = expType; } private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) { List<String> fieldNamesList = new ArrayList<>(); for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) { fieldNamesList.add(((BLangIdentifier) identifier).value); } return fieldNamesList; } private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) { if (fieldNames == null) { return symTable.semanticError; } List<BType> memTypes = new ArrayList<>(); for (String fieldName : fieldNames) { BField tableConstraintField = types.getTableConstraintField(constraintType, fieldName); if (tableConstraintField == null) { return symTable.semanticError; } BType fieldType = 
tableConstraintField.type; memTypes.add(fieldType); } if (memTypes.size() == 1) { return memTypes.get(0); } return new BTupleType(memTypes); } private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; BLangDiagnosticLog prevDLog = this.dlog.getCurrentLog(); this.dlog.setNonConsoleDLog(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.getErrorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } dlog.resetErrorCount(); } this.dlog.setCurrentLog(prevDLog); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; if (compatibleTypes.isEmpty()) { BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.clone(listConstructor); } BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType); if (!erroredExpType && inferredTupleType != symTable.semanticError) { dlog.error(listConstructor.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, inferredTupleType); } return symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(listConstructor.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor); } if (tag 
== TypeTags.INTERSECTION) { return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor); } BType possibleType = getListConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.ARRAY: return checkArrayType(listConstructor, (BArrayType) possibleType); case TypeTags.TUPLE: return checkTupleType(listConstructor, (BTupleType) possibleType); case TypeTags.READONLY: return checkReadOnlyListType(listConstructor); case TypeTags.TYPEDESC: List<BType> results = new ArrayList<>(); listConstructor.isTypedescExpr = true; for (int i = 0; i < listConstructor.exprs.size(); i++) { results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType)); } List<BType> actualTypes = new ArrayList<>(); for (int i = 0; i < listConstructor.exprs.size(); i++) { final BLangExpression expr = listConstructor.exprs.get(i); if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) { actualTypes.add(((BLangTypedescExpr) expr).resolvedType); } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { actualTypes.add(((BLangSimpleVarRef) expr).symbol.type); } else { actualTypes.add(results.get(i)); } } if (actualTypes.size() == 1) { listConstructor.typedescType = actualTypes.get(0); } else { listConstructor.typedescType = new BTupleType(actualTypes); } return new BTypedescType(listConstructor.typedescType, null); } BLangListConstructorExpr exprToLog = listConstructor; if (this.nonErrorLoggingCheck) { listConstructor.cloneAttempt++; exprToLog = nodeCloner.clone(listConstructor); } if (bType == symTable.semanticError) { getInferredTupleType(exprToLog, symTable.semanticError); } else { dlog.error(listConstructor.pos, DiagnosticCode.INCOMPATIBLE_TYPES, bType, getInferredTupleType(exprToLog, symTable.noType)); } return symTable.semanticError; } private BType getListConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.ARRAY: case TypeTags.TUPLE: case TypeTags.READONLY: case TypeTags.TYPEDESC: return type; case 
TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) { BType eType = arrayType.eType; if (arrayType.state == BArrayState.OPEN_SEALED) { arrayType.size = listConstructor.exprs.size(); arrayType.state = BArrayState.CLOSED_SEALED; } else if ((arrayType.state != BArrayState.UNSEALED) && (arrayType.size != listConstructor.exprs.size())) { if (arrayType.size < listConstructor.exprs.size()) { dlog.error(listConstructor.pos, DiagnosticCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size()); return symTable.semanticError; } if (!types.hasFillerValue(eType)) { dlog.error(listConstructor.pos, DiagnosticCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType); return symTable.semanticError; } } boolean errored = false; for (BLangExpression expr : listConstructor.exprs) { if (exprIncompatible(eType, expr) && !errored) { errored = true; } } return errored ? 
symTable.semanticError : arrayType; } private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) { List<BLangExpression> exprs = listConstructor.exprs; List<BType> memberTypes = tupleType.tupleTypes; BType restType = tupleType.restType; int listExprSize = exprs.size(); int memberTypeSize = memberTypes.size(); if (listExprSize < memberTypeSize) { for (int i = listExprSize; i < memberTypeSize; i++) { if (!types.hasFillerValue(memberTypes.get(i))) { dlog.error(listConstructor.pos, DiagnosticCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } } } else if (listExprSize > memberTypeSize && restType == null) { dlog.error(listConstructor.pos, DiagnosticCode.SYNTAX_ERROR, "tuple and expression size does not match"); return symTable.semanticError; } boolean errored = false; int nonRestCountToCheck = listExprSize < memberTypeSize ? listExprSize : memberTypeSize; for (int i = 0; i < nonRestCountToCheck; i++) { if (exprIncompatible(memberTypes.get(i), exprs.get(i)) && !errored) { errored = true; } } for (int i = nonRestCountToCheck; i < exprs.size(); i++) { if (exprIncompatible(restType, exprs.get(i)) && !errored) { errored = true; } } return errored ? 
symTable.semanticError : tupleType;
    }

    /**
     * Type-checks a list constructor against the {@code readonly} type.
     *
     * <p>In normal (error-logging) mode, infers an immutable tuple type and checks it against
     * {@code readonly}. In silent probing mode ({@code nonErrorLoggingCheck}), only verifies
     * each element is readonly-compatible without committing a type.
     */
    private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) {
        if (!this.nonErrorLoggingCheck) {
            BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType);
            if (inferredType == symTable.semanticError) {
                return symTable.semanticError;
            }
            return types.checkType(listConstructor, inferredType, symTable.readonlyType);
        }
        for (BLangExpression expr : listConstructor.exprs) {
            if (exprIncompatible(symTable.readonlyType, expr)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }

    /**
     * Returns true if {@code expr} fails to type-check against {@code eType}.
     *
     * <p>Already-checked expressions are judged by their recorded type. In silent probing mode
     * the expression is cloned first so speculative checking does not mutate the original AST
     * node; {@code cloneAttempt} is bumped to keep clones distinct.
     */
    private boolean exprIncompatible(BType eType, BLangExpression expr) {
        if (expr.typeChecked) {
            return expr.type == symTable.semanticError;
        }
        BLangExpression exprToCheck = expr;
        if (this.nonErrorLoggingCheck) {
            expr.cloneAttempt++;
            exprToCheck = nodeCloner.clone(expr);
        }
        return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError;
    }

    // Convenience overload: checks each expression with no particular expected type.
    private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) {
        return checkExprList(exprs, env, symTable.noType);
    }

    /**
     * Type-checks each expression in {@code exprs} under the given environment and expected
     * type, returning the resolved types in order (semantic-error entries included).
     *
     * <p>Temporarily swaps this checker's {@code env}/{@code expType} fields and restores them
     * afterwards — the checker is a stateful visitor, so the save/restore is load-bearing.
     */
    private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
        List<BType> types = new ArrayList<>();
        SymbolEnv prevEnv = this.env;
        BType preExpType = this.expType;
        this.env = env;
        this.expType = expType;
        for (BLangExpression e : exprs) {
            checkExpr(e, this.env, expType);
            types.add(resultType);
        }
        this.env = prevEnv;
        this.expType = preExpType;
        return types;
    }

    /**
     * Infers a tuple type for a list constructor from its member expressions.
     *
     * @return the inferred tuple type (flagged READONLY when the expected type is
     *         {@code readonly}), or semantic-error if any member fails to check
     */
    private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) {
        List<BType> memTypes = checkExprList(listConstructor.exprs, env, expType);
        for (BType memType : memTypes) {
            if (memType == symTable.semanticError) {
                return symTable.semanticError;
            }
        }
        BTupleType tupleType = new BTupleType(memTypes);
        if (expType.tag != TypeTags.READONLY) {
            return tupleType;
        }
        tupleType.flags |= Flags.READONLY;
        return tupleType;
    }

    public void visit(BLangRecordLiteral recordLiteral) {
        int expTypeTag = expType.tag;
        if (expTypeTag == TypeTags.NONE ||
expTypeTag == TypeTags.READONLY) { expType = defineInferredRecordType(recordLiteral, expType); } else if (expTypeTag == TypeTags.OBJECT) { dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_RECORD_LITERAL, expType); resultType = symTable.semanticError; return; } resultType = getEffectiveMappingType(recordLiteral, checkMappingConstructorCompatibility(expType, recordLiteral)); } private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) { if (applicableMappingType == symTable.semanticError || (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags, Flags.READONLY))) { return applicableMappingType; } Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>(); LinkedHashMap<String, BField> applicableTypeFields = applicableMappingType.tag == TypeTags.RECORD ? ((BRecordType) applicableMappingType).fields : new LinkedHashMap<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { continue; } String name; if (field.isKeyValueField()) { BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field; if (!keyValueField.readonly) { continue; } BLangExpression keyExpr = keyValueField.key.expr; if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { name = ((BLangSimpleVarRef) keyExpr).variableName.value; } else { name = (String) ((BLangLiteral) keyExpr).value; } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; if (!varNameField.readonly) { continue; } name = varNameField.variableName.value; } if (applicableTypeFields.containsKey(name) && Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) { continue; } readOnlyFields.put(name, field); } if (readOnlyFields.isEmpty()) { return applicableMappingType; } PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, 
VIRTUAL); LinkedHashMap<String, BField> newFields = new LinkedHashMap<>(); for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) { RecordLiteralNode.RecordField field = readOnlyEntry.getValue(); String key = readOnlyEntry.getKey(); Name fieldName = names.fromString(key); BType readOnlyFieldType; if (field.isKeyValueField()) { readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.type; } else { readOnlyFieldType = ((BLangRecordVarNameField) field).type; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{ add(Flag.REQUIRED); add(Flag.READONLY); }}), fieldName, pkgID, readOnlyFieldType, recordSymbol, ((BLangNode) field).pos, VIRTUAL); newFields.put(key, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); if (applicableMappingType.tag == TypeTags.MAP) { recordType.sealed = false; recordType.restFieldType = ((BMapType) applicableMappingType).constraint; } else { BRecordType applicableRecordType = (BRecordType) applicableMappingType; boolean allReadOnlyFields = true; for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) { String fieldName = origEntry.getKey(); BField field = origEntry.getValue(); if (readOnlyFields.containsKey(fieldName)) { continue; } BVarSymbol origFieldSymbol = field.symbol; int origFieldFlags = origFieldSymbol.flags; if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) { allReadOnlyFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID, origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL); newFields.put(fieldName, new BField(field.name, null, fieldSymbol)); recordSymbol.scope.define(field.name, fieldSymbol); } recordType.sealed = applicableRecordType.sealed; recordType.restFieldType = applicableRecordType.restFieldType; if (recordType.sealed && allReadOnlyFields) { recordType.flags 
|= Flags.READONLY; recordType.tsymbol.flags |= Flags.READONLY; } } recordType.fields = newFields; recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); if (applicableMappingType.tag == TypeTags.MAP) { recordLiteral.expectedType = applicableMappingType; } return recordType; } private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) { int tag = bType.tag; if (tag == TypeTags.UNION) { boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck; this.nonErrorLoggingCheck = true; BLangDiagnosticLog prevDLog = this.dlog.getCurrentLog(); this.dlog.setNonConsoleDLog(); List<BType> compatibleTypes = new ArrayList<>(); boolean erroredExpType = false; for (BType memberType : ((BUnionType) bType).getMemberTypes()) { if (memberType == symTable.semanticError) { if (!erroredExpType) { erroredExpType = true; } continue; } BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType); if (listCompatibleMemType == symTable.semanticError) { continue; } BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType, mappingConstructor); if (memCompatibiltyType != symTable.semanticError && dlog.getErrorCount() == 0 && isUniqueType(compatibleTypes, memCompatibiltyType)) { compatibleTypes.add(memCompatibiltyType); } dlog.resetErrorCount(); } this.dlog.setCurrentLog(prevDLog); this.nonErrorLoggingCheck = prevNonErrorLoggingCheck; if (compatibleTypes.isEmpty()) { if (!erroredExpType) { reportIncompatibleMappingConstructorError(mappingConstructor, bType); } validateSpecifiedFields(mappingConstructor, symTable.semanticError); return 
symTable.semanticError; } else if (compatibleTypes.size() != 1) { dlog.error(mappingConstructor.pos, DiagnosticCode.AMBIGUOUS_TYPES, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor); } if (tag == TypeTags.INTERSECTION) { return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor); } BType possibleType = getMappingConstructorCompatibleNonUnionType(bType); switch (possibleType.tag) { case TypeTags.MAP: return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType : symTable.semanticError; case TypeTags.RECORD: boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType); boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType, mappingConstructor.fields, mappingConstructor.pos); return isSpecifiedFieldsValid && hasAllRequiredFields ? 
possibleType : symTable.semanticError; case TypeTags.READONLY: return checkReadOnlyMappingType(mappingConstructor); } reportIncompatibleMappingConstructorError(mappingConstructor, bType); validateSpecifiedFields(mappingConstructor, symTable.semanticError); return symTable.semanticError; } private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) { if (!this.nonErrorLoggingCheck) { BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType); if (inferredType == symTable.semanticError) { return symTable.semanticError; } return checkMappingConstructorCompatibility(inferredType, mappingConstructor); } for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BLangExpression exprToCheck; if (field.isKeyValueField()) { exprToCheck = ((BLangRecordKeyValueField) field).valueExpr; } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { exprToCheck = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } else { exprToCheck = (BLangRecordVarNameField) field; } if (exprIncompatible(symTable.readonlyType, exprToCheck)) { return symTable.semanticError; } } return symTable.readonlyType; } private BType getMappingConstructorCompatibleNonUnionType(BType type) { switch (type.tag) { case TypeTags.MAP: case TypeTags.RECORD: case TypeTags.READONLY: return type; case TypeTags.JSON: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env, symTable, anonymousModelHelper, names); case TypeTags.ANYDATA: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType, env, symTable, anonymousModelHelper, names); case TypeTags.ANY: return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? 
symTable.mapType : ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env, symTable, anonymousModelHelper, names); case TypeTags.INTERSECTION: return ((BIntersectionType) type).effectiveType; } return symTable.semanticError; } private boolean isMappingConstructorCompatibleType(BType type) { return type.tag == TypeTags.RECORD || type.tag == TypeTags.MAP; } private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) { if (expType == symTable.semanticError) { return; } if (expType.tag != TypeTags.UNION) { dlog.error(mappingConstructorExpr.pos, DiagnosticCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType); return; } BUnionType unionType = (BUnionType) expType; BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]); if (memberTypes.length == 2) { BRecordType recType = null; if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[0]; } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) { recType = (BRecordType) memberTypes[1]; } if (recType != null) { validateSpecifiedFields(mappingConstructorExpr, recType); validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos); return; } } for (BType bType : memberTypes) { if (isMappingConstructorCompatibleType(bType)) { dlog.error(mappingConstructorExpr.pos, DiagnosticCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR, unionType); return; } } dlog.error(mappingConstructorExpr.pos, DiagnosticCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType); } private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) { boolean isFieldsValid = true; for (RecordLiteralNode.RecordField field : mappingConstructor.fields) { BType checkedType = checkMappingField(field, possibleType); if (isFieldsValid && checkedType == symTable.semanticError) { isFieldsValid = false; } } return 
// (continuation) tail of validateSpecifiedFields(): result of checking each specified field.
isFieldsValid; }

/**
 * Verifies that every field flagged {@code REQUIRED} on the expected record type appears
 * among the fields written in a mapping constructor; logs MISSING_REQUIRED_RECORD_FIELD
 * for each absent one.
 *
 * @param type            expected record type
 * @param specifiedFields fields written in the mapping constructor
 * @param pos             position used for error reporting
 * @return true only if no required field is missing
 */
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
                                       DiagnosticPos pos) {
    HashSet<String> specFieldNames = getFieldNames(specifiedFields);
    boolean hasAllRequiredFields = true;
    for (BField field : type.fields.values()) {
        String fieldName = field.name.value;
        if (!specFieldNames.contains(fieldName) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
            // Keep iterating so every missing field is reported, not just the first.
            if (hasAllRequiredFields) {
                hasAllRequiredFields = false;
            }
        }
    }
    return hasAllRequiredFields;
}

/**
 * Collects the statically known field names supplied in a mapping constructor:
 * key-value fields (non-computed keys only), variable-name fields, and the required
 * field names contributed by spread-operator fields.
 */
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) {
    HashSet<String> fieldNames = new HashSet<>();
    for (RecordLiteralNode.RecordField specifiedField : specifiedFields) {
        if (specifiedField.isKeyValueField()) {
            String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField);
            if (name == null) {
                // Computed or otherwise non-constant key: name unknown at compile time.
                continue;
            }
            fieldNames.add(name);
        } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField));
        } else {
            // Spread field: pull in the non-optional field names of the spread expression's type.
            fieldNames.addAll(getSpreadOpFieldRequiredFieldNames(
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField));
        }
    }
    return fieldNames;
}

/**
 * Returns the field name of a key-value field when it is statically known
 * (a simple variable reference or a string literal key); null for computed keys.
 */
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
    BLangRecordKey key = field.key;
    if (key.computedKey) {
        return null;
    }
    BLangExpression keyExpr = key.expr;
    if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpr).variableName.value;
    } else if (keyExpr.getKind() == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) keyExpr).value;
    }
    return null;
}

/** Returns the field name implied by a variable-name field (e.g. {x} uses "x"). */
private String getVarNameFieldName(BLangRecordVarNameField field) {
    return field.variableName.value;
}

/**
 * Type-checks the spread expression and, when it is a record, returns the names of its
 * non-optional fields; any other type contributes no statically known names.
 * (Body continues on the next source line.)
 */
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) {
    BType spreadType = checkExpr(field.expr, env);
    if (spreadType.tag !=
// (continuation) tail of getSpreadOpFieldRequiredFieldNames(): only records contribute names.
TypeTags.RECORD) {
    return Collections.emptyList();
}
List<String> fieldNames = new ArrayList<>();
for (BField bField : ((BRecordType) spreadType).getFields().values()) {
    // Optional fields do not count towards required-field satisfaction.
    if (!Symbols.isOptional(bField.symbol)) {
        fieldNames.add(bField.name.value);
    }
}
return fieldNames; }

/**
 * Type-checks a worker flush expression. Validates that the named worker (if any) exists
 * in the current scope, then types the expression as {@code error|()}.
 */
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    if (workerFlushExpr.workerIdentifier != null) {
        String workerName = workerFlushExpr.workerIdentifier.getValue();
        if (!this.workerExists(this.env, workerName)) {
            this.dlog.error(workerFlushExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        }
    }
    // flush yields error? — an error from the flushed sends, or nil on success.
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, actualType, expType);
}

/**
 * Type-checks a synchronous worker send ({@code ->> w}). Resolves the target worker
 * symbol, checks the sent value is anydata, and validates the worker exists.
 * (Result-type assignment continues on the next source line.)
 */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier));
    if (symTable.notFoundSymbol.equals(symbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerType = symbol.type;
    }
    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);
    // Only anydata values may cross worker boundaries.
    if (!syncSendExpr.expr.type.isAnydata()) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.type);
    }
    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(this.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
    }
    syncSendExpr.expectedType = expType;
    resultType = expType == symTable.noType ?
symTable.nilType : expType; } @Override public void visit(BLangWorkerReceive workerReceiveExpr) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier)); if (workerReceiveExpr.isChannel) { this.dlog.error(workerReceiveExpr.pos, DiagnosticCode.UNDEFINED_ACTION); return; } workerReceiveExpr.env = this.env; if (symTable.notFoundSymbol.equals(symbol)) { workerReceiveExpr.workerType = symTable.semanticError; } else { workerReceiveExpr.workerType = symbol.type; } if (symTable.noType == this.expType) { this.dlog.error(workerReceiveExpr.pos, DiagnosticCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION); } workerReceiveExpr.type = this.expType; resultType = this.expType; } private boolean workerExists(SymbolEnv env, String workerName) { if (workerName.equals(DEFAULT_WORKER_NAME)) { return true; } BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName)); return symbol != this.symTable.notFoundSymbol && symbol.type.tag == TypeTags.FUTURE && ((BFutureType) symbol.type).workerDerivative; } @Override public void visit(BLangConstRef constRef) { constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env, names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName)); types.setImplicitCastExpr(constRef, constRef.type, expType); resultType = constRef.type; } public void visit(BLangSimpleVarRef varRefExpr) { BType actualType = symTable.semanticError; Name varName = names.fromIdNode(varRefExpr.variableName); if (varName == Names.IGNORE) { if (varRefExpr.lhsVar) { varRefExpr.type = this.symTable.anyType; } else { varRefExpr.type = this.symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED); } varRefExpr.symbol = new BVarSymbol(0, varName, env.enclPkg.symbol.pkgID, varRefExpr.type, env.scope.owner, varRefExpr.pos, VIRTUAL); resultType = varRefExpr.type; return; } Name compUnitName = getCurrentCompUnit(varRefExpr); varRefExpr.pkgSymbol = 
symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName); if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) { dlog.error(varRefExpr.pos, DiagnosticCode.UNDEFINED_MODULE, varRefExpr.pkgAlias); } if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) { actualType = symTable.stringType; } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env, names.fromIdNode(varRefExpr.pkgAlias), varName); if (symbol == symTable.notFoundSymbol && env.enclType != null) { Name objFuncName = names.fromString(Symbols .getAttachedFuncSymbolName(env.enclType.type.tsymbol.name.value, varName.value)); symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName, env.enclType.type.tsymbol); } if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) { BVarSymbol varSym = (BVarSymbol) symbol; checkSelfReferences(varRefExpr.pos, env, varSym); varRefExpr.symbol = varSym; actualType = varSym.type; markAndRegisterClosureVariable(symbol, varRefExpr.pos); } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) { actualType = symbol.type.tag == TypeTags.TYPEDESC ? 
symbol.type : new BTypedescType(symbol.type, null); varRefExpr.symbol = symbol; } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) { BConstantSymbol constSymbol = (BConstantSymbol) symbol; varRefExpr.symbol = constSymbol; BType symbolType = symbol.type; if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE || (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream() .anyMatch(memType -> memType.tag == TypeTags.FINITE && types.isAssignable(symbolType, memType)))) { actualType = symbolType; } else { actualType = constSymbol.literalType; } if (varRefExpr.lhsVar || varRefExpr.compoundAssignmentLhsVar) { actualType = symTable.semanticError; dlog.error(varRefExpr.pos, DiagnosticCode.CANNOT_UPDATE_CONSTANT_VALUE); } } else { logUndefinedSymbolError(varRefExpr.pos, varName.value); } } if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) { dlog.error(varRefExpr.pos, DiagnosticCode.SEALED_ARRAY_TYPE_CAN_NOT_INFER_SIZE); return; } resultType = types.checkType(varRefExpr, actualType, expType); } @Override public void visit(BLangRecordVarRef varRefExpr) { LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString(recordName), env.enclPkg.symbol.pkgID, null, env.scope.owner, varRefExpr.pos, SOURCE); symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env); boolean unresolvedReference = false; for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) { ((BLangVariableReference) recordRefField.variableReference).lhsVar = true; checkExpr(recordRefField.variableReference, env); if (((BLangVariableReference) recordRefField.variableReference).symbol == null || !isValidVariableReference(recordRefField.variableReference)) { unresolvedReference = true; continue; } BVarSymbol bVarSymbol = 
(BVarSymbol) ((BLangVariableReference) recordRefField.variableReference).symbol; BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos, new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol, varRefExpr.pos, SOURCE)); fields.put(field.name.value, field); } BLangExpression restParam = (BLangExpression) varRefExpr.restParam; if (restParam != null) { checkExpr(restParam, env); unresolvedReference = !isValidVariableReference(restParam); } if (unresolvedReference) { resultType = symTable.semanticError; return; } BRecordType bRecordType = new BRecordType(recordSymbol); bRecordType.fields = fields; recordSymbol.type = bRecordType; varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name, env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos, SOURCE); if (restParam == null) { bRecordType.sealed = true; bRecordType.restFieldType = symTable.noType; } else if (restParam.type == symTable.semanticError) { bRecordType.restFieldType = symTable.mapType; } else { BMapType restParamType = (BMapType) restParam.type; bRecordType.restFieldType = restParamType.constraint; } resultType = bRecordType; } @Override public void visit(BLangErrorVarRef varRefExpr) { if (varRefExpr.typeNode != null) { BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env); varRefExpr.type = bType; checkIndirectErrorVarRef(varRefExpr); resultType = bType; return; } if (varRefExpr.message != null) { varRefExpr.message.lhsVar = true; checkExpr(varRefExpr.message, env); if (!types.isAssignable(symTable.stringType, varRefExpr.message.type)) { dlog.error(varRefExpr.message.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType, varRefExpr.message.type); } } if (varRefExpr.cause != null) { varRefExpr.cause.lhsVar = true; checkExpr(varRefExpr.cause, env); if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.type)) { dlog.error(varRefExpr.cause.pos, 
DiagnosticCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType, varRefExpr.cause.type); } } boolean unresolvedReference = false; for (BLangNamedArgsExpression detailItem : varRefExpr.detail) { BLangVariableReference refItem = (BLangVariableReference) detailItem.expr; refItem.lhsVar = true; checkExpr(refItem, env); if (!isValidVariableReference(refItem)) { unresolvedReference = true; continue; } if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { dlog.error(refItem.pos, DiagnosticCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN, refItem); unresolvedReference = true; continue; } if (refItem.symbol == null) { unresolvedReference = true; } } if (varRefExpr.restVar != null) { varRefExpr.restVar.lhsVar = true; if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { checkExpr(varRefExpr.restVar, env); unresolvedReference = unresolvedReference || varRefExpr.restVar.symbol == null || !isValidVariableReference(varRefExpr.restVar); } else if (varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) { unresolvedReference = checkErrorRestParamVarRef(varRefExpr, unresolvedReference); } } if (unresolvedReference) { resultType = symTable.semanticError; return; } BType errorRefRestFieldType; if (varRefExpr.restVar == null) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) { errorRefRestFieldType = symTable.anydataOrReadonly; } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) { errorRefRestFieldType = varRefExpr.restVar.type; } else if (varRefExpr.restVar.type.tag == TypeTags.MAP) { errorRefRestFieldType = ((BMapType) varRefExpr.restVar.type).constraint; } else { 
// (continuation) tail of visit(BLangErrorVarRef): rest binding has an unusable type.
dlog.error(varRefExpr.restVar.pos, DiagnosticCode.INCOMPATIBLE_TYPES, varRefExpr.restVar.type,
        symTable.detailType);
resultType = symTable.semanticError;
return; }
// Build the detail-map type for the resulting error type: the default detail type when no
// rest constraint was derived, otherwise a map of the derived rest-field type.
BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
        ? symTable.errorType.detailType
        : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType); }

/**
 * Type-checks the parts of an error variable reference that uses an indirect
 * (user-defined) error type: each named detail item, the rest binding, and the
 * message/cause bindings (marked as LHS since they are assignment targets).
 */
private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) {
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        // First type the underlying expression, then the named-arg node against that type.
        checkExpr(detailItem.expr, env);
        checkExpr(detailItem, env, detailItem.expr.type);
    }
    if (varRefExpr.restVar != null) {
        checkExpr(varRefExpr.restVar, env);
    }
    if (varRefExpr.message != null) {
        varRefExpr.message.lhsVar = true;
        checkExpr(varRefExpr.message, env);
    }
    if (varRefExpr.cause != null) {
        varRefExpr.cause.lhsVar = true;
        checkExpr(varRefExpr.cause, env);
    }
}

/**
 * Resolves the base variable of an error rest binding written as a field/index access and
 * derives the rest binding's type from the base map's constraint (capped at
 * anydata|readonly). Only map-typed bases are supported.
 *
 * @return the (possibly updated) unresolved-reference flag
 */
private boolean checkErrorRestParamVarRef(BLangErrorVarRef varRefExpr, boolean unresolvedReference) {
    BLangAccessExpression accessExpression = (BLangAccessExpression) varRefExpr.restVar;
    Name exprName = names.fromIdNode(((BLangSimpleVarRef) accessExpression.expr).variableName);
    BSymbol fSym = symResolver.lookupSymbolInMainSpace(env, exprName);
    // NOTE(review): elsewhere in this file unresolved lookups are detected by comparing
    // against symTable.notFoundSymbol (see visit(BLangWorkerSyncSendExpr)); confirm this
    // null check is the intended guard and not a missed notFoundSymbol comparison.
    if (fSym != null) {
        if (fSym.type.getKind() == TypeKind.MAP) {
            BType constraint = ((BMapType) fSym.type).constraint;
            if (types.isAssignable(constraint, symTable.anydataOrReadonly)) {
                varRefExpr.restVar.type = constraint;
            } else {
                varRefExpr.restVar.type = symTable.anydataOrReadonly;
            }
        } else {
            // Rest binding through a non-map record field is not implemented.
            throw new UnsupportedOperationException("rec field base access");
        }
    } else {
        unresolvedReference = true;
    }
    return unresolvedReference;
}

/**
 * Type-checks a tuple variable reference: each member expression is marked as an LHS
 * target and checked. (Body continues on the next source line.)
 */
@Override
public void visit(BLangTupleVarRef varRefExpr) {
    List<BType> results = new ArrayList<>();
    for (int i = 0; i < varRefExpr.expressions.size(); i++) {
        ((BLangVariableReference) varRefExpr.expressions.get(i)).lhsVar = true;
        results.add(checkExpr(varRefExpr.expressions.get(i), env,
symTable.noType)); } BTupleType actualType = new BTupleType(results); if (varRefExpr.restParam != null) { BLangExpression restExpr = (BLangExpression) varRefExpr.restParam; ((BLangVariableReference) restExpr).lhsVar = true; BType checkedType = checkExpr(restExpr, env, symTable.noType); if (checkedType.tag != TypeTags.ARRAY) { dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType); resultType = symTable.semanticError; return; } actualType.restType = ((BArrayType) checkedType).eType; } resultType = types.checkType(varRefExpr, actualType, expType); } /** * This method will recursively check if a multidimensional array has at least one open sealed dimension. * * @param arrayType array to check if open sealed * @return true if at least one dimension is open sealed */ public boolean isArrayOpenSealedType(BArrayType arrayType) { if (arrayType.state == BArrayState.OPEN_SEALED) { return true; } if (arrayType.eType.tag == TypeTags.ARRAY) { return isArrayOpenSealedType((BArrayType) arrayType.eType); } return false; } /** * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the * enclosing invokable node's environment, which are outside of the scope of a lambda function. 
*/ // (closes the javadoc begun on the previous source line)
/**
 * Walks outward from {@code env} while still inside the given invokable, stopping early
 * at arrow-expression, transaction, retry, or on-fail environments; used to find the
 * environment in which closure variables should be looked up.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
        return env.enclEnv;
    }
    if (env.enclEnv.node != null && ((env.enclEnv.node.getKind() == NodeKind.TRANSACTION)
            || (env.enclEnv.node.getKind() == NodeKind.RETRY)
            || (env.enclEnv.node.getKind() == NodeKind.ON_FAIL))) {
        return env.enclEnv;
    }
    // Still within the same invokable: keep climbing.
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}

/**
 * Overload of the walk above for record-type bodies: climbs while the enclosing type is
 * still the given record type node, with the same arrow/transaction/retry/on-fail stops.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
    if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
        return env.enclEnv;
    }
    if (env.enclEnv.node != null && ((env.enclEnv.node.getKind() == NodeKind.TRANSACTION)
            || (env.enclEnv.node.getKind() == NodeKind.RETRY)
            || (env.enclEnv.node.getKind() == NodeKind.ON_FAIL))) {
        return env.enclEnv;
    }
    if (env.enclType != null && env.enclType == recordTypeNode) {
        return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode);
    }
    return env;
}

/**
 * True when {@code symbol} matches one of the given parameters by name and type tag.
 * NOTE(review): matching by name + type *tag* only — confirm a full type comparison is
 * not required here.
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    return params.stream().anyMatch(param -> (param.symbol.name.equals(symbol.name)
            && param.type.tag == symbol.type.tag));
}

/**
 * Type-checks a field access expression ({@code a.b}). Propagates LHS flags to the base
 * expression, rejects ns-prefixed field access on non-XML values, then dispatches to
 * optional/required field-access checking. (Body continues on the next source line.)
 */
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // The base expression inherits assignment-target status from this access.
    ((BLangVariableReference) fieldAccessExpr.expr).lhsVar = fieldAccessExpr.lhsVar;
    ((BLangVariableReference) fieldAccessExpr.expr).compoundAssignmentLhsVar =
            fieldAccessExpr.compoundAssignmentLhsVar;
    BType varRefType = getTypeOfExprInFieldAccess(fieldAccessExpr.expr);
    // ns:field access is only meaningful on XML values.
    if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess
            && !isXmlAccess(fieldAccessExpr)) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.INVALID_FIELD_ACCESS_EXPRESSION);
        resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (fieldAccessExpr.fieldKind == FieldKind.ALL && varRefType.tag !=
TypeTags.XML) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_GET_ALL_FIELDS, varRefType); actualType = symTable.semanticError; } else { if (fieldAccessExpr.optionalFieldAccess) { if (fieldAccessExpr.lhsVar || fieldAccessExpr.compoundAssignmentLhsVar) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS); resultType = symTable.semanticError; return; } actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); } else { actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field)); if (actualType != symTable.semanticError && (fieldAccessExpr.lhsVar || fieldAccessExpr.compoundAssignmentLhsVar)) { if (isAllReadonlyTypes(varRefType)) { if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType); resultType = symTable.semanticError; return; } } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) && isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, fieldAccessExpr.field.value, varRefType); resultType = symTable.semanticError; return; } } } } resultType = types.checkType(fieldAccessExpr, actualType, this.expType); } private boolean isAllReadonlyTypes(BType type) { if (type.tag != TypeTags.UNION) { return Symbols.isFlagOn(type.flags, Flags.READONLY); } for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isAllReadonlyTypes(memberType)) { return false; } } return true; } private boolean isInitializationInInit(BType type) { BObjectType objectType = (BObjectType) type; BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) objectType.tsymbol; BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc; return env.enclInvokable != null && initializerFunc != null && 
env.enclInvokable.symbol == initializerFunc.symbol; } private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) { if (type.tag == TypeTags.RECORD) { if (Symbols.isFlagOn(type.flags, Flags.READONLY)) { return true; } BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (!field.name.value.equals(fieldName)) { continue; } return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY); } return recordType.sealed; } boolean allInvalidUpdates = true; for (BType memberType : ((BUnionType) type).getMemberTypes()) { if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) { allInvalidUpdates = false; } } return allInvalidUpdates; } private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) { BLangExpression expr = fieldAccessExpr.expr; BType exprType = expr.type; if (exprType.tag == TypeTags.XML || exprType.tag == TypeTags.XML_ELEMENT) { return true; } if (expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType((BLangFieldBasedAccess) expr) && exprType.tag == TypeTags.UNION) { Set<BType> memberTypes = ((BUnionType) exprType).getMemberTypes(); return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType); } return false; } public void visit(BLangIndexBasedAccess indexBasedAccessExpr) { ((BLangVariableReference) indexBasedAccessExpr.expr).lhsVar = indexBasedAccessExpr.lhsVar; ((BLangVariableReference) indexBasedAccessExpr.expr).compoundAssignmentLhsVar = indexBasedAccessExpr.compoundAssignmentLhsVar; checkExpr(indexBasedAccessExpr.expr, this.env, symTable.noType); if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY && indexBasedAccessExpr.expr.type.tag != TypeTags.TABLE) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED, indexBasedAccessExpr.expr.type); resultType = symTable.semanticError; return; } BType actualType = checkIndexAccessExpr(indexBasedAccessExpr); BType exprType = 
indexBasedAccessExpr.expr.type; BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; if (actualType != symTable.semanticError && (indexBasedAccessExpr.lhsVar || indexBasedAccessExpr.compoundAssignmentLhsVar)) { if (isAllReadonlyTypes(exprType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, exprType); resultType = symTable.semanticError; return; } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) && (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) && isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_RECORD_FIELD, getConstFieldName(indexExpr), exprType); resultType = symTable.semanticError; return; } } if (indexBasedAccessExpr.lhsVar) { indexBasedAccessExpr.originalType = actualType; indexBasedAccessExpr.type = actualType; resultType = actualType; return; } this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType); } public void visit(BLangInvocation iExpr) { if (iExpr.expr == null) { checkFunctionInvocationExpr(iExpr); return; } if (invalidModuleAliasUsage(iExpr)) { return; } checkExpr(iExpr.expr, this.env, symTable.noType); BType varRefType = iExpr.expr.type; switch (varRefType.tag) { case TypeTags.OBJECT: checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType); break; case TypeTags.RECORD: checkFieldFunctionPointer(iExpr, this.env); break; case TypeTags.NONE: dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name); break; case TypeTags.SEMANTIC_ERROR: break; default: checkInLangLib(iExpr, varRefType); } } public void visit(BLangInvocation.BLangActionInvocation aInv) { if (aInv.expr == null) { checkFunctionInvocationExpr(aInv); return; } if (invalidModuleAliasUsage(aInv)) { return; } checkExpr(aInv.expr, this.env, symTable.noType); BLangExpression varRef = aInv.expr; switch (varRef.type.tag) { case TypeTags.OBJECT: 
checkActionInvocation(aInv, (BObjectType) varRef.type); break; case TypeTags.RECORD: checkFieldFunctionPointer(aInv, this.env); break; case TypeTags.NONE: dlog.error(aInv.pos, DiagnosticCode.UNDEFINED_FUNCTION, aInv.name); resultType = symTable.semanticError; break; case TypeTags.SEMANTIC_ERROR: default: dlog.error(aInv.pos, DiagnosticCode.INVALID_ACTION_INVOCATION, varRef.type); resultType = symTable.semanticError; break; } } private boolean invalidModuleAliasUsage(BLangInvocation invocation) { Name pkgAlias = names.fromIdNode(invocation.pkgAlias); if (pkgAlias != Names.EMPTY) { dlog.error(invocation.pos, DiagnosticCode.PKG_ALIAS_NOT_ALLOWED_HERE); return true; } return false; } public void visit(BLangLetExpression letExpression) { BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())), new Name(String.format("$let_symbol_%d$", letCount++)), env.enclPkg.symbol.pkgID, letExpression.type, env.scope.owner, letExpression.pos); letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol); for (BLangLetVariable letVariable : letExpression.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env); } BType exprType = checkExpr(letExpression.expr, letExpression.env); types.checkType(letExpression, exprType, this.expType); } private void checkInLangLib(BLangInvocation iExpr, BType varRefType) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.name.pos, DiagnosticCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value, iExpr.expr.type); resultType = symTable.semanticError; return; } if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) { return; } checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType); } private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType, BSymbol langLibMethodSymbol) { if (!Symbols.isFlagOn(varRefType.flags, 
// (continuation) tail of checkInvalidImmutableValueUpdate(): only lang-lib "modifier"
// functions on a readonly value are errors; mergeJson is exempt on non-map values.
Flags.READONLY)) {
    return false;
}
String packageId = langLibMethodSymbol.pkgID.name.value;
if (!modifierFunctions.containsKey(packageId)) {
    return false;
}
String funcName = langLibMethodSymbol.name.value;
if (!modifierFunctions.get(packageId).contains(funcName)) {
    return false;
}
if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) {
    return false;
}
dlog.error(iExpr.pos, DiagnosticCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
resultType = symTable.semanticError;
return true; }

/**
 * True when the list-typed value cannot change length: a sealed array, a tuple without a
 * rest type, or a union whose every member is itself fixed-length.
 */
private boolean isFixedLengthList(BType type) {
    switch(type.tag) {
        case TypeTags.ARRAY:
            return (((BArrayType) type).state != BArrayState.UNSEALED);
        case TypeTags.TUPLE:
            return (((BTupleType) type).restType == null);
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) type;
            for (BType member : unionType.getMemberTypes()) {
                if (!isFixedLengthList(member)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}

/**
 * Flags lang-lib calls that would change a list's length or shape when the static type
 * forbids it: length-modifying functions on fixed-length lists, and shift() on tuples
 * whose member types differ from the rest type.
 */
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
    String invocationName = iExpr.name.getValue();
    if (!listLengthModifierFunctions.contains(invocationName)) {
        return;
    }
    if (isFixedLengthList(varRefType)) {
        dlog.error(iExpr.name.pos, DiagnosticCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
        return;
    }
    if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
        dlog.error(iExpr.name.pos, DiagnosticCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
        return;
    }
}

/**
 * True when shift() is applied to a tuple (or a union of tuples) whose fixed member
 * types are not all the same as its rest type, i.e. removing the first member would
 * change the tuple's shape. (Body continues on the next source line.)
 */
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
    if ((varRefType.tag == TypeTags.TUPLE) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)
            && hasDifferentTypeThanRest((BTupleType) varRefType)) {
        return true;
    }
    if ((varRefType.tag == TypeTags.UNION) && (invocationName.compareTo(FUNCTION_NAME_SHIFT) == 0)) {
        BUnionType unionVarRef = (BUnionType) varRefType;
        boolean allMemberAreFixedShapeTuples = true;
for (BType member : unionVarRef.getMemberTypes()) { if (member.tag != TypeTags.TUPLE) { allMemberAreFixedShapeTuples = false; break; } if (!hasDifferentTypeThanRest((BTupleType) member)) { allMemberAreFixedShapeTuples = false; break; } } return allMemberAreFixedShapeTuples; } return false; } private boolean hasDifferentTypeThanRest(BTupleType tupleType) { if (tupleType.restType == null) { return false; } for (BType member : tupleType.getTupleTypes()) { if (!types.isSameType(tupleType.restType, member)) { return true; } } return false; } private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) { BType type = checkExpr(iExpr.expr, env); BLangIdentifier invocationIdentifier = iExpr.name; if (type == symTable.semanticError) { return false; } BSymbol funcSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier), type.tsymbol); if (funcSymbol == symTable.notFoundSymbol) { BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, type); if (langLibMethodSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.name.pos, DiagnosticCode.UNDEFINED_FIELD_IN_RECORD, invocationIdentifier, type); resultType = symTable.semanticError; } else { checkInvalidImmutableValueUpdate(iExpr, type, langLibMethodSymbol); } return false; } iExpr.symbol = funcSymbol; iExpr.type = ((BInvokableSymbol) funcSymbol).retType; checkInvocationParamAndReturnType(iExpr); iExpr.functionPointerInvocation = true; return true; } public void visit(BLangTypeInit cIExpr) { if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) { dlog.error(cIExpr.pos, DiagnosticCode.INVALID_TYPE_NEW_LITERAL, expType); resultType = symTable.semanticError; return; } BType actualType; if (cIExpr.userDefinedType != null) { actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env); } else { actualType = expType; } if (actualType == symTable.semanticError) { resultType = symTable.semanticError; return; } if (actualType.tag 
== TypeTags.INTERSECTION) { actualType = ((BIntersectionType) actualType).effectiveType; } switch (actualType.tag) { case TypeTags.OBJECT: if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) { dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol); cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType)); resultType = symTable.semanticError; return; } if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) { return; } } break; case TypeTags.STREAM: if (cIExpr.initInvocation.argExprs.size() != 1) { dlog.error(cIExpr.pos, DiagnosticCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation.name); resultType = symTable.semanticError; return; } BStreamType actualStreamType = (BStreamType) actualType; if (actualStreamType.error != null) { BType error = actualStreamType.error; if (error != symTable.neverType && !types.containsErrorType(error)) { dlog.error(cIExpr.pos, DiagnosticCode.ERROR_TYPE_EXPECTED, error.toString()); resultType = symTable.semanticError; return; } } BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0); BType constructType = checkExpr(iteratorExpr, env, symTable.noType); BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType); BUnionType expectedReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType); if (nextReturnType == null) { dlog.error(iteratorExpr.pos, DiagnosticCode.MISSING_REQUIRED_METHOD_NEXT, constructType, expectedReturnType); resultType = symTable.semanticError; return; } if (types.getErrorType(nextReturnType) == null && (types.getErrorType(expectedReturnType) != null)) { 
dlog.error(iteratorExpr.pos, DiagnosticCode.INVALID_STREAM_CONSTRUCTOR_EXP_TYPE, iteratorExpr); resultType = symTable.semanticError; return; } types.checkType(iteratorExpr.pos, nextReturnType, expectedReturnType, DiagnosticCode.INCOMPATIBLE_TYPES); resultType = actualType; return; case TypeTags.UNION: List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType); BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType); cIExpr.initInvocation.type = symTable.nilType; if (matchedType.tag == TypeTags.OBJECT) { if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) { cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol; checkInvocationParam(cIExpr.initInvocation); cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType; actualType = matchedType; break; } else { if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) { return; } } } types.checkType(cIExpr, matchedType, expType); cIExpr.type = matchedType; resultType = matchedType; return; default: dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType); resultType = symTable.semanticError; return; } if (cIExpr.initInvocation.type == null) { cIExpr.initInvocation.type = symTable.nilType; } BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.type); resultType = types.checkType(cIExpr, actualTypeInitType, expType); } private BUnionType createNextReturnType(DiagnosticPos pos, BStreamType streamType) { BRecordType recordType = new BRecordType(null); recordType.restFieldType = symTable.noType; recordType.sealed = true; Name fieldName = Names.VALUE; BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC, fieldName, env.enclPkg.packageID, streamType.constraint, env.scope.owner, pos, VIRTUAL)); field.type = streamType.constraint; recordType.fields.put(field.name.value, field); recordType.tsymbol = 
// (continuation of createNextReturnType: finish the { value: T } record and assemble the
// iterator next() return union: record | <stream error> | ())
Symbols.createRecordSymbol(0, Names.EMPTY, env.enclPkg.packageID, recordType, env.scope.owner, pos, VIRTUAL);
recordType.tsymbol.scope = new Scope(env.scope.owner);
recordType.tsymbol.scope.define(fieldName, field.symbol);
LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
retTypeMembers.add(recordType);
// The stream's error component participates in next()'s return type unless it is `never`.
if (streamType.error != symTable.neverType && streamType.error != null) {
    retTypeMembers.add(streamType.error);
}
retTypeMembers.add(symTable.nilType);
BUnionType unionType = BUnionType.create(null, retTypeMembers);
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
        env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);
return unionType;
}

/**
 * Validates a `new(...)` invocation for an object type that declares no `init` function:
 * passing arguments in that case is an error.
 *
 * @param cIExpr  the type-init (`new`) expression being checked
 * @param objType the object type being instantiated
 * @return true if the invocation is valid; false otherwise (a diagnostic is logged and
 *         {@code resultType} is set to {@code semanticError})
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
    if (!cIExpr.initInvocation.argExprs.isEmpty()
            && ((BObjectTypeSymbol) objType.tsymbol).initializerFunc == null) {
        dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.exprSymbol);
        // Still type-check the arguments so errors nested inside them are also reported.
        cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
        resultType = symTable.semanticError;
        return false;
    }
    return true;
}

/**
 * Derives the type of a `new T(...)` expression from the object type and the init
 * function's return type: `T` when init returns nil; `T | E...` (nil removed, since nil
 * denotes successful construction) when init returns a union; semanticError otherwise.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.UNION) {
        LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
        retTypeMembers.add(objType);
        retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes());
        retTypeMembers.remove(symTable.nilType);
        BUnionType unionType = BUnionType.create(null, retTypeMembers);
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
                env.enclPkg.symbol.pkgID, unionType, env.scope.owner, symTable.builtinPos, VIRTUAL);
        return unionType;
    } else if (initRetType.tag == TypeTags.NIL) {
        return objType;
    }
    return symTable.semanticError;
}

// findMembersWithMatchingInitFunc: collects the object members of an LHS union whose init
// function accepts the given `new(...)` arguments (body continues on a later source line).
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
    int objectCount = 0;
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        int
// (continuation of findMembersWithMatchingInitFunc: count the object members of the union)
tag = memberType.tag;
if (tag == TypeTags.OBJECT) {
    objectCount++;
    continue;
}
if (tag != TypeTags.INTERSECTION) {
    continue;
}
// Intersections whose effective type is an object also count as object members.
if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) {
    objectCount++;
}
}
boolean containsSingleObject = objectCount == 1;
List<BType> matchingLhsMemberTypes = new ArrayList<>();
for (BType memberType : lhsUnionType.getMemberTypes()) {
    if (memberType.tag != TypeTags.OBJECT) {
        continue;
    }
    // Only classes can be instantiated with `new`.
    if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
        dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol);
    }
    // With exactly one object member there is nothing to disambiguate.
    if (containsSingleObject) {
        return Collections.singletonList(memberType);
    }
    BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
    if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
        matchingLhsMemberTypes.add(memberType);
    }
}
return matchingLhsMemberTypes;
}

/**
 * Picks the single matching object type for a `new(...)` whose LHS is a union. Zero
 * matches (cannot infer) or more than one (ambiguous) is an error; in both cases
 * {@code resultType} is set to {@code semanticError} and a diagnostic is logged.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
    if (matchingLhsMembers.isEmpty()) {
        dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, lhsUnion);
        resultType = symTable.semanticError;
        return symTable.semanticError;
    } else if (matchingLhsMembers.size() == 1) {
        return matchingLhsMembers.get(0).tsymbol.type;
    } else {
        dlog.error(cIExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, lhsUnion);
        resultType = symTable.semanticError;
        return symTable.semanticError;
    }
}

// isArgsMatchesFunction: checks whether the invocation arguments (positional and named) are
// assignable to the candidate init function's parameters (body continues on a later line).
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
    // Type-check each argument first so nested diagnostics are reported exactly once.
    invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType));
    if (function == null) {
        return invocationArguments.isEmpty();
    }
    if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
        return true;
    }
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    List<BLangExpression> positionalArgs = new ArrayList<>();
    for (BLangExpression argument : invocationArguments) {
        if (argument.getKind() ==
NodeKind.NAMED_ARGS_EXPR) { namedArgs.add((BLangNamedArgsExpression) argument); } else { positionalArgs.add(argument); } } List<BVarSymbol> requiredParams = function.symbol.params.stream() .filter(param -> !param.defaultableParam) .collect(Collectors.toList()); if (requiredParams.size() > invocationArguments.size()) { return false; } List<BVarSymbol> defaultableParams = function.symbol.params.stream() .filter(param -> param.defaultableParam) .collect(Collectors.toList()); int givenRequiredParamCount = 0; for (int i = 0; i < positionalArgs.size(); i++) { if (function.symbol.params.size() > i) { givenRequiredParamCount++; BVarSymbol functionParam = function.symbol.params.get(i); if (!types.isAssignable(positionalArgs.get(i).type, functionParam.type)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); continue; } if (function.symbol.restParam != null) { BType restParamType = ((BArrayType) function.symbol.restParam.type).eType; if (!types.isAssignable(positionalArgs.get(i).type, restParamType)) { return false; } continue; } return false; } for (BLangNamedArgsExpression namedArg : namedArgs) { boolean foundNamedArg = false; List<BVarSymbol> params = function.symbol.params; for (int i = givenRequiredParamCount; i < params.size(); i++) { BVarSymbol functionParam = params.get(i); if (!namedArg.name.value.equals(functionParam.name.value)) { continue; } foundNamedArg = true; BType namedArgExprType = checkExpr(namedArg.expr, env); if (!types.isAssignable(functionParam.type, namedArgExprType)) { return false; } requiredParams.remove(functionParam); defaultableParams.remove(functionParam); } if (!foundNamedArg) { return false; } } return requiredParams.size() <= 0; } public void visit(BLangWaitForAllExpr waitForAllExpr) { switch (expType.tag) { case TypeTags.RECORD: checkTypesForRecords(waitForAllExpr); break; case TypeTags.MAP: checkTypesForMap(waitForAllExpr.keyValuePairs, ((BMapType) expType).constraint); LinkedHashSet<BType> 
memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypesForMap.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypesForMap.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap); resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol); break; case TypeTags.NONE: case TypeTags.ANY: checkTypesForMap(waitForAllExpr.keyValuePairs, expType); LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs); if (memberTypes.size() == 1) { resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol); break; } BUnionType constraintType = BUnionType.create(null, memberTypes); resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol); break; default: dlog.error(waitForAllExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, getWaitForAllExprReturnType(waitForAllExpr.keyValuePairs, waitForAllExpr.pos)); resultType = symTable.semanticError; break; } waitForAllExpr.type = resultType; if (resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.type, expType); } } private BRecordType getWaitForAllExprReturnType(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals, DiagnosticPos pos) { BRecordType retType = new BRecordType(null); for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) { BLangIdentifier fieldName; if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { fieldName = keyVal.key; } else { fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName; } BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName)); BType fieldType = symbol.type.tag == TypeTags.FUTURE ? 
// (continuation of getWaitForAllExprReturnType: a future-typed symbol contributes its
// constraint type as the field type, any other symbol its own type)
((BFutureType) symbol.type).constraint : symbol.type;
BField field = new BField(names.fromIdNode(keyVal.key), null,
        new BVarSymbol(0, names.fromIdNode(keyVal.key), env.enclPkg.packageID, fieldType,
                null, keyVal.pos, VIRTUAL));
retType.fields.put(field.name.value, field);
}
retType.restFieldType = symTable.noType;
retType.sealed = true;
retType.tsymbol = Symbols.createRecordSymbol(0, Names.EMPTY, env.enclPkg.packageID, retType, null, pos, VIRTUAL);
return retType;
}

/**
 * Collects the distinct member types contributed by the key-value pairs of a wait-for-all
 * expression; future-typed entries contribute their constraint type instead.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.type : keyVal.valueExpr.type;
        if (bType.tag == TypeTags.FUTURE) {
            memberTypes.add(((BFutureType) bType).constraint);
        } else {
            memberTypes.add(bType);
        }
    }
    return memberTypes;
}

// Type-checks every key-value pair of a wait-for-all expression against one expected
// member type (used when the expected type is a map or unconstrained).
private void checkTypesForMap(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs, BType expType) {
    keyValuePairs.forEach(keyVal -> checkWaitKeyValExpr(keyVal, expType));
}

// checkTypesForRecords: checks a wait-for-all expression against a record expected type,
// matching each wait key against the record's fields (body continues on a later line).
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
    List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
    Map<String, BField> lhsFields = ((BRecordType) expType).fields;
    // A sealed (closed) record cannot receive more fields than it declares.
    if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) {
        dlog.error(waitExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType,
                getWaitForAllExprReturnType(waitExpr.keyValuePairs, waitExpr.pos));
        resultType = symTable.semanticError;
        return;
    }
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
        String key = keyVal.key.value;
        if (!lhsFields.containsKey(key)) {
            if (((BRecordType) expType).sealed) {
                dlog.error(waitExpr.pos, DiagnosticCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
                resultType = symTable.semanticError;
            } else {
                // Open record: unknown keys are checked against the rest field type.
                BType restFieldType = ((BRecordType) expType).restFieldType;
                checkWaitKeyValExpr(keyVal, restFieldType);
            }
        } else
{ checkWaitKeyValExpr(keyVal, lhsFields.get(key).type); } } checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos); if (symTable.semanticError != resultType) { resultType = expType; } } private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs, DiagnosticPos pos) { type.fields.values().forEach(field -> { boolean hasField = keyValPairs.stream().anyMatch(keyVal -> field.name.value.equals(keyVal.key.value)); if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) { dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name); } }); } private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) { BLangExpression expr; if (keyVal.keyExpr != null) { BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode (((BLangSimpleVarRef) keyVal.keyExpr).variableName)); keyVal.keyExpr.type = symbol.type; expr = keyVal.keyExpr; } else { expr = keyVal.valueExpr; } BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null); checkExpr(expr, env, futureType); } public void visit(BLangTernaryExpr ternaryExpr) { BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType); SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env); BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType); SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env); BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType); if (condExprType == symTable.semanticError || thenType == symTable.semanticError || elseType == symTable.semanticError) { resultType = symTable.semanticError; } else if (expType == symTable.noType) { if (types.isAssignable(elseType, thenType)) { resultType = thenType; } else if (types.isAssignable(thenType, elseType)) { resultType = elseType; } else { dlog.error(ternaryExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, 
// (continuation of visit(BLangTernaryExpr): then/else branch types are incompatible and
// neither is assignable to the other)
thenType, elseType);
resultType = symTable.semanticError;
}
} else {
resultType = expType;
}
}

/**
 * Type-checks a `wait` expression: the operand is checked against `future<expType>`, and
 * the result type is the future's constraint (a union of constraints when the operand
 * type is a union of futures).
 */
public void visit(BLangWaitExpr waitExpr) {
    expType = new BFutureType(TypeTags.FUTURE, expType, null);
    checkExpr(waitExpr.getExpression(), env, expType);
    if (resultType.tag == TypeTags.UNION) {
        LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
        if (memberTypes.size() == 1) {
            resultType = memberTypes.toArray(new BType[0])[0];
        } else {
            resultType = BUnionType.create(null, memberTypes);
        }
    } else if (resultType != symTable.semanticError) {
        resultType = ((BFutureType) resultType).constraint;
    }
    waitExpr.type = resultType;
    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitExpr, waitExpr.type, ((BFutureType) expType).constraint);
    }
}

// Flattens a union's members into the accumulator set: future members contribute their
// constraint type, all other members are added as-is.
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.FUTURE) {
            memberTypes.add(((BFutureType) memberType).constraint);
        } else {
            memberTypes.add(memberType);
        }
    }
    return memberTypes;
}

// visit(BLangTrapExpr): the type of `trap expr` is the expression's type widened with
// `error` (body continues on a later line).
@Override
public void visit(BLangTrapExpr trapExpr) {
    boolean firstVisit = trapExpr.expr.type == null;
    BType actualType;
    BType exprType = checkExpr(trapExpr.expr, env, expType);
    boolean definedWithVar = expType == symTable.noType;
    // A trapped worker-receive needs a second pass; defer typing on the first visit.
    if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = trapExpr.type;
            exprType = trapExpr.expr.type;
        }
    }
    if (expType == symTable.semanticError || exprType == symTable.semanticError) {
        actualType = symTable.semanticError;
    } else {
        LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
        if (exprType.tag == TypeTags.UNION) {
            resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
        } else {
            resultTypes.add(exprType);
        }
        resultTypes.add(symTable.errorType);
        actualType = BUnionType.create(null, resultTypes);
    }
    resultType
= types.checkType(trapExpr, actualType, expType); if (definedWithVar && resultType != null && resultType != symTable.semanticError) { types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.type, resultType); } } public void visit(BLangBinaryExpr binaryExpr) { if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) { BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType); BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType); if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) { resultType = symTable.semanticError; return; } resultType = BUnionType.create(null, lhsResultType, rhsResultType); return; } checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr); SymbolEnv rhsExprEnv; BType lhsType = checkExpr(binaryExpr.lhsExpr, env); if (binaryExpr.opKind == OperatorKind.AND) { rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true); } else if (binaryExpr.opKind == OperatorKind.OR) { rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env); } else { rhsExprEnv = env; } BType rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv); BType actualType = symTable.semanticError; switch (binaryExpr.opKind) { case ADD: BType leftConstituent = getXMLConstituents(lhsType); BType rightConstituent = getXMLConstituents(rhsType); if (leftConstituent != null && rightConstituent != null) { actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null); break; } default: if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) { BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType); if (opSymbol == symTable.notFoundSymbol) { opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr); } if (opSymbol == symTable.notFoundSymbol) { dlog.error(binaryExpr.pos, DiagnosticCode.BINARY_OP_INCOMPATIBLE_TYPES, 
// (continuation of visit(BLangBinaryExpr): report an unresolved binary operator, or record
// the resolved operator symbol and take its return type)
binaryExpr.opKind, lhsType, rhsType);
} else {
    // Equality on values that could hold tables is not yet supported by the runtime.
    if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL)
            && (couldHoldTableValues(lhsType, new ArrayList<>())
            && couldHoldTableValues(rhsType, new ArrayList<>()))) {
        dlog.error(binaryExpr.pos, DiagnosticCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME);
    }
    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    actualType = opSymbol.type.getReturnType();
}
}
}
resultType = types.checkType(binaryExpr, actualType, expType);
}

/**
 * Walks up the enclosing environments to the one whose node is the given input clause,
 * then returns a clone of that environment's parent (or a fresh environment for the node
 * when there is no such parent).
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    while (env != null && env.node != node) {
        env = env.enclEnv;
    }
    return env != null && env.enclEnv != null ? env.enclEnv.createClone() : new SymbolEnv(node, null);
}

/**
 * Builds the environment in effect after a query `join` clause: a clone of the current
 * environment whose parent is rewired to the environment preceding the last input
 * (`from`/`join`) clause.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    SymbolEnv clone = env.createClone();
    while (clone != null && clone.node != node) {
        clone = clone.enclEnv;
    }
    if (clone != null) {
        clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
    } else {
        clone = new SymbolEnv(node, null);
    }
    return clone;
}

// Finds the nearest enclosing `from`/`join` clause node, or null when none exists
// (the conditional result expression completes on the next source line).
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    while (env != null && (env.node.getKind() != NodeKind.FROM && env.node.getKind() != NodeKind.JOIN)) {
        env = env.enclEnv;
    }
    return env != null ?
// (completion of getLastInputNodeFromEnv's conditional result)
env.node : null;
}

// A `transactional` expression evaluates to a boolean.
public void visit(BLangTransactionalExpr transactionalExpr) {
    resultType = types.checkType(transactionalExpr, symTable.booleanType, expType);
}

// A `commit` expression evaluates to `error|()`.
public void visit(BLangCommitExpr commitExpr) {
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(commitExpr, actualType, expType);
}

/**
 * Returns the XML constituent type of the given type: the constraint of an `xml<T>`
 * sequence type, the type itself for XML non-sequence types, and null otherwise.
 */
private BType getXMLConstituents(BType type) {
    BType constituent = null;
    if (type.tag == TypeTags.XML) {
        constituent = ((BXMLType) type).constraint;
    } else if (TypeTags.isXMLNonSequenceType(type.tag)) {
        constituent = type;
    }
    return constituent;
}

// When the expected type is decimal, pre-check the operands of +, -, * and / against the
// decimal type so numeric literals in the operands are typed as decimal.
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    switch (binaryExpr.opKind) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
            checkExpr(binaryExpr.lhsExpr, env, expType);
            checkExpr(binaryExpr.rhsExpr, env, expType);
            break;
        default:
            break;
    }
}

// visit(BLangElvisExpr): the LHS must be a nullable union; its type with nil stripped is
// checked against the expected type (body continues on a later line).
public void visit(BLangElvisExpr elvisExpr) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
    BType actualType = symTable.semanticError;
    if (lhsType != symTable.semanticError) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            BUnionType unionType = (BUnionType) lhsType;
            // Strip nil from the LHS union: the elvis RHS covers the nil case.
            LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
                    .filter(type -> type.tag != TypeTags.NIL)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (memberTypes.size() == 1) {
                actualType = memberTypes.toArray(new BType[0])[0];
            } else {
                actualType = BUnionType.create(null, memberTypes);
            }
        } else {
            dlog.error(elvisExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS, lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
            DiagnosticCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType ==
symTable.noType) { if (types.isSameType(rhsReturnType, lhsReturnType)) { resultType = lhsReturnType; } else { dlog.error(elvisExpr.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType); resultType = symTable.semanticError; } } else { resultType = expType; } } @Override public void visit(BLangGroupExpr groupExpr) { resultType = checkExpr(groupExpr.expression, env, expType); } public void visit(BLangTypedescExpr accessExpr) { accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env); int resolveTypeTag = accessExpr.resolvedType.tag; final BType actualType; if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) { actualType = new BTypedescType(accessExpr.resolvedType, null); } else { actualType = accessExpr.resolvedType; } resultType = types.checkType(accessExpr, actualType, expType); } public void visit(BLangUnaryExpr unaryExpr) { BType exprType; BType actualType = symTable.semanticError; if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = exprType; } } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) { exprType = checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { actualType = new BTypedescType(exprType, null); } } else { exprType = OperatorKind.ADD.equals(unaryExpr.operator) ? 
checkExpr(unaryExpr.expr, env, expType) : checkExpr(unaryExpr.expr, env); if (exprType != symTable.semanticError) { BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType); if (symbol == symTable.notFoundSymbol) { dlog.error(unaryExpr.pos, DiagnosticCode.UNARY_OP_INCOMPATIBLE_TYPES, unaryExpr.operator, exprType); } else { unaryExpr.opSymbol = (BOperatorSymbol) symbol; actualType = symbol.type.getReturnType(); } } } resultType = types.checkType(unaryExpr, actualType, expType); } public void visit(BLangTypeConversionExpr conversionExpr) { BType actualType = symTable.semanticError; for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) { annAttachment.attachPoints.add(AttachPoint.Point.TYPE); semanticAnalyzer.analyzeNode(annAttachment, this.env); } BLangExpression expr = conversionExpr.expr; if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) { resultType = checkExpr(expr, env, this.expType); return; } BType targetType = symResolver.resolveTypeNode(conversionExpr.typeNode, env); boolean requiresTypeInference = requireTypeInference(expr, false); if (requiresTypeInference) { targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos, targetType); } conversionExpr.targetType = targetType; BType expType = requiresTypeInference ? 
targetType : symTable.noType; BType sourceType = checkExpr(expr, env, expType); if (types.isTypeCastable(expr, sourceType, targetType)) { actualType = targetType; } else { dlog.error(conversionExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CAST, sourceType, targetType); } resultType = types.checkType(conversionExpr, actualType, this.expType); } @Override public void visit(BLangLambdaFunction bLangLambdaFunction) { bLangLambdaFunction.type = bLangLambdaFunction.function.symbol.type; bLangLambdaFunction.capturedClosureEnv = env.createClone(); env.enclPkg.lambdaFunctions.add(bLangLambdaFunction); resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.type, expType); } @Override public void visit(BLangArrowFunction bLangArrowFunction) { BType expectedType = expType; if (expectedType.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) expectedType; BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE) .collect(Collectors.collectingAndThen(Collectors.toList(), list -> { if (list.size() != 1) { return null; } return list.get(0); } )); if (invokableType != null) { expectedType = invokableType; } } if (expectedType.tag != TypeTags.INVOKABLE) { dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS); resultType = symTable.semanticError; return; } BInvokableType expectedInvocation = (BInvokableType) expectedType; populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes); bLangArrowFunction.body.expr.type = populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType); if (expectedInvocation.retType.tag == TypeTags.NONE) { expectedInvocation.retType = bLangArrowFunction.body.expr.type; } resultType = bLangArrowFunction.funcType = expectedInvocation; } public void visit(BLangXMLQName bLangXMLQName) { String prefix = bLangXMLQName.prefix.value; resultType = types.checkType(bLangXMLQName, symTable.stringType, expType); if 
(env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty() && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { ((BLangXMLAttribute) env.node).isNamespaceDeclr = true; return; } if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) { dlog.error(bLangXMLQName.pos, DiagnosticCode.INVALID_NAMESPACE_PREFIX, prefix); bLangXMLQName.type = symTable.semanticError; return; } if (bLangXMLQName.prefix.value.isEmpty()) { return; } BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix)); if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { return; } if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) { logUndefinedSymbolError(bLangXMLQName.pos, prefix); bLangXMLQName.type = symTable.semanticError; return; } if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) { xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value, (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos); } if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) { resultType = symTable.semanticError; return; } bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol; bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI; } private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix, BPackageSymbol pkgSymbol, DiagnosticPos pos) { BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env, names.fromString(localname), SymTag.CONSTANT); if (constSymbol == symTable.notFoundSymbol) { if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) { dlog.error(pos, DiagnosticCode.UNDEFINED_SYMBOL, prefix + ":" + localname); } return null; } BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol; if 
(constantSymbol.literalType.tag != TypeTags.STRING) { dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType); return null; } String constVal = (String) constantSymbol.value.value; int s = constVal.indexOf('{'); int e = constVal.lastIndexOf('}'); if (e > s + 1) { pkgSymbol.isUsed = true; String nsURI = constVal.substring(s + 1, e); String local = constVal.substring(e); return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos, SOURCE); } dlog.error(pos, DiagnosticCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname); return null; } public void visit(BLangXMLAttribute bLangXMLAttribute) { SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env); BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name; checkExpr(name, xmlAttributeEnv, symTable.stringType); if (name.prefix.value.isEmpty()) { name.namespaceURI = null; } checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType); symbolEnter.defineNode(bLangXMLAttribute, env); } public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) { SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env); Set<String> usedPrefixes = new HashSet<>(); BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix; if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) { usedPrefixes.add(elemNamePrefix.value); } for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) { if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) { BLangXMLQuotedString value = attribute.value; if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) { dlog.error(value.pos, DiagnosticCode.INVALID_XML_NS_INTERPOLATION); } checkExpr(attribute, xmlElementEnv, symTable.noType); } BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix; if (prefix != null && 
!prefix.value.isEmpty()) { usedPrefixes.add(prefix.value); } } bLangXMLElementLiteral.attributes.forEach(attribute -> { if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) { checkExpr(attribute, xmlElementEnv, symTable.noType); } }); Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv); Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX); if (namespaces.containsKey(defaultNs)) { bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs); } for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) { if (usedPrefixes.contains(nsEntry.getKey().value)) { bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue()); } } validateTags(bLangXMLElementLiteral, xmlElementEnv); bLangXMLElementLiteral.modifiedChildren = concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv); if (expType == symTable.noType) { resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType); return; } resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType, this.expType); if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) { markChildrenAsImmutable(bLangXMLElementLiteral); } } private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) { BLangXMLQName attrName = (BLangXMLQName) attribute.name; return (attrName.prefix.value.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) || attrName.prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE); } public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) { checkStringTemplateExprs(bLangXMLTextLiteral.textFragments, false); resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType); } public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) { checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments, false); if (expType == symTable.noType) { resultType = 
// (continuation of visit(BLangXMLCommentLiteral): check against xml:Comment / expected type)
types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
return;
}
resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType,
        this.expType);
}

// An XML processing-instruction literal is typed as xml:ProcessingInstruction; its target
// and data fragments must be strings.
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments, false);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType,
            this.expType);
}

// A quoted string inside an XML literal is a string template; its result type is string.
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments, false);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}

// The XML attribute-access syntax is deprecated and is always reported as an error.
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    dlog.error(xmlAttributeAccessExpr.pos, DiagnosticCode.DEPRECATED_XML_ATTRIBUTE_ACCESS);
    resultType = symTable.semanticError;
}

// A string template literal always has type string.
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs, false);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}

/**
 * Type-checks a raw template literal: determines the concrete template object type from
 * the expected type, then checks the `strings` and `insertions` fragments against that
 * type's corresponding field types.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType);
    if (type == symTable.semanticError) {
        resultType = type;
        return;
    }
    // At this point the type has been validated as a raw-template-compatible object type.
    BObjectType literalType = (BObjectType) type;
    BType stringsType = literalType.fields.get("strings").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    BType insertionsType = literalType.fields.get("insertions").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    resultType = type;
}
// Resolves the object type a raw template literal should be checked against. With no
// expected type (or an expected type containing `any`) the default raw-template type is
// used. Otherwise the expected type must be a non-class object with at most two fields and
// no attached methods; violations are reported and yield semanticError.
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }
    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticCode.INVALID_RAW_TEMPLATE_TYPE);
    if (type == symTable.semanticError) {
        return type;
    }
    // Raw templates may only be assigned to non-class object types.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }
    BObjectType litObjType = (BObjectType) type;
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }
    return type;
}

// Checks each raw-template fragment expression against the member type(s) of the given
// list-typed field (signature continues on the next line).
private boolean evaluateRawTemplateExprs(List<?
extends BLangExpression> exprs, BType fieldType, DiagnosticCode code, DiagnosticPos pos) {
    // Continuation of evaluateRawTemplateExprs(): type-checks every fragment expression in
    // `exprs` against the element/member types of the (array or tuple) field type and
    // returns true if any of them failed to check. A wrong fragment count is reported with
    // `code` but returns false — the diagnostic has already been logged at that point.
    BType listType = getResolvedIntersectionType(fieldType);
    boolean errored = false;
    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;
        // A closed (sealed) array fixes the exact number of allowed fragments.
        if (arrayType.state == BArrayState.CLOSED_SEALED && (exprs.size() != arrayType.size)) {
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }
        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();
        // Too few fragments, or too many without a rest type, is a count mismatch.
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }
        // Check the fixed tuple members first, then any surplus against the rest type.
        int i;
        List<BType> memberTypes = tupleType.tupleTypes;
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored;
        }
        if (size > requiredItems) {
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored;
            }
        }
    } else {
        // Callers are expected to pass a list type here; anything else is a compiler bug.
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }
    return errored;
}

// Unwraps an intersection type to its effective type (continues on the next line).
private BType getResolvedIntersectionType(BType type) {
    return type.tag != TypeTags.INTERSECTION ?
type : ((BIntersectionType) type).effectiveType;
    }

    // True if the type is `any` itself or a union that contains `any` as a member.
    private boolean containsAnyType(BType type) {
        if (type == symTable.anyType) {
            return true;
        }
        if (type.tag == TypeTags.UNION) {
            return ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
        }
        return false;
    }

    // Narrows a union expected type down to its single raw-template-compatible member.
    // More than one compatible member is ambiguous and reported as an error; none leaves
    // the expected type unchanged so the subsequent type check reports the mismatch.
    private BType getCompatibleRawTemplateType(BType expType, DiagnosticPos pos) {
        if (expType.tag != TypeTags.UNION) {
            return expType;
        }
        BUnionType unionType = (BUnionType) expType;
        List<BType> compatibleTypes = new ArrayList<>();
        for (BType type : unionType.getMemberTypes()) {
            if (types.isAssignable(type, symTable.rawTemplateType)) {
                compatibleTypes.add(type);
            }
        }
        if (compatibleTypes.size() == 0) {
            return expType;
        }
        if (compatibleTypes.size() > 1) {
            dlog.error(pos, DiagnosticCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
                    expType);
            return symTable.semanticError;
        }
        return compatibleTypes.get(0);
    }

    // An integer range expression requires int bounds and yields an int array.
    @Override
    public void visit(BLangIntRangeExpression intRangeExpression) {
        checkExpr(intRangeExpression.startExpr, env, symTable.intType);
        checkExpr(intRangeExpression.endExpr, env, symTable.intType);
        resultType = new BArrayType(symTable.intType);
    }

    // A rest argument takes the type of its wrapped expression.
    @Override
    public void visit(BLangRestArgsExpression bLangRestArgExpression) {
        resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
    }

    // A named argument takes the type of its value expression.
    @Override
    public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
        resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
        bLangNamedArgsExpression.type = bLangNamedArgsExpression.expr.type;
    }

    // Types a match expression: each pattern variable is defined in a fresh block scope
    // (unless its name ends with the ignore marker) and every clause expression is checked
    // against the expected type (continues on the next line).
    @Override
    public void visit(BLangMatchExpression bLangMatchExpression) {
        SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
        checkExpr(bLangMatchExpression.expr, matchExprEnv);
        bLangMatchExpression.patternClauses.forEach(pattern -> {
            if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
                symbolEnter.defineNode(pattern.variable, matchExprEnv);
            }
            checkExpr(pattern.expr, matchExprEnv, expType);
            pattern.variable.type =
symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv); }); LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression); BType actualType; if (matchExprTypes.contains(symTable.semanticError)) { actualType = symTable.semanticError; } else if (matchExprTypes.size() == 1) { actualType = matchExprTypes.toArray(new BType[0])[0]; } else { actualType = BUnionType.create(null, matchExprTypes); } resultType = types.checkType(bLangMatchExpression, actualType, expType); } @Override public void visit(BLangCheckedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangCheckPanickedExpr checkedExpr) { visitCheckAndCheckPanicExpr(checkedExpr); } @Override public void visit(BLangQueryExpr queryExpr) { if (prevEnvs.empty()) { prevEnvs.push(env.createClone()); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek().createClone()); selectClauses.push(queryExpr.getSelectClause()); List<BLangNode> clauses = queryExpr.getQueryClauses(); BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection(); clauses.forEach(clause -> clause.accept(this)); BType actualType = findAssignableType(queryEnvs.peek(), selectClauses.peek().expression, collectionNode.type, expType, queryExpr); resultType = (actualType == symTable.semanticError) ? 
actualType : types.checkType(queryExpr.pos, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangQueryAction queryAction) { if (prevEnvs.empty()) { prevEnvs.push(env.createClone()); } else { prevEnvs.push(prevEnvs.peek()); } queryEnvs.push(prevEnvs.peek().createClone()); selectClauses.push(null); BLangDoClause doClause = queryAction.getDoClause(); List<BLangNode> clauses = queryAction.getQueryClauses(); clauses.forEach(clause -> clause.accept(this)); semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek())); BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType); resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticCode.INCOMPATIBLE_TYPES); selectClauses.pop(); queryEnvs.pop(); prevEnvs.pop(); } @Override public void visit(BLangFromClause fromClause) { queryEnvs.push(SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop())); checkExpr(fromClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(fromClause); handleInputClauseVariables(fromClause, queryEnvs.peek()); } @Override public void visit(BLangJoinClause joinClause) { queryEnvs.push(SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop())); checkExpr(joinClause.collection, queryEnvs.peek()); types.setInputClauseTypedBindingPatternType(joinClause); handleInputClauseVariables(joinClause, queryEnvs.peek()); if (joinClause.onClause != null) { ((BLangOnClause) joinClause.onClause).accept(this); } } @Override public void visit(BLangLetClause letClause) { queryEnvs.push(SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop())); for (BLangLetVariable letVariable : letClause.letVarDeclarations) { semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, queryEnvs.peek()); } } @Override public void visit(BLangWhereClause whereClause) { handleFilterClauses(whereClause.expression); } @Override 
// The select and do clauses are processed by their enclosing query visit; nothing to do
// when visited directly.
public void visit(BLangSelectClause selectClause) {
}

@Override
public void visit(BLangDoClause doClause) {
}

// The `on conflict` expression of a query must evaluate to an error value.
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    BType exprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
    if (!types.isAssignable(exprType, symTable.errorType)) {
        dlog.error(onConflictClause.expression.pos, DiagnosticCode.ERROR_TYPE_EXPECTED, symTable.errorType,
                exprType);
    }
}

// The `limit` expression of a query must be an int.
@Override
public void visit(BLangLimitClause limitClause) {
    BType exprType = checkExpr(limitClause.expression, queryEnvs.peek());
    if (!types.isAssignable(exprType, symTable.intType)) {
        dlog.error(limitClause.expression.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.intType, exprType);
    }
}

// For a join's `on` condition the LHS is resolved in the scope available before the join's
// input node and the RHS in the scope introduced by the join; the LHS type must be
// assignable to the RHS type.
@Override
public void visit(BLangOnClause onClause) {
    SymbolEnv lhsExprEnv, rhsExprEnv;
    BType lhsType, rhsType;
    BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
    lhsExprEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
    lhsType = checkExpr(onClause.lhsExpr, lhsExprEnv);
    rhsExprEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
    rhsType = checkExpr(onClause.rhsExpr, rhsExprEnv);
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}

// Every `order by` key must be of an ordered type.
@Override
public void visit(BLangOrderByClause orderByClause) {
    for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
        BType exprType = checkExpr((BLangExpression) orderKeyNode.getOrderKey(), queryEnvs.peek());
        if (!types.isOrderedType(exprType)) {
            dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}

// Determines the type produced by a query's select expression by checking it against the
// candidate (non-error, non-nil) member types of the target type
// (body continues on the following lines).
private BType findAssignableType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
                                 BType targetType, BLangQueryExpr queryExpr) {
    List<BType> assignableSelectTypes = new ArrayList<>();
    BType actualType = symTable.semanticError;
    Map<Boolean, List<BType>> resultTypeMap = types.getAllTypes(targetType).stream()
.collect(Collectors.groupingBy(memberType -> (types.isAssignable(memberType, symTable.errorType) || (types.isAssignable(memberType, symTable.nilType))))); final boolean containsXmlOrStr = types.getAllTypes(targetType).stream() .anyMatch(t -> t.tag == TypeTags.STRING || t.tag == TypeTags.XML); for (BType type : resultTypeMap.get(false)) { BType selectType; switch (type.tag) { case TypeTags.ARRAY: selectType = checkExpr(selectExp, env, ((BArrayType) type).eType); break; case TypeTags.TABLE: selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint, true, true)); break; case TypeTags.STREAM: selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint, true, true)); break; default: selectType = checkExpr(selectExp, env, type); } if (selectType != symTable.semanticError) { assignableSelectTypes.add(selectType); } } if (assignableSelectTypes.size() == 1) { actualType = assignableSelectTypes.get(0); if (!queryExpr.isStream && !queryExpr.isTable && !containsXmlOrStr) { actualType = new BArrayType(actualType); } } else if (assignableSelectTypes.size() > 1) { dlog.error(selectExp.pos, DiagnosticCode.AMBIGUOUS_TYPES, assignableSelectTypes); return actualType; } else { return actualType; } BType nextMethodReturnType = null; BType errorType = null; if (collectionType.tag != TypeTags.SEMANTIC_ERROR) { switch (collectionType.tag) { case TypeTags.STREAM: errorType = ((BStreamType) collectionType).error; break; case TypeTags.OBJECT: nextMethodReturnType = types.getVarTypeFromIterableObject((BObjectType) collectionType); break; default: BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType, names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)); nextMethodReturnType = types.getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType); } } if (nextMethodReturnType != null) { Map<Boolean, List<BType>> collectionTypeMap = 
types.getAllTypes(nextMethodReturnType).stream() .collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errorType))); List<BType> errorTypes = collectionTypeMap.get(true); if (errorTypes != null && !errorTypes.isEmpty()) { if (errorTypes.size() == 1) { errorType = errorTypes.get(0); } else { errorType = BUnionType.create(null, errorTypes.toArray(new BType[errorTypes.size()])); } } } if (queryExpr.isStream) { return new BStreamType(TypeTags.STREAM, actualType, errorType, symTable.streamType.tsymbol); } else if (queryExpr.isTable) { final BTableType tableType = new BTableType(TypeTags.TABLE, actualType, symTable.tableType.tsymbol); if (!queryExpr.fieldNameIdentifierList.isEmpty()) { tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream() .map(identifier -> ((BLangIdentifier) identifier).value).collect(Collectors.toList()); return BUnionType.create(null, tableType, symTable.errorType); } return tableType; } else if (errorType != null) { return BUnionType.create(null, actualType, errorType); } return actualType; } @Override public void visit(BLangDo doNode) { if (doNode.onFailClause != null) { doNode.onFailClause.accept(this); } } public void visit(BLangOnFailClause onFailClause) { onFailClause.body.stmts.forEach(stmt -> stmt.accept(this)); } private void handleFilterClauses (BLangExpression filterExpression) { checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType); BType actualType = filterExpression.type; if (TypeTags.TUPLE == actualType.tag) { dlog.error(filterExpression.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.booleanType, actualType); } queryEnvs.push(typeNarrower.evaluateTruth(filterExpression, selectClauses.peek(), queryEnvs.pop())); } private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) { if (bLangInputClause.variableDefinitionNode == null) { return; } BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable(); if 
(bLangInputClause.isDeclaredWithVar) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv); if (types.isAssignable(bLangInputClause.varType, typeNodeType)) { semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv); return; } if (typeNodeType != symTable.semanticError) { dlog.error(variableNode.typeNode.pos, DiagnosticCode.INCOMPATIBLE_TYPES, bLangInputClause.varType, typeNodeType); } semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv); } private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) { String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic"; boolean firstVisit = checkedExpr.expr.type == null; BType exprExpType; if (expType == symTable.noType) { exprExpType = symTable.noType; } else { exprExpType = BUnionType.create(null, expType, symTable.errorType); } BType exprType = checkExpr(checkedExpr.expr, env, exprExpType); if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) { if (firstVisit) { isTypeChecked = false; resultType = expType; return; } else { expType = checkedExpr.type; exprType = checkedExpr.expr.type; } } if (exprType.tag != TypeTags.UNION) { if (types.isAssignable(exprType, symTable.errorType)) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType); } else if (exprType != symTable.semanticError) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); } checkedExpr.type = symTable.semanticError; return; } BUnionType unionType = (BUnionType) exprType; Map<Boolean, List<BType>> resultTypeMap = unionType.getMemberTypes().stream() .collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errorType))); checkedExpr.equivalentErrorTypeList = 
resultTypeMap.get(true); if (checkedExpr.equivalentErrorTypeList == null || checkedExpr.equivalentErrorTypeList.size() == 0) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType); checkedExpr.type = symTable.semanticError; return; } List<BType> nonErrorTypeList = resultTypeMap.get(false); if (nonErrorTypeList == null || nonErrorTypeList.size() == 0) { dlog.error(checkedExpr.expr.pos, DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType); checkedExpr.type = symTable.semanticError; return; } BType actualType; if (nonErrorTypeList.size() == 1) { actualType = nonErrorTypeList.get(0); } else { actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypeList)); } resultType = types.checkType(checkedExpr, actualType, expType); } @Override public void visit(BLangServiceConstructorExpr serviceConstructorExpr) { resultType = serviceConstructorExpr.serviceNode.symbol.type; } @Override public void visit(BLangTypeTestExpr typeTestExpr) { typeTestExpr.typeNode.type = symResolver.resolveTypeNode(typeTestExpr.typeNode, env); checkExpr(typeTestExpr.expr, env); resultType = types.checkType(typeTestExpr, symTable.booleanType, expType); } public void visit(BLangAnnotAccessExpr annotAccessExpr) { checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc); BType actualType = symTable.semanticError; BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env, names.fromString(annotAccessExpr.pkgAlias.getValue()), names.fromString(annotAccessExpr.annotationName.getValue())); if (symbol == this.symTable.notFoundSymbol) { this.dlog.error(annotAccessExpr.pos, DiagnosticCode.UNDEFINED_ANNOTATION, annotAccessExpr.annotationName.getValue()); } else { annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol; BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? 
// Tail of visit(BLangAnnotAccessExpr): an annotation access yields the annotation's
// attached type (or `true` when it has none), unioned with nil for the absent case.
symTable.trueType : ((BAnnotationSymbol) symbol).attachedType.type;
            actualType = BUnionType.create(null, annotType, symTable.nilType);
        }
        this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
    }

    // Only these reference kinds are valid variable references in a binding pattern; any
    // other kind is reported as an invalid record binding pattern.
    private boolean isValidVariableReference(BLangExpression varRef) {
        switch (varRef.getKind()) {
            case SIMPLE_VARIABLE_REF:
            case RECORD_VARIABLE_REF:
            case TUPLE_VARIABLE_REF:
            case ERROR_VARIABLE_REF:
            case FIELD_BASED_ACCESS_EXPR:
            case INDEX_BASED_ACCESS_EXPR:
            case XML_ATTRIBUTE_ACCESS_EXPR:
                return true;
            default:
                dlog.error(varRef.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, varRef.type);
                return false;
        }
    }

    // When the target type is `readonly` (alone or as a union member) and the current
    // expected type is selectively immutable, replaces that `readonly` member with the
    // expected type's immutable intersection so the value is checked against a concrete
    // immutable shape; otherwise the target type is returned unchanged.
    private BType getEffectiveReadOnlyType(DiagnosticPos pos, BType origTargetType) {
        if (origTargetType == symTable.readonlyType) {
            if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
                return origTargetType;
            }
            return ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
                    (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names,
                    new HashSet<>());
        }
        if (origTargetType.tag != TypeTags.UNION) {
            return origTargetType;
        }
        boolean hasReadOnlyType = false;
        LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
        for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
            if (memberType == symTable.readonlyType) {
                hasReadOnlyType = true;
                continue;
            }
            nonReadOnlyTypes.add(memberType);
        }
        if (!hasReadOnlyType) {
            return origTargetType;
        }
        if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
            return origTargetType;
        }
        BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
        nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
                (SelectivelyImmutableReferenceType) expType, env, symTable, anonymousModelHelper, names,
                new HashSet<>()));
        return nonReadOnlyUnion;
    }

    // Checks an arrow function's body expression against the expected return type
    // (continues on the next line).
    private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
        SymbolEnv
arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env); bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv)); return checkExpr(bLangArrowFunction.body.expr, arrowFunctionEnv, expectedRetType); } private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) { if (paramTypes.size() != bLangArrowFunction.params.size()) { dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH, paramTypes.size(), bLangArrowFunction.params.size()); resultType = symTable.semanticError; bLangArrowFunction.params.forEach(param -> param.type = symTable.semanticError); return; } for (int i = 0; i < bLangArrowFunction.params.size(); i++) { BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i); BType bType = paramTypes.get(i); BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode(); valueTypeNode.setTypeKind(bType.getKind()); paramIdentifier.setTypeNode(valueTypeNode); paramIdentifier.type = bType; } } private void checkSelfReferences(DiagnosticPos pos, SymbolEnv env, BVarSymbol varSymbol) { if (env.enclVarSym == varSymbol) { dlog.error(pos, DiagnosticCode.SELF_REFERENCE_VAR, varSymbol.name); } } public List<BType> getListWithErrorTypes(int count) { List<BType> list = new ArrayList<>(count); for (int i = 0; i < count; i++) { list.add(symTable.semanticError); } return list; } private void checkFunctionInvocationExpr(BLangInvocation iExpr) { Name funcName = names.fromIdNode(iExpr.name); Name pkgAlias = names.fromIdNode(iExpr.pkgAlias); BSymbol funcSymbol = symTable.notFoundSymbol; BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr)); if (pkgSymbol == symTable.notFoundSymbol) { dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_MODULE, pkgAlias); } else { if (funcSymbol == symTable.notFoundSymbol) { BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, 
env, pkgAlias, funcName); if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) { funcSymbol = symbol; } if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) && (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) { funcSymbol = symbol; } } if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) { BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName); funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol; } } if ((funcSymbol.tag & SymTag.ERROR) == SymTag.ERROR || ((funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR && funcSymbol.type.tag == TypeTags.ERROR)) { iExpr.symbol = funcSymbol; iExpr.type = funcSymbol.type; checkErrorConstructorInvocation(iExpr); return; } else if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) { if (!missingNodesHelper.isMissingNode(funcName)) { dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName); } iExpr.argExprs.forEach(arg -> checkExpr(arg, env)); resultType = symTable.semanticError; return; } if (isFunctionPointer(funcSymbol)) { iExpr.functionPointerInvocation = true; markAndRegisterClosureVariable(funcSymbol, iExpr.pos); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID); if (langLibPackageID) { this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); } iExpr.symbol = funcSymbol; checkInvocationParamAndReturnType(iExpr); if (langLibPackageID && !iExpr.argExprs.isEmpty()) { checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).type, funcSymbol); } } private void markAndRegisterClosureVariable(BSymbol symbol, DiagnosticPos pos) { BLangInvokableNode encInvokable = 
env.enclInvokable; if (symbol.owner instanceof BPackageSymbol && env.node.getKind() != NodeKind.ARROW_EXPR) { return; } if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) && !isFunctionArgument(symbol, encInvokable.requiredParams)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.node.getKind() == NodeKind.ARROW_EXPR && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol) { resolvedSymbol.closure = true; ((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType); BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE); if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) { resolvedSymbol.closure = true; ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos)); } } BLangNode node = env.node; SymbolEnv cEnv = env; while (node != null && node.getKind() != NodeKind.FUNCTION) { if (node.getKind() == NodeKind.TRANSACTION || node.getKind() == NodeKind.RETRY || node.getKind() == NodeKind.ON_FAIL) { SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable); BSymbol resolvedSymbol = 
// Tail of markAndRegisterClosureVariable(): inside a transaction/retry/on-fail node the
// resolved symbol is marked as a closure variable; otherwise the walk continues outward
// through the enclosing environments until a function boundary (or the top) is reached.
symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
                if (resolvedSymbol != symTable.notFoundSymbol) {
                    resolvedSymbol.closure = true;
                }
                break;
            } else {
                SymbolEnv enclEnv = cEnv.enclEnv;
                if (enclEnv == null) {
                    break;
                }
                cEnv = enclEnv;
                node = cEnv.node;
            }
        }
    }

    // A symbol is "not a function" when it is neither a function/constructor symbol nor a
    // variable holding a function pointer.
    private boolean isNotFunction(BSymbol funcSymbol) {
        if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
                || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
            return false;
        }
        if (isFunctionPointer(funcSymbol)) {
            return false;
        }
        return true;
    }

    // A function pointer is a variable symbol of function kind whose NATIVE flag is unset.
    private boolean isFunctionPointer(BSymbol funcSymbol) {
        if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
            return false;
        }
        return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE
                && funcSymbol.kind == SymbolKind.FUNCTION
                && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE;
    }

    // Checks an error constructor invocation: the first positional argument is the
    // mandatory error message (a string) and an optional second positional argument is the
    // error cause (an error value); both are promoted from argExprs into requiredArgs
    // (continues on the next line).
    private void checkErrorConstructorInvocation(BLangInvocation iExpr) {
        BErrorType errorType = (BErrorType) iExpr.symbol.type;
        // With no expected type, the constructed error's own type becomes the expectation.
        if (this.expType == symTable.noType) {
            this.expType = errorType;
        }
        if (!types.isAssignable(errorType, this.expType)) {
            dlog.error(iExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, this.expType, errorType);
            resultType = symTable.semanticError;
        }
        // NOTE(review): empty argExprs with populated requiredArgs presumably means the
        // invocation was already processed on an earlier pass — confirm against callers.
        if (iExpr.argExprs.isEmpty() && !iExpr.requiredArgs.isEmpty()) {
            resultType = iExpr.type;
            return;
        }
        if (iExpr.argExprs.isEmpty()) {
            dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_ARG_ERROR_MESSAGE);
            return;
        }
        BLangExpression errorMessageArg = iExpr.argExprs.get(0);
        // The message must be positional; a leading named arg means the message is missing.
        if (errorMessageArg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_ARG_ERROR_MESSAGE);
            return;
        }
        checkExpr(errorMessageArg, this.env, symTable.stringType);
        iExpr.requiredArgs.add(0, errorMessageArg);
        iExpr.argExprs.remove(0);
        if (!iExpr.argExprs.isEmpty()) {
            BLangExpression secondArg = iExpr.argExprs.get(0);
            if (secondArg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
                checkExpr(secondArg, this.env, symTable.errorType);
                iExpr.requiredArgs.add(1, secondArg);
iExpr.argExprs.remove(0); } } if (errorType.detailType.tag == TypeTags.MAP) { BMapType detailMapType = (BMapType) errorType.detailType; List<BLangNamedArgsExpression> namedArgs = getProvidedErrorDetails(iExpr); if (namedArgs == null) { resultType = symTable.semanticError; return; } for (BLangNamedArgsExpression namedArg : namedArgs) { if (!types.isAssignable(namedArg.expr.type, detailMapType.constraint)) { dlog.error(namedArg.pos, DiagnosticCode.INVALID_ERROR_DETAIL_ARG_TYPE, namedArg.name, detailMapType.constraint, namedArg.expr.type); resultType = symTable.semanticError; } } if (resultType == symTable.semanticError) { return; } } else if (errorType.detailType.tag == TypeTags.RECORD) { BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType; BRecordType recordType = createErrorDetailRecordType(iExpr, targetErrorDetailRec); if (resultType == symTable.semanticError || targetErrorDetailRec == null) { return; } if (!types.isAssignable(recordType, targetErrorDetailRec)) { reportErrorDetailMissmatchError(iExpr, targetErrorDetailRec, recordType); resultType = symTable.semanticError; return; } } else { resultType = symTable.semanticError; } setErrorDetailArgsToNamedArgsList(iExpr); resultType = errorType; if (iExpr.symbol == symTable.errorType.tsymbol) { iExpr.symbol = ((BErrorTypeSymbol) errorType.tsymbol).ctorSymbol; } } private void reportErrorDetailMissmatchError(BLangInvocation iExpr, BRecordType targetErrorDetailRec, BRecordType recordType) { boolean detailedErrorReported = false; Set<String> checkedFieldNames = new HashSet<>(); for (Map.Entry<String, BField> fieldEntry : targetErrorDetailRec.fields.entrySet()) { checkedFieldNames.add(fieldEntry.getKey()); BField argField = recordType.fields.get(fieldEntry.getKey()); if (argField == null && !Symbols.isOptional(fieldEntry.getValue().symbol)) { dlog.error(iExpr.pos, DiagnosticCode.MISSING_ERROR_DETAIL_ARG, fieldEntry.getKey()); detailedErrorReported = true; } else if (!types.isAssignable(argField.type, 
fieldEntry.getValue().type)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ERROR_DETAIL_ARG_TYPE, fieldEntry.getKey(), fieldEntry.getValue().type, argField.type); } } if (recordType.fields.size() > checkedFieldNames.size()) { for (Map.Entry<String, BField> fieldEntry : recordType.fields.entrySet()) { if (!checkedFieldNames.contains(fieldEntry.getKey())) { BField field = fieldEntry.getValue(); if (targetErrorDetailRec.sealed) { dlog.error(iExpr.pos, DiagnosticCode.UNKNOWN_DETAIL_ARG_TO_SEALED_ERROR_DETAIL_REC, fieldEntry.getKey(), targetErrorDetailRec); detailedErrorReported = true; } else if (!types.isAssignable(field.type, targetErrorDetailRec.restFieldType)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE, fieldEntry.getKey(), targetErrorDetailRec); detailedErrorReported = true; } } } } if (!detailedErrorReported) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ERROR_CONSTRUCTOR_DETAIL, iExpr); } } private void setErrorDetailArgsToNamedArgsList(BLangInvocation iExpr) { List<BLangExpression> namedArgPositions = new ArrayList<>(iExpr.argExprs.size()); for (int i = 0; i < iExpr.argExprs.size(); i++) { BLangExpression argExpr = iExpr.argExprs.get(i); if (argExpr.getKind() == NodeKind.NAMED_ARGS_EXPR) { iExpr.requiredArgs.add(argExpr); namedArgPositions.add(argExpr); } else { dlog.error(argExpr.pos, DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG); resultType = symTable.semanticError; } } for (BLangExpression expr : namedArgPositions) { iExpr.argExprs.remove(expr); } } /** * Create a error detail record using all metadata from {@code targetErrorDetailsType} and put actual error details * from {@code iExpr} expression. 
* * @param iExpr error constructor invocation * @param targetErrorDetailsType target error details type to extract metadata such as pkgId from * @return error detail record */ private BRecordType createErrorDetailRecordType(BLangInvocation iExpr, BRecordType targetErrorDetailsType) { List<BLangNamedArgsExpression> namedArgs = getProvidedErrorDetails(iExpr); if (namedArgs == null) { return null; } BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol( SymTag.RECORD, targetErrorDetailsType.tsymbol.flags, Names.EMPTY, targetErrorDetailsType.tsymbol.pkgID, symTable.recordType, null, targetErrorDetailsType.tsymbol.pos, VIRTUAL); BRecordType recordType = new BRecordType(recordTypeSymbol); recordType.sealed = targetErrorDetailsType.sealed; recordType.restFieldType = targetErrorDetailsType.restFieldType; Set<Name> availableErrorDetailFields = new HashSet<>(); for (BLangNamedArgsExpression arg : namedArgs) { Name fieldName = names.fromIdNode(arg.name); BField field = new BField(fieldName, arg.pos, new BVarSymbol(0, fieldName, null, arg.type, null, arg.pos, VIRTUAL)); recordType.fields.put(field.name.value, field); availableErrorDetailFields.add(fieldName); } for (BField field : targetErrorDetailsType.fields.values()) { boolean notRequired = (field.symbol.flags & Flags.REQUIRED) != Flags.REQUIRED; if (notRequired && !availableErrorDetailFields.contains(field.name)) { BField defaultableField = new BField(field.name, iExpr.pos, new BVarSymbol(field.symbol.flags, field.name, null, field.type, null, iExpr.pos, VIRTUAL)); recordType.fields.put(defaultableField.name.value, defaultableField); } } return recordType; } private List<BLangNamedArgsExpression> getProvidedErrorDetails(BLangInvocation iExpr) { List<BLangNamedArgsExpression> namedArgs = new ArrayList<>(); for (int i = 0; i < iExpr.argExprs.size(); i++) { BLangExpression argExpr = iExpr.argExprs.get(i); checkExpr(argExpr, env); if (argExpr.getKind() != NodeKind.NAMED_ARGS_EXPR) { dlog.error(argExpr.pos, 
DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG); resultType = symTable.semanticError; return null; } namedArgs.add((BLangNamedArgsExpression) argExpr); } return namedArgs; } private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) { if (objectType.getKind() == TypeKind.SERVICE && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticCode.SERVICE_FUNCTION_INVALID_INVOCATION); return; } Name funcName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value)); BSymbol funcSymbol = symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol); if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) { if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) { dlog.error(iExpr.name.pos, DiagnosticCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value, objectType); resultType = symTable.semanticError; return; } } else { iExpr.symbol = funcSymbol; } if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) && !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF && (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_INIT_INVOCATION); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value); } if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION); } checkInvocationParamAndReturnType(iExpr); } private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) { BLangVariableReference varRef = (BLangVariableReference) aInv.expr; if (((varRef.symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async) { dlog.error(aInv.pos, 
DiagnosticCode.INVALID_ACTION_INVOCATION, varRef.type); this.resultType = symTable.semanticError; aInv.symbol = symTable.notFoundSymbol; return; } BVarSymbol epSymbol = (BVarSymbol) varRef.symbol; Name remoteMethodQName = names .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value)); Name actionName = names.fromIdNode(aInv.name); BSymbol remoteFuncSymbol = symResolver .lookupMemberSymbol(aInv.pos, ((BObjectTypeSymbol) epSymbol.type.tsymbol).methodScope, env, remoteMethodQName, SymTag.FUNCTION); if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) { dlog.error(aInv.name.pos, DiagnosticCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType); resultType = symTable.semanticError; return; } if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && aInv.remoteMethodCall) { dlog.error(aInv.pos, DiagnosticCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName); this.resultType = symTable.semanticError; return; } aInv.symbol = remoteFuncSymbol; checkInvocationParamAndReturnType(aInv); } private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) { return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol; } private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) { Name funcName = names.fromString(iExpr.name.value); BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName); if (funcSymbol == symTable.notFoundSymbol) { return symTable.notFoundSymbol; } iExpr.symbol = funcSymbol; iExpr.langLibInvocation = true; SymbolEnv enclEnv = this.env; this.env = SymbolEnv.createInvocationEnv(iExpr, this.env); if (iExpr.argExprs.isEmpty() || !iExpr.argExprs.get(0).equals(iExpr.expr)) { iExpr.argExprs.add(0, iExpr.expr); } checkInvocationParamAndReturnType(iExpr); this.env = enclEnv; return funcSymbol; } private void checkInvocationParamAndReturnType(BLangInvocation iExpr) { BType actualType = checkInvocationParam(iExpr); resultType = 
types.checkType(iExpr, actualType, this.expType); } private BType checkInvocationParam(BLangInvocation iExpr) { if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) { dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type); return symTable.noType; } List<BType> paramTypes = ((BInvokableType) iExpr.symbol.type).getParameterTypes(); int parameterCount = paramTypes.size(); iExpr.requiredArgs = new ArrayList<>(); int i = 0; BLangExpression vararg = null; boolean foundNamedArg = false; for (BLangExpression expr : iExpr.argExprs) { switch (expr.getKind()) { case NAMED_ARGS_EXPR: foundNamedArg = true; if (i < parameterCount) { iExpr.requiredArgs.add(expr); } else { dlog.error(expr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); } i++; break; case REST_ARGS_EXPR: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticCode.REST_ARG_DEFINED_AFTER_NAMED_ARG); continue; } vararg = expr; break; default: if (foundNamedArg) { dlog.error(expr.pos, DiagnosticCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG); } if (i < parameterCount) { iExpr.requiredArgs.add(expr); } else { iExpr.restArgs.add(expr); } i++; break; } } return checkInvocationArgs(iExpr, paramTypes, vararg); } private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg) { BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol; BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type; BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol; List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params); List<BLangExpression> nonRestArgs = iExpr.requiredArgs; List<BVarSymbol> valueProvidedParams = new ArrayList<>(); List<BVarSymbol> requiredParams = new ArrayList<>(); for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.defaultableParam) { continue; } requiredParams.add(nonRestParam); } int i = 0; for (; i < nonRestArgs.size(); i++) { BLangExpression arg = 
nonRestArgs.get(i); BType expectedType = paramTypes.get(i); if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) { types.checkType(arg.pos, arg.type, expectedType, DiagnosticCode.INCOMPATIBLE_TYPES); types.setImplicitCastExpr(arg, arg.type, expectedType); } if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) { if (i < nonRestParams.size()) { BVarSymbol param = nonRestParams.get(i); checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation); valueProvidedParams.add(param); requiredParams.remove(param); continue; } break; } if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) { BLangIdentifier argName = ((NamedArgNode) arg).getName(); BVarSymbol varSym = null; for (BVarSymbol nonRestParam : nonRestParams) { if (nonRestParam.getName().value.equals(argName.value)) { varSym = nonRestParam; } } if (varSym == null) { dlog.error(arg.pos, DiagnosticCode.UNDEFINED_PARAMETER, argName); break; } requiredParams.remove(varSym); if (valueProvidedParams.contains(varSym)) { dlog.error(arg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, varSym.name.value); continue; } checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation); valueProvidedParams.add(varSym); } } BVarSymbol restParam = invokableTypeSymbol.restParam; boolean errored = false; if (!requiredParams.isEmpty() && vararg == null) { for (BVarSymbol requiredParam : requiredParams) { dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_PARAMETER, requiredParam.name, iExpr.name.value); } errored = true; } if (restParam == null && (!iExpr.restArgs.isEmpty() || (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) { dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); errored = true; } if (errored) { return symTable.semanticError; } BType restType = restParam == null ? 
null : restParam.type; if (nonRestArgs.size() < nonRestParams.size() && vararg != null) { List<BType> tupleMemberTypes = new ArrayList<>(); BType tupleRestType = null; for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) { tupleMemberTypes.add(paramTypes.get(j)); } if (restType != null) { if (restType.tag == TypeTags.ARRAY) { tupleRestType = ((BArrayType) restType).eType; } else if (restType.tag == TypeTags.TUPLE) { BTupleType restTupleType = (BTupleType) restType; tupleMemberTypes.addAll(restTupleType.tupleTypes); if (restTupleType.restType != null) { tupleRestType = restTupleType.restType; } } } BTupleType tupleType = new BTupleType(tupleMemberTypes); tupleType.restType = tupleRestType; restType = tupleType; } if (restType == null && (vararg != null || !iExpr.restArgs.isEmpty())) { dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value); return symTable.semanticError; } if (vararg != null && !iExpr.restArgs.isEmpty()) { BType elementType = ((BArrayType) restType).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } checkTypeParamExpr(vararg, this.env, restType, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); } else if (vararg != null) { checkTypeParamExpr(vararg, this.env, restType, iExpr.langLibInvocation); iExpr.restArgs.add(vararg); } else if (!iExpr.restArgs.isEmpty()) { if (restType.tag == TypeTags.ARRAY) { BType elementType = ((BArrayType) restType).eType; for (BLangExpression restArg : iExpr.restArgs) { checkTypeParamExpr(restArg, this.env, elementType, true); } } else { BTupleType tupleType = (BTupleType) restType; List<BType> tupleMemberTypes = tupleType.tupleTypes; BType tupleRestType = tupleType.restType; int tupleMemCount = tupleMemberTypes.size(); for (int j = 0; j < iExpr.restArgs.size(); j++) { BLangExpression restArg = iExpr.restArgs.get(j); BType memType = j < tupleMemCount ? 
tupleMemberTypes.get(j) : tupleRestType; checkTypeParamExpr(restArg, this.env, memType, true); } } } BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType()); if (Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) && Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) { retType = typeBuilder.build(retType, iExpr); } boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID); String sortFuncName = "sort"; if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) { checkArrayLibSortFuncArgs(iExpr); } if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) { return this.generateFutureType(invokableSymbol, retType); } else { return retType; } } private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) { boolean isWorkerStart = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX); return new BFutureType(TypeTags.FUTURE, retType, null, isWorkerStart); } private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral); } private void checkTypeParamExpr(DiagnosticPos pos, BLangExpression arg, SymbolEnv env, BType expectedType, boolean inferTypeForNumericLiteral) { if (typeParamAnalyzer.notRequireTypeParams(env)) { checkExpr(arg, env, expectedType); return; } if (requireTypeInference(arg, inferTypeForNumericLiteral)) { BType expType = typeParamAnalyzer.getMatchingBoundType(expectedType, env); BType inferredType = checkExpr(arg, env, expType); typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType); return; } checkExpr(arg, env, expectedType); typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.type, this.env, expectedType); } private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) { switch (expr.getKind()) { case GROUP_EXPR: return 
requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral); case ARROW_EXPR: case LIST_CONSTRUCTOR_EXPR: case RECORD_LITERAL_EXPR: return true; case NUMERIC_LITERAL: return inferTypeForNumericLiteral; default: return false; } } private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) { BType fieldType = symTable.semanticError; boolean keyValueField = field.isKeyValueField(); boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP; boolean readOnlyConstructorField = false; String fieldName = null; DiagnosticPos pos = null; BLangExpression valueExpr = null; if (keyValueField) { valueExpr = ((BLangRecordKeyValueField) field).valueExpr; } else if (!spreadOpField) { valueExpr = (BLangRecordVarNameField) field; } switch (mappingType.tag) { case TypeTags.RECORD: if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; fieldType = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType); readOnlyConstructorField = keyValField.readonly; pos = key.expr.pos; fieldName = getKeyValueFieldName(keyValField); } else if (spreadOpField) { BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; checkExpr(spreadExpr, this.env); BType spreadExprType = spreadExpr.type; if (spreadExprType.tag == TypeTags.MAP) { return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint, getAllFieldType((BRecordType) mappingType), DiagnosticCode.INCOMPATIBLE_TYPES); } if (spreadExprType.tag != TypeTags.RECORD) { dlog.error(spreadExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadExprType); return symTable.semanticError; } boolean errored = false; for (BField bField : ((BRecordType) spreadExprType).fields.values()) { BType specFieldType = bField.type; BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, this.env, bField.name, (BRecordType) 
mappingType); if (expectedFieldType != symTable.semanticError && !types.isAssignable(specFieldType, expectedFieldType)) { dlog.error(spreadExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_FIELD, expectedFieldType, bField.name, specFieldType); if (!errored) { errored = true; } } } return errored ? symTable.semanticError : symTable.noType; } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; fieldType = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } break; case TypeTags.MAP: if (spreadOpField) { BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; BType spreadOpType = checkExpr(spreadExp, this.env); BType spreadOpMemberType; switch (spreadOpType.tag) { case TypeTags.RECORD: List<BType> types = new ArrayList<>(); BRecordType recordType = (BRecordType) spreadOpType; for (BField recField : recordType.fields.values()) { types.add(recField.type); } if (!recordType.sealed) { types.add(recordType.restFieldType); } spreadOpMemberType = getRepresentativeBroadType(types); break; case TypeTags.MAP: spreadOpMemberType = ((BMapType) spreadOpType).constraint; break; default: dlog.error(spreadExp.pos, DiagnosticCode.INCOMPATIBLE_TYPES_SPREAD_OP, spreadOpType); return symTable.semanticError; } return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint, DiagnosticCode.INCOMPATIBLE_TYPES); } boolean validMapKey; if (keyValueField) { BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValField.key; validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey); readOnlyConstructorField = keyValField.readonly; pos = key.pos; fieldName = getKeyValueFieldName(keyValField); } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; validMapKey = 
checkValidJsonOrMapLiteralKeyExpr(varNameField, false); readOnlyConstructorField = varNameField.readonly; pos = varNameField.pos; fieldName = getVarNameFieldName(varNameField); } fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError; break; } if (readOnlyConstructorField) { if (types.isSelectivelyImmutableType(fieldType)) { fieldType = ImmutableTypeCloner.getImmutableIntersectionType(pos, types, (SelectivelyImmutableReferenceType) fieldType, env, symTable, anonymousModelHelper, names, new HashSet<>()); } else if (!types.isInherentlyImmutableType(fieldType)) { dlog.error(pos, DiagnosticCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType); fieldType = symTable.semanticError; } } if (spreadOpField) { valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr; } BLangExpression exprToCheck = valueExpr; if (this.nonErrorLoggingCheck) { valueExpr.cloneAttempt++; exprToCheck = nodeCloner.clone(valueExpr); } else { ((BLangNode) field).type = fieldType; } return checkExpr(exprToCheck, this.env, fieldType); } private BType checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) { Name fieldName; if (computedKey) { checkExpr(keyExpr, this.env, symTable.stringType); if (keyExpr.type == symTable.semanticError) { return symTable.semanticError; } LinkedHashSet<BType> fieldTypes = recordType.fields.values().stream() .map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (recordType.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(recordType.restFieldType); } return BUnionType.create(null, fieldTypes); } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) { BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr; fieldName = names.fromIdNode(varRef.variableName); } else if (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING) { fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value); } else { 
// (continuation of checkRecordLiteralKeyExpr — its start lies outside this chunk)
// Key expression is neither a computed key, a simple var ref, nor a string literal: invalid key form.
dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
return symTable.semanticError;
}
// Non-computed key: resolve the named field against the record type.
return checkRecordLiteralKeyByName(keyExpr.pos, this.env, fieldName, recordType);
}

/**
 * Resolves the type associated with a record-literal key, by field name.
 *
 * @param pos        position used for error reporting
 * @param env        symbol environment to resolve the field in
 * @param key        the field name being looked up
 * @param recordType the target record type
 * @return the declared field type if the field exists; for an undeclared field, the record's
 *         rest-field type when the record is open, or a semantic error (with a diagnostic
 *         logged) when the record is sealed
 */
private BType checkRecordLiteralKeyByName(DiagnosticPos pos, SymbolEnv env, Name key, BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(pos, env, key, recordType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        return fieldSymbol.type;
    }
    // Field is not declared on the record. A sealed (closed) record admits no extra fields.
    if (recordType.sealed) {
        dlog.error(pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
                recordType.tsymbol.type.getKind().typeName(), recordType);
        return symTable.semanticError;
    }
    // Open record: an undeclared field takes the rest-field type.
    return recordType.restFieldType;
}

/**
 * Builds a union of every possible member-value type of the given record: all declared field
 * types plus the rest-field type (when present and meaningful).
 *
 * @param recordType the record whose field types are collected
 * @return a union type over the collected field types
 */
private BType getAllFieldType(BRecordType recordType) {
    LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
    for (BField field : recordType.fields.values()) {
        possibleTypes.add(field.type);
    }
    BType restFieldType = recordType.restFieldType;
    // noType marks "no rest field"; only a real rest-field type widens the union.
    if (restFieldType != null && restFieldType != symTable.noType) {
        possibleTypes.add(restFieldType);
    }
    return BUnionType.create(null, possibleTypes);
}

/**
 * Validates a key expression of a map/JSON mapping-constructor field.
 *
 * @param keyExpr     the key expression
 * @param computedKey true when the key is a computed ([expr]) key
 * @return true when the key is valid: a computed key that type-checks as string, a simple
 *         variable reference, or a string literal; false otherwise (a diagnostic is logged
 *         for the invalid-key case)
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) {
    if (computedKey) {
        // Computed keys must evaluate to a string.
        checkExpr(keyExpr, this.env, symTable.stringType);
        if (keyExpr.type == symTable.semanticError) {
            return false;
        }
        return true;
    } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            || (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING)) {
        // Identifier keys and string-literal keys are always acceptable.
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}

/**
 * Widens a type to include nil, used for access expressions that may produce no value.
 *
 * @param actualType the type to widen
 * @return the type unchanged if it is already nullable; otherwise a union of the type and nil
 */
private BType addNilForNillableAccessType(BType actualType) {
    // Already nullable (e.g. contains nil) — avoid building a redundant union.
    if (actualType.isNullable()) {
        return actualType;
    }
    return BUnionType.create(null, actualType, symTable.nilType);
}

// Resolves a *required* (non-optional) record field access; continues past this chunk.
private BType checkRecordRequiredFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName,
// (continuation of checkRecordRequiredFieldAccess — its signature lies on the previous chunk line)
recordType.tsymbol);
// A required-field access fails when the field is absent OR declared optional.
if (fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol)) {
    return symTable.semanticError;
}
varReferExpr.symbol = fieldSymbol;
return fieldSymbol.type;
}

/**
 * Resolves an *optional* record field access (e.g. {@code r?.f} semantics).
 *
 * @param varReferExpr the field-access reference; its symbol is set on success
 * @param fieldName    the accessed field's name
 * @param recordType   the record type being accessed
 * @return the field's type when the field exists and is declared optional; a semantic error
 *         when the field is absent or is a required field
 */
private BType checkRecordOptionalFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName,
            recordType.tsymbol);
    // Only fields explicitly marked optional are accessible via optional access.
    if (fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol)) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}

/**
 * Resolves a field access that can only be satisfied by the record's rest field.
 *
 * @param varReferExpr the field-access reference
 * @param fieldName    the accessed field's name
 * @param recordType   the record type being accessed
 * @return the rest-field type when the field is undeclared and the record is open; a semantic
 *         error when the field is an explicitly declared field or the record is sealed
 */
private BType checkRecordRestFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName,
            recordType.tsymbol);
    // A declared field must be accessed as a regular/optional field, not via the rest field.
    if (fieldSymbol != symTable.notFoundSymbol) {
        return symTable.semanticError;
    }
    if (recordType.sealed) {
        return symTable.semanticError;
    }
    return recordType.restFieldType;
}

/**
 * Resolves a field access on an object type. Falls back to looking the name up as an attached
 * function (method) when no field with that name exists.
 *
 * @param bLangFieldBasedAccess the access expression; its symbol is set on success
 * @param fieldName             the accessed field/method name
 * @param objectType            the object type being accessed
 * @return the resolved field or attached-function type, or a semantic error (with a
 *         diagnostic logged) when neither exists
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
                                     Name fieldName, BObjectType objectType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
            this.env, fieldName, objectType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        // Setting the field symbol. This is used during the code generation phase
        bLangFieldBasedAccess.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    // Not a field: try the name as an attached function of the object.
    Name objFuncName = names.fromString(Symbols
            .getAttachedFuncSymbolName(objectType.tsymbol.name.value, fieldName.value));
    fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName,
            objectType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        dlog.error(bLangFieldBasedAccess.field.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
                fieldName, objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
        return symTable.semanticError;
    }
    // Setting the field symbol. This is used during the code generation phase
    bLangFieldBasedAccess.symbol = fieldSymbol;
    return fieldSymbol.type;
}

// Resolves the member type for an index access on a tuple; continues past this chunk.
private BType checkTupleFieldType(BType tupleType, int
// (continuation of checkTupleFieldType — its signature lies on the previous chunk line)
indexValue) {
    BTupleType bTupleType = (BTupleType) tupleType;
    // Index beyond the fixed members: resolves to the rest type when the tuple has one.
    if (bTupleType.tupleTypes.size() <= indexValue && bTupleType.restType != null) {
        return bTupleType.restType;
    } else if (indexValue < 0 || bTupleType.tupleTypes.size() <= indexValue) {
        // Negative index, or out of range on a tuple without a rest type.
        return symTable.semanticError;
    }
    return bTupleType.tupleTypes.get(indexValue);
}

/**
 * Validates that an XML element literal's start and end tag names agree.
 * Both tag-name expressions are type-checked as strings; a mismatch diagnostic is logged
 * unless both are the same qualified name, or neither is a qualified name (in which case
 * the comparison is left to runtime/later phases — NOTE(review): assumed, confirm).
 *
 * @param bLangXMLElementLiteral the XML element literal under check
 * @param xmlElementEnv          the environment the tag-name expressions are checked in
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    // check type for start and end tags
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    // Self-closing element: no end tag to validate.
    if (endTagName == null) {
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME
            && startTagName.equals(endTagName)) {
        return;
    }
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        return;
    }
    dlog.error(bLangXMLElementLiteral.pos, DiagnosticCode.XML_TAGS_MISMATCH);
}

/**
 * Type-checks the interpolated expressions of a string template, requiring each to be a
 * simple value type (int, float, decimal, string, boolean) — optionally also xml.
 * Logs an incompatible-types diagnostic for each offending expression.
 *
 * @param exprs    the template's interpolation expressions
 * @param allowXml whether xml-typed interpolations are permitted
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs, boolean allowXml) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, env);
        BType type = expr.type;
        // Already errored elsewhere; avoid cascading diagnostics.
        if (type == symTable.semanticError) {
            continue;
        }
        // Tags below TypeTags.JSON are the simple value types, which are always allowed;
        // anything at or above needs to be rejected (or allowed only when it is xml).
        if (type.tag >= TypeTags.JSON) {
            if (allowXml) {
                if (type.tag != TypeTags.XML) {
                    dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                            BUnionType.create(null, symTable.intType, symTable.floatType,
                                    symTable.decimalType, symTable.stringType,
                                    symTable.booleanType, symTable.xmlType), type);
                }
                continue;
            }
            dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                    BUnionType.create(null, symTable.intType, symTable.floatType,
                            symTable.decimalType, symTable.stringType, symTable.booleanType), type);
        }
    }
}

/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
* * @param exprs Child nodes * @param xmlElementEnv * @return Reduced set of children */ private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) { List<BLangExpression> newChildren = new ArrayList<>(); List<BLangExpression> tempConcatExpressions = new ArrayList<>(); for (BLangExpression expr : exprs) { BType exprType = checkExpr(expr, xmlElementEnv); if (TypeTags.isXMLTypeTag(exprType.tag)) { if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); tempConcatExpressions = new ArrayList<>(); } newChildren.add(expr); continue; } BType type = expr.type; if (type.tag >= TypeTags.JSON) { if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) { dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType, symTable.stringType, symTable.booleanType, symTable.xmlType), type); } continue; } tempConcatExpressions.add(expr); } if (!tempConcatExpressions.isEmpty()) { newChildren.add(getXMLTextLiteral(tempConcatExpressions)); } return newChildren; } private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) { BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode(); xmlTextLiteral.textFragments = exprs; xmlTextLiteral.pos = exprs.get(0).pos; xmlTextLiteral.type = symTable.xmlType; return xmlTextLiteral; } private BType getTypeOfExprInFieldAccess(BLangExpression expr) { checkExpr(expr, this.env, symTable.noType); return expr.type; } private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) { accessExpr.originalType = actualType; BUnionType unionType = BUnionType.create(null, actualType); if (returnsNull(accessExpr)) { unionType.add(symTable.nilType); } BType parentType = accessExpr.expr.type; if (accessExpr.errorSafeNavigation && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION 
&& ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) { unionType.add(symTable.errorType); } if (unionType.getMemberTypes().size() == 1) { return unionType.getMemberTypes().toArray(new BType[0])[0]; } return unionType; } private boolean returnsNull(BLangAccessExpression accessExpr) { BType parentType = accessExpr.expr.type; if (parentType.isNullable() && parentType.tag != TypeTags.JSON) { return true; } if (parentType.tag != TypeTags.MAP) { return false; } if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR && accessExpr.expr.type.tag == TypeTags.MAP) { BType constraintType = ((BMapType) accessExpr.expr.type).constraint; return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON; } return false; } private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.OBJECT) { return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memType); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType 
individualFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return individualFieldType; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { return symTable.semanticError; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { if (varRefType.tag == TypeTags.RECORD) { BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType != symTable.semanticError) { return fieldType; } fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType); if (fieldType == symTable.semanticError) { return fieldType; } return BUnionType.create(null, fieldType, symTable.nilType); } Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes(); BType fieldType; boolean nonMatchedRecordExists = false; 
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : memberTypes) { BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.isEmpty()) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType; } private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) { actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) { actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName); if (actualType != symTable.semanticError) { fieldAccessExpr.originalType = actualType; return actualType; } if (!fieldAccessExpr.lhsVar) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD, varRefType, fieldName); return actualType; } actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName); fieldAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType); } } else if (types.isLax(varRefType)) { if (fieldAccessExpr.lhsVar) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType); return symTable.semanticError; } if 
(fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } BType laxFieldAccessType = getLaxFieldAccessType(varRefType); actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.originalType = laxFieldAccessType; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType); fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; } else if (TypeTags.isXMLTypeTag(varRefType.tag)) { if (fieldAccessExpr.lhsVar) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE); } actualType = symTable.xmlType; fieldAccessExpr.originalType = actualType; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType); } return actualType; } private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) { BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldAccess = fieldAccessExpr; String nsPrefix = nsPrefixedFieldAccess.nsPrefix.value; BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix)); if (nsSymbol == symTable.notFoundSymbol) { dlog.error(nsPrefixedFieldAccess.nsPrefix.pos, DiagnosticCode.CANNOT_FIND_XML_NAMESPACE, nsPrefixedFieldAccess.nsPrefix); } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst( 
nsPrefixedFieldAccess.field.value, nsPrefixedFieldAccess.nsPrefix.value, (BPackageSymbol) nsSymbol, fieldAccessExpr.pos); } else { nsPrefixedFieldAccess.nsSymbol = (BXMLNSSymbol) nsSymbol; } } private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) { return fieldBasedAccess.originalType != null && types.isLax(fieldBasedAccess.originalType); } private BType getLaxFieldAccessType(BType exprType) { switch (exprType.tag) { case TypeTags.JSON: return symTable.jsonType; case TypeTags.XML: case TypeTags.XML_ELEMENT: return symTable.stringType; case TypeTags.MAP: return ((BMapType) exprType).constraint; case TypeTags.UNION: BUnionType unionType = (BUnionType) exprType; LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); unionType.getMemberTypes().forEach(bType -> memberTypes.add(getLaxFieldAccessType(bType))); return memberTypes.size() == 1 ? memberTypes.iterator().next() : BUnionType.create(null, memberTypes); } return symTable.semanticError; } private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) { BType actualType = symTable.semanticError; boolean nillableExprType = false; BType effectiveType = varRefType; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } effectiveType = nilRemovedSet.size() == 1 ? 
nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); } } if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) { actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName); if (actualType == symTable.semanticError) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD, varRefType, fieldName); } fieldAccessExpr.nilSafeNavigation = nillableExprType; fieldAccessExpr.originalType = getSafeType(actualType, fieldAccessExpr); } else if (types.isLax(effectiveType)) { BType laxFieldAccessType = getLaxFieldAccessType(effectiveType); actualType = accessCouldResultInError(effectiveType) ? BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR && hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) { BType laxFieldAccessType = getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType); actualType = accessCouldResultInError(effectiveType) ? 
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType; if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) { resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr); } fieldAccessExpr.errorSafeNavigation = true; fieldAccessExpr.originalType = laxFieldAccessType; fieldAccessExpr.nilSafeNavigation = true; nillableExprType = true; } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) { dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType); } if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private boolean accessCouldResultInError(BType type) { if (type.tag == TypeTags.JSON) { return true; } if (type.tag == TypeTags.MAP) { return false; } if (type.tag == TypeTags.XML) { return true; } if (type.tag == TypeTags.UNION) { return ((BUnionType) type).getMemberTypes().stream().anyMatch(this::accessCouldResultInError); } else { return false; } } private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) { BType varRefType = indexBasedAccessExpr.expr.type; boolean nillableExprType = false; if (varRefType.tag == TypeTags.UNION) { Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes(); if (memTypes.contains(symTable.nilType)) { LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>(); for (BType bType : memTypes) { if (bType != symTable.nilType) { nilRemovedSet.add(bType); } else { nillableExprType = true; } } if (nillableExprType) { varRefType = nilRemovedSet.size() == 1 ? 
nilRemovedSet.iterator().next() : BUnionType.create(null, nilRemovedSet); if (!types.isSubTypeOfMapping(varRefType)) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING, indexBasedAccessExpr.expr.type); return symTable.semanticError; } if (indexBasedAccessExpr.lhsVar) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.type); return symTable.semanticError; } } } } BLangExpression indexExpr = indexBasedAccessExpr.indexExpr; BType actualType = symTable.semanticError; if (types.isSubTypeOfMapping(varRefType)) { checkExpr(indexExpr, this.env, symTable.stringType); if (indexExpr.type == symTable.semanticError) { return symTable.semanticError; } actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType); if (actualType == symTable.semanticError) { if (indexExpr.type.tag == TypeTags.STRING && isConst(indexExpr)) { String fieldName = getConstFieldName(indexExpr); dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, fieldName, indexBasedAccessExpr.expr.type); return actualType; } dlog.error(indexExpr.pos, DiagnosticCode.INVALID_RECORD_INDEX_EXPR, indexExpr.type); return actualType; } indexBasedAccessExpr.nilSafeNavigation = nillableExprType; indexBasedAccessExpr.originalType = getSafeType(actualType, indexBasedAccessExpr); } else if (types.isSubTypeOfList(varRefType)) { checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.type == symTable.semanticError) { return symTable.semanticError; } actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType); indexBasedAccessExpr.originalType = actualType; if (actualType == symTable.semanticError) { if (indexExpr.type.tag == TypeTags.INT && isConst(indexExpr)) { dlog.error(indexBasedAccessExpr.indexExpr.pos, DiagnosticCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr)); return actualType; } dlog.error(indexExpr.pos, 
DiagnosticCode.INVALID_LIST_INDEX_EXPR, indexExpr.type); return actualType; } } else if (types.isAssignable(varRefType, symTable.stringType)) { if (indexBasedAccessExpr.lhsVar) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT, indexBasedAccessExpr.expr.type); return symTable.semanticError; } checkExpr(indexExpr, this.env, symTable.intType); if (indexExpr.type == symTable.semanticError) { return symTable.semanticError; } indexBasedAccessExpr.originalType = symTable.stringType; actualType = symTable.stringType; } else if (varRefType.tag == TypeTags.XML) { if (indexBasedAccessExpr.lhsVar) { indexExpr.type = symTable.semanticError; dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE); return actualType; } BType type = checkExpr(indexExpr, this.env, symTable.intType); if (type == symTable.semanticError) { return type; } actualType = varRefType; indexBasedAccessExpr.originalType = actualType; } else if (varRefType.tag == TypeTags.TABLE) { BTableType tableType = (BTableType) indexBasedAccessExpr.expr.type; BType keyTypeConstraint = tableType.keyTypeConstraint; if (tableType.keyTypeConstraint == null) { keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.type). 
fieldNameList, ((BTableType) indexBasedAccessExpr.expr.type).constraint); if (keyTypeConstraint == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE, indexBasedAccessExpr.expr); return symTable.semanticError; } } if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) { checkExpr(indexExpr, this.env, keyTypeConstraint); if (indexExpr.type == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } else { List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr) indexBasedAccessExpr.indexExpr).multiKeyIndexExprs; List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes; if (keyConstraintTypes.size() != multiKeyExpressionList.size()) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } for (int i = 0; i < multiKeyExpressionList.size(); i++) { BLangExpression keyExpr = multiKeyExpressionList.get(i); checkExpr(keyExpr, this.env, keyConstraintTypes.get(i)); if (keyExpr.type == symTable.semanticError) { dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS, keyTypeConstraint); return symTable.semanticError; } } } if (expType.tag != TypeTags.NONE) { BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType); if (resultType == symTable.semanticError) { return symTable.semanticError; } } indexBasedAccessExpr.originalType = tableType.constraint; actualType = tableType.constraint; } else if (varRefType == symTable.semanticError) { indexBasedAccessExpr.indexExpr.type = symTable.semanticError; return symTable.semanticError; } else { indexBasedAccessExpr.indexExpr.type = symTable.semanticError; dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING, 
indexBasedAccessExpr.expr.type); return symTable.semanticError; } if (nillableExprType && !actualType.isNullable()) { actualType = BUnionType.create(null, actualType, symTable.nilType); } return actualType; } private Long getConstIndex(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value : (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private String getConstFieldName(BLangExpression indexExpr) { return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value : (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value; } private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType, BArrayType arrayType) { BType actualType = symTable.semanticError; switch (indexExprType.tag) { case TypeTags.INT: BLangExpression indexExpr = indexBasedAccess.indexExpr; if (!isConst(indexExpr) || arrayType.state == BArrayState.UNSEALED) { actualType = arrayType.eType; break; } actualType = getConstIndex(indexExpr) >= arrayType.size ? 
symTable.semanticError : arrayType.eType; break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) indexExprType; boolean validIndexExists = false; for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); if (indexValue >= 0 && (arrayType.state == BArrayState.UNSEALED || indexValue < arrayType.size)) { validIndexExists = true; break; } } if (!validIndexExists) { return symTable.semanticError; } actualType = arrayType.eType; break; case TypeTags.UNION: List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream() .filter(memType -> memType.tag == TypeTags.FINITE) .map(matchedType -> (BFiniteType) matchedType) .collect(Collectors.toList()); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType); if (elementType == symTable.semanticError) { return symTable.semanticError; } actualType = arrayType.eType; } return actualType; } private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.ARRAY) { return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.type, (BArrayType) type); } if (type.tag == TypeTags.TUPLE) { return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.type); } LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return 
symTable.semanticError; } if (fieldTypeMembers.size() == 1) { return fieldTypeMembers.iterator().next(); } return BUnionType.create(null, fieldTypeMembers); } private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.INT: if (isConst(indexExpr)) { actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue()); } else { BTupleType tupleExpr = (BTupleType) accessExpr.expr.type; LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>()); actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes); } break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue(); BType fieldType = checkTupleFieldType(tuple, indexValue); if (fieldType.tag != TypeTags.SEMANTIC_ERROR) { possibleTypes.add(fieldType); } } if (possibleTypes.size() == 0) { return symTable.semanticError; } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) { tupleType.tupleTypes .forEach(memberType -> { if (memberType.tag == TypeTags.UNION) { collectMemberTypes((BUnionType) memberType, memberTypes); } else { memberTypes.add(memberType); } }); return memberTypes; } private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) { if (type.tag == TypeTags.MAP) { BType constraint = ((BMapType) type).constraint; return accessExpr.lhsVar ? 
constraint : addNilForNillableAccessType(constraint); } if (type.tag == TypeTags.RECORD) { return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.type); } BType fieldType; boolean nonMatchedRecordExists = false; LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>(); for (BType memType : ((BUnionType) type).getMemberTypes()) { BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType); if (individualFieldType == symTable.semanticError) { nonMatchedRecordExists = true; continue; } fieldTypeMembers.add(individualFieldType); } if (fieldTypeMembers.size() == 0) { return symTable.semanticError; } if (fieldTypeMembers.size() == 1) { fieldType = fieldTypeMembers.iterator().next(); } else { fieldType = BUnionType.create(null, fieldTypeMembers); } return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType; } private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) { BType actualType = symTable.semanticError; BLangExpression indexExpr = accessExpr.indexExpr; switch (currentType.tag) { case TypeTags.STRING: if (isConst(indexExpr)) { String fieldName = IdentifierEncoder.escapeSpecialCharacters(getConstFieldName(indexExpr)); actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType != symTable.semanticError) { return actualType; } actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); if (actualType == symTable.semanticError) { return actualType; } if (actualType == symTable.neverType) { return actualType; } return addNilForNillableAccessType(actualType); } if (accessExpr.lhsVar) { return actualType; } return addNilForNillableAccessType(actualType); } LinkedHashSet<BType> fieldTypes = record.fields.values().stream() 
.map(field -> field.type) .collect(Collectors.toCollection(LinkedHashSet::new)); if (record.restFieldType.tag != TypeTags.NONE) { fieldTypes.add(record.restFieldType); } if (fieldTypes.stream().noneMatch(BType::isNullable)) { fieldTypes.add(symTable.nilType); } actualType = BUnionType.create(null, fieldTypes); break; case TypeTags.FINITE: BFiniteType finiteIndexExpr = (BFiniteType) currentType; LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>(); for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) { String fieldName = (String) ((BLangLiteral) finiteMember).value; BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record); if (fieldType == symTable.semanticError) { fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record); } if (fieldType != symTable.semanticError) { fieldType = addNilForNillableAccessType(fieldType); } } if (fieldType.tag == TypeTags.SEMANTIC_ERROR) { continue; } possibleTypes.add(fieldType); } if (possibleTypes.isEmpty()) { return symTable.semanticError; } if (possibleTypes.stream().noneMatch(BType::isNullable)) { possibleTypes.add(symTable.nilType); } actualType = possibleTypes.size() == 1 ? 
possibleTypes.iterator().next() : BUnionType.create(null, possibleTypes); break; case TypeTags.UNION: LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>(); List<BFiniteType> finiteTypes = new ArrayList<>(); ((BUnionType) currentType).getMemberTypes().forEach(memType -> { if (memType.tag == TypeTags.FINITE) { finiteTypes.add((BFiniteType) memType); } else { BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } } }); BFiniteType finiteType; if (finiteTypes.size() == 1) { finiteType = finiteTypes.get(0); } else { Set<BLangExpression> valueSpace = new LinkedHashSet<>(); finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace())); finiteType = new BFiniteType(null, valueSpace); } BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType); if (possibleType.tag == TypeTags.UNION) { possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes()); } else { possibleTypesByMember.add(possibleType); } if (possibleTypesByMember.contains(symTable.semanticError)) { return symTable.semanticError; } actualType = possibleTypesByMember.size() == 1 ? 
possibleTypesByMember.iterator().next() : BUnionType.create(null, possibleTypesByMember); } return actualType; } private BType getSafeType(BType type, BLangAccessExpression accessExpr) { if (type.tag != TypeTags.UNION) { return type; } List<BType> lhsTypes = new ArrayList<>(((BUnionType) type).getMemberTypes()); if (accessExpr.errorSafeNavigation) { if (!lhsTypes.contains(symTable.errorType)) { dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type); return symTable.semanticError; } lhsTypes = lhsTypes.stream() .filter(memberType -> memberType != symTable.errorType) .collect(Collectors.toList()); if (lhsTypes.isEmpty()) { dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type); return symTable.semanticError; } } if (accessExpr.nilSafeNavigation) { lhsTypes = lhsTypes.stream() .filter(memberType -> memberType != symTable.nilType) .collect(Collectors.toList()); } if (lhsTypes.size() == 1) { return lhsTypes.get(0); } return BUnionType.create(null, new LinkedHashSet<>(lhsTypes)); } private List<BType> getTypesList(BType type) { if (type.tag == TypeTags.UNION) { BUnionType unionType = (BUnionType) type; return new ArrayList<>(unionType.getMemberTypes()); } else { return Lists.of(type); } } private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) { List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.type); LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>(); for (BType type : exprTypes) { boolean assignable = false; for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) { BType patternExprType = pattern.expr.type; matchExprTypes.addAll(getTypesList(patternExprType)); if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) { return new LinkedHashSet<BType>() { { add(symTable.semanticError); } }; } assignable = this.types.isAssignable(type, pattern.variable.type); if (assignable) { break; } } if (!assignable) { 
matchExprTypes.add(type); } } return matchExprTypes; } private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) { if (encounteredTypes.contains(type)) { return false; } encounteredTypes.add(type); switch (type.tag) { case TypeTags.UNION: for (BType bType1 : ((BUnionType) type).getMemberTypes()) { if (couldHoldTableValues(bType1, encounteredTypes)) { return true; } } return false; case TypeTags.MAP: return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes); case TypeTags.RECORD: BRecordType recordType = (BRecordType) type; for (BField field : recordType.fields.values()) { if (couldHoldTableValues(field.type, encounteredTypes)) { return true; } } return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes); case TypeTags.ARRAY: return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes); case TypeTags.TUPLE: for (BType bType : ((BTupleType) type).getTupleTypes()) { if (couldHoldTableValues(bType, encounteredTypes)) { return true; } } return false; } return false; } private boolean isConst(BLangExpression expression) { if (ConstantAnalyzer.isValidConstantExpressionNode(expression)) { return true; } if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) { return false; } return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT; } private Name getCurrentCompUnit(BLangNode node) { return names.fromString(node.pos.getSource().getCompilationUnitName()); } private BType getRepresentativeBroadType(List<BType> inferredTypeList) { for (int i = 0; i < inferredTypeList.size(); i++) { BType type = inferredTypeList.get(i); if (type.tag == TypeTags.SEMANTIC_ERROR) { return type; } for (int j = i + 1; j < inferredTypeList.size(); j++) { BType otherType = inferredTypeList.get(j); if (otherType.tag == TypeTags.SEMANTIC_ERROR) { return otherType; } if (types.isAssignable(otherType, type)) { inferredTypeList.remove(j); j -= 1; continue; } if (types.isAssignable(type, 
otherType)) { inferredTypeList.remove(i); i -= 1; break; } } } if (inferredTypeList.size() == 1) { return inferredTypeList.get(0); } return BUnionType.create(null, inferredTypeList.toArray(new BType[0])); } private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) { PackageID pkgID = env.enclPkg.symbol.pkgID; BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL); Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>(); List<BType> restFieldTypes = new ArrayList<>(); for (RecordLiteralNode.RecordField field : recordLiteral.fields) { if (field.isKeyValueField()) { BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field; BLangRecordKey key = keyValue.key; BLangExpression expression = keyValue.valueExpr; BLangExpression keyExpr = key.expr; if (key.computedKey) { checkExpr(keyExpr, env, symTable.stringType); BType exprType = checkExpr(expression, env, expType); if (isUniqueType(restFieldTypes, exprType)) { restFieldTypes.add(exprType); } } else { addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr), keyValue.readonly ? 
checkExpr(expression, env, symTable.readonlyType) : checkExpr(expression, env, expType), true, keyValue.readonly); } } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) { BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType); int typeTag = type.tag; if (typeTag == TypeTags.MAP) { BType constraintType = ((BMapType) type).constraint; if (isUniqueType(restFieldTypes, constraintType)) { restFieldTypes.add(constraintType); } } if (type.tag != TypeTags.RECORD) { continue; } BRecordType recordType = (BRecordType) type; for (BField recField : recordType.fields.values()) { addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type, !Symbols.isOptional(recField.symbol), false); } if (!recordType.sealed) { BType restFieldType = recordType.restFieldType; if (isUniqueType(restFieldTypes, restFieldType)) { restFieldTypes.add(restFieldType); } } } else { BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field; addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField), varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) : checkExpr(varNameField, env, expType), true, varNameField.readonly); } } LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); boolean allReadOnlyNonRestFields = true; for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) { FieldInfo fieldInfo = entry.getValue(); List<BType> types = fieldInfo.types; if (types.contains(symTable.semanticError)) { return symTable.semanticError; } String key = entry.getKey(); Name fieldName = names.fromString(key); BType type = types.size() == 1 ? 
types.get(0) : BUnionType.create(null, types.toArray(new BType[0])); Set<Flag> flags = new HashSet<>(); if (fieldInfo.required) { flags.add(Flag.REQUIRED); } else { flags.add(Flag.OPTIONAL); } if (fieldInfo.readonly) { flags.add(Flag.READONLY); } else if (allReadOnlyNonRestFields) { allReadOnlyNonRestFields = false; } BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol, symTable.builtinPos, VIRTUAL); fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol)); recordSymbol.scope.define(fieldName, fieldSymbol); } BRecordType recordType = new BRecordType(recordSymbol); recordType.fields = fields; if (restFieldTypes.contains(symTable.semanticError)) { return symTable.semanticError; } if (restFieldTypes.isEmpty()) { recordType.sealed = true; recordType.restFieldType = symTable.noType; } else if (restFieldTypes.size() == 1) { recordType.restFieldType = restFieldTypes.get(0); } else { recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0])); } recordSymbol.type = recordType; recordType.tsymbol = recordSymbol; if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) { recordType.flags |= Flags.READONLY; recordSymbol.flags |= Flags.READONLY; } BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable, recordLiteral.pos); recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable); TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env); return recordType; } private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, DiagnosticPos pos, SymbolOrigin origin) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)), pkgID, null, env.scope.owner, pos, origin); BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), 
symTable.nilType, null); BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol( Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false, symTable.builtinPos, VIRTUAL); initFuncSymbol.retType = symTable.nilType; recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, pos); recordSymbol.scope = new Scope(recordSymbol); recordSymbol.scope.define( names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value), recordSymbol.initializerFunc.symbol); return recordSymbol; } private String getKeyName(BLangExpression key) { return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? ((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value; } private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString, BType exprType, boolean required, boolean readonly) { if (!nonRestFieldTypes.containsKey(keyString)) { nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required, readonly)); return; } FieldInfo fieldInfo = nonRestFieldTypes.get(keyString); List<BType> typeList = fieldInfo.types; if (isUniqueType(typeList, exprType)) { typeList.add(exprType); } if (required && !fieldInfo.required) { fieldInfo.required = true; } } private boolean isUniqueType(List<BType> typeList, BType type) { boolean isRecord = type.tag == TypeTags.RECORD; for (BType bType : typeList) { if (isRecord) { if (type == bType) { return false; } } else if (types.isSameType(type, bType)) { return false; } } return true; } private BType checkXmlSubTypeLiteralCompatibility(DiagnosticPos pos, BXMLSubType mutableXmlSubType, BType expType) { if (expType == symTable.semanticError) { return expType; } boolean unionExpType = expType.tag == TypeTags.UNION; if (expType == mutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) { return 
mutableXmlSubType; } BXMLSubType immutableXmlSubType = (BXMLSubType) ImmutableTypeCloner.getEffectiveImmutableType(pos, types, mutableXmlSubType, env, symTable, anonymousModelHelper, names); if (expType == immutableXmlSubType) { return expType; } if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) { return immutableXmlSubType; } if (!unionExpType) { dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } List<BType> compatibleTypes = new ArrayList<>(); for (BType memberType : ((BUnionType) expType).getMemberTypes()) { if (compatibleTypes.contains(memberType)) { continue; } if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) { compatibleTypes.add(memberType); continue; } if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) { compatibleTypes.add(mutableXmlSubType); continue; } if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) { compatibleTypes.add(immutableXmlSubType); } } if (compatibleTypes.isEmpty()) { dlog.error(pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType); return symTable.semanticError; } if (compatibleTypes.size() == 1) { return compatibleTypes.get(0); } dlog.error(pos, DiagnosticCode.AMBIGUOUS_TYPES, expType); return symTable.semanticError; } private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) { for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) { BType childType = modifiedChild.type; if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) { continue; } modifiedChild.type = ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types, (SelectivelyImmutableReferenceType) childType, env, symTable, anonymousModelHelper, names); if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) { 
markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild); } } } private void logUndefinedSymbolError(DiagnosticPos pos, String name) { if (!missingNodesHelper.isMissingNode(name)) { dlog.error(pos, DiagnosticCode.UNDEFINED_SYMBOL, name); } } private static class FieldInfo { List<BType> types; boolean required; boolean readonly; private FieldInfo(List<BType> types, boolean required, boolean readonly) { this.types = types; this.required = required; this.readonly = readonly; } } }
I don't think they can? I think even before the migration there's always a default value?
public Read withAttemptTimeout(Duration timeout) { checkArgument(timeout.isLongerThan(Duration.ZERO), "attempt timeout must be positive"); BigtableReadOptions readOptions = getBigtableReadOptions(); return toBuilder() .setBigtableReadOptions(readOptions.toBuilder().setAttemptTimeout(timeout).build()) .build(); }
checkArgument(timeout.isLongerThan(Duration.ZERO), "attempt timeout must be positive");
public Read withAttemptTimeout(Duration timeout) { checkArgument(timeout.isLongerThan(Duration.ZERO), "attempt timeout must be positive"); BigtableReadOptions readOptions = getBigtableReadOptions(); return toBuilder() .setBigtableReadOptions(readOptions.toBuilder().setAttemptTimeout(timeout).build()) .build(); }
class to using the SegmentReader. If * null is passed, this behavior will be disabled and the stream reader will be used. * * <p>Does not modify this object. * * <p>When we have a builder, we initialize the value. When they call the method then we * override the value */ @Experimental(Kind.SOURCE_SINK) public Read withMaxBufferElementCount(@Nullable Integer maxBufferElementCount) { BigtableReadOptions bigtableReadOptions = getBigtableReadOptions(); return toBuilder() .setBigtableReadOptions( bigtableReadOptions .toBuilder() .setMaxBufferElementCount(maxBufferElementCount) .build()) .build(); }
class to using the SegmentReader. If * null is passed, this behavior will be disabled and the stream reader will be used. * * <p>Does not modify this object. * * <p>When we have a builder, we initialize the value. When they call the method then we * override the value */ @Experimental(Kind.SOURCE_SINK) public Read withMaxBufferElementCount(@Nullable Integer maxBufferElementCount) { BigtableReadOptions bigtableReadOptions = getBigtableReadOptions(); return toBuilder() .setBigtableReadOptions( bigtableReadOptions .toBuilder() .setMaxBufferElementCount(maxBufferElementCount) .build()) .build(); }
`verifyJobGraphs` verifies JobID and JobName, if we don't change both of them, we should remove such verification. Otherwise we' better to check update.
public void testPutAndRemoveJobGraph() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = createZooKeeperSubmittedJobGraphStore("/testPutAndRemoveJobGraph"); try { SubmittedJobGraphListener listener = mock(SubmittedJobGraphListener.class); jobGraphs.start(listener); SubmittedJobGraph jobGraph = createSubmittedJobGraph(new JobID()); assertEquals(0, jobGraphs.getJobIds().size()); jobGraphs.putJobGraph(jobGraph); Collection<JobID> jobIds = jobGraphs.getJobIds(); assertEquals(1, jobIds.size()); JobID jobId = jobIds.iterator().next(); verifyJobGraphs(jobGraph, jobGraphs.recoverJobGraph(jobId)); jobGraph = createSubmittedJobGraph(jobGraph.getJobId()); jobGraphs.putJobGraph(jobGraph); jobIds = jobGraphs.getJobIds(); assertEquals(1, jobIds.size()); jobId = jobIds.iterator().next(); verifyJobGraphs(jobGraph, jobGraphs.recoverJobGraph(jobId)); jobGraphs.removeJobGraph(jobGraph.getJobId()); assertEquals(0, jobGraphs.getJobIds().size()); verify(listener, atMost(1)).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); jobGraphs.removeJobGraph(jobGraph.getJobId()); } finally { jobGraphs.stop(); } }
jobGraphs.putJobGraph(jobGraph);
public void testPutAndRemoveJobGraph() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = createZooKeeperSubmittedJobGraphStore("/testPutAndRemoveJobGraph"); try { SubmittedJobGraphListener listener = mock(SubmittedJobGraphListener.class); jobGraphs.start(listener); SubmittedJobGraph jobGraph = createSubmittedJobGraph(new JobID(), "JobName"); assertEquals(0, jobGraphs.getJobIds().size()); jobGraphs.putJobGraph(jobGraph); Collection<JobID> jobIds = jobGraphs.getJobIds(); assertEquals(1, jobIds.size()); JobID jobId = jobIds.iterator().next(); verifyJobGraphs(jobGraph, jobGraphs.recoverJobGraph(jobId)); jobGraph = createSubmittedJobGraph(jobGraph.getJobId(), "Updated JobName"); jobGraphs.putJobGraph(jobGraph); jobIds = jobGraphs.getJobIds(); assertEquals(1, jobIds.size()); jobId = jobIds.iterator().next(); verifyJobGraphs(jobGraph, jobGraphs.recoverJobGraph(jobId)); jobGraphs.removeJobGraph(jobGraph.getJobId()); assertEquals(0, jobGraphs.getJobIds().size()); verify(listener, atMost(1)).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); jobGraphs.removeJobGraph(jobGraph.getJobId()); } finally { jobGraphs.stop(); } }
class ZooKeeperSubmittedJobGraphsStoreITCase extends TestLogger { private static final ZooKeeperTestEnvironment ZooKeeper = new ZooKeeperTestEnvironment(1); private static final RetrievableStateStorageHelper<SubmittedJobGraph> localStateStorage = new RetrievableStateStorageHelper<SubmittedJobGraph>() { @Override public RetrievableStateHandle<SubmittedJobGraph> store(SubmittedJobGraph state) throws IOException { ByteStreamStateHandle byteStreamStateHandle = new ByteStreamStateHandle( String.valueOf(UUID.randomUUID()), InstantiationUtil.serializeObject(state)); return new RetrievableStreamStateHandle<>(byteStreamStateHandle); } }; @AfterClass public static void tearDown() throws Exception { if (ZooKeeper != null) { ZooKeeper.shutdown(); } } @Before public void cleanUp() throws Exception { ZooKeeper.deleteAll(); } @Test @Nonnull private ZooKeeperSubmittedJobGraphStore createZooKeeperSubmittedJobGraphStore(String fullPath) throws Exception { final CuratorFramework client = ZooKeeper.getClient(); client.newNamespaceAwareEnsurePath(fullPath).ensure(client.getZookeeperClient()); CuratorFramework facade = client.usingNamespace(client.getNamespace() + fullPath); return new ZooKeeperSubmittedJobGraphStore( fullPath, new ZooKeeperStateHandleStore<>( facade, localStateStorage), new PathChildrenCache(facade, "/", false)); } @Test public void testRecoverJobGraphs() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = createZooKeeperSubmittedJobGraphStore("/testRecoverJobGraphs"); try { SubmittedJobGraphListener listener = mock(SubmittedJobGraphListener.class); jobGraphs.start(listener); HashMap<JobID, SubmittedJobGraph> expected = new HashMap<>(); JobID[] jobIds = new JobID[] { new JobID(), new JobID(), new JobID() }; expected.put(jobIds[0], createSubmittedJobGraph(jobIds[0])); expected.put(jobIds[1], createSubmittedJobGraph(jobIds[1])); expected.put(jobIds[2], createSubmittedJobGraph(jobIds[2])); for (SubmittedJobGraph jobGraph : expected.values()) { 
jobGraphs.putJobGraph(jobGraph); } Collection<JobID> actual = jobGraphs.getJobIds(); assertEquals(expected.size(), actual.size()); for (JobID jobId : actual) { SubmittedJobGraph jobGraph = jobGraphs.recoverJobGraph(jobId); assertTrue(expected.containsKey(jobGraph.getJobId())); verifyJobGraphs(expected.get(jobGraph.getJobId()), jobGraph); jobGraphs.removeJobGraph(jobGraph.getJobId()); } assertEquals(0, jobGraphs.getJobIds().size()); verify(listener, atMost(expected.size())).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); } finally { jobGraphs.stop(); } } @Test public void testConcurrentAddJobGraph() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = null; ZooKeeperSubmittedJobGraphStore otherJobGraphs = null; try { jobGraphs = createZooKeeperSubmittedJobGraphStore("/testConcurrentAddJobGraph"); otherJobGraphs = createZooKeeperSubmittedJobGraphStore("/testConcurrentAddJobGraph"); SubmittedJobGraph jobGraph = createSubmittedJobGraph(new JobID()); SubmittedJobGraph otherJobGraph = createSubmittedJobGraph(new JobID()); SubmittedJobGraphListener listener = mock(SubmittedJobGraphListener.class); final JobID[] actualOtherJobId = new JobID[1]; final CountDownLatch sync = new CountDownLatch(1); doAnswer(new Answer<Void>() { @Override public Void answer(InvocationOnMock invocation) throws Throwable { actualOtherJobId[0] = (JobID) invocation.getArguments()[0]; sync.countDown(); return null; } }).when(listener).onAddedJobGraph(any(JobID.class)); jobGraphs.start(listener); otherJobGraphs.start(null); jobGraphs.putJobGraph(jobGraph); verify(listener, never()).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); otherJobGraphs.putJobGraph(otherJobGraph); sync.await(); verify(listener, times(1)).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); assertEquals(otherJobGraph.getJobId(), actualOtherJobId[0]); } finally { if (jobGraphs 
!= null) { jobGraphs.stop(); } if (otherJobGraphs != null) { otherJobGraphs.stop(); } } } @Test(expected = IllegalStateException.class) public void testUpdateJobGraphYouDidNotGetOrAdd() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = createZooKeeperSubmittedJobGraphStore("/testUpdateJobGraphYouDidNotGetOrAdd"); ZooKeeperSubmittedJobGraphStore otherJobGraphs = createZooKeeperSubmittedJobGraphStore("/testUpdateJobGraphYouDidNotGetOrAdd"); jobGraphs.start(null); otherJobGraphs.start(null); SubmittedJobGraph jobGraph = createSubmittedJobGraph(new JobID()); jobGraphs.putJobGraph(jobGraph); otherJobGraphs.putJobGraph(jobGraph); } private SubmittedJobGraph createSubmittedJobGraph(JobID jobId) { final JobGraph jobGraph = new JobGraph(jobId, "Test JobGraph"); final JobVertex jobVertex = new JobVertex("Test JobVertex"); jobVertex.setParallelism(1); jobGraph.addVertex(jobVertex); return new SubmittedJobGraph(jobGraph, null); } private void verifyJobGraphs(SubmittedJobGraph expected, SubmittedJobGraph actual) { JobGraph expectedJobGraph = expected.getJobGraph(); JobGraph actualJobGraph = actual.getJobGraph(); assertEquals(expectedJobGraph.getName(), actualJobGraph.getName()); assertEquals(expectedJobGraph.getJobID(), actualJobGraph.getJobID()); } }
class ZooKeeperSubmittedJobGraphsStoreITCase extends TestLogger { private static final ZooKeeperTestEnvironment ZooKeeper = new ZooKeeperTestEnvironment(1); private static final RetrievableStateStorageHelper<SubmittedJobGraph> localStateStorage = new RetrievableStateStorageHelper<SubmittedJobGraph>() { @Override public RetrievableStateHandle<SubmittedJobGraph> store(SubmittedJobGraph state) throws IOException { ByteStreamStateHandle byteStreamStateHandle = new ByteStreamStateHandle( String.valueOf(UUID.randomUUID()), InstantiationUtil.serializeObject(state)); return new RetrievableStreamStateHandle<>(byteStreamStateHandle); } }; @AfterClass public static void tearDown() throws Exception { ZooKeeper.shutdown(); } @Before public void cleanUp() throws Exception { ZooKeeper.deleteAll(); } @Test @Nonnull private ZooKeeperSubmittedJobGraphStore createZooKeeperSubmittedJobGraphStore(String fullPath) throws Exception { final CuratorFramework client = ZooKeeper.getClient(); client.newNamespaceAwareEnsurePath(fullPath).ensure(client.getZookeeperClient()); CuratorFramework facade = client.usingNamespace(client.getNamespace() + fullPath); return new ZooKeeperSubmittedJobGraphStore( fullPath, new ZooKeeperStateHandleStore<>( facade, localStateStorage), new PathChildrenCache(facade, "/", false)); } @Test public void testRecoverJobGraphs() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = createZooKeeperSubmittedJobGraphStore("/testRecoverJobGraphs"); try { SubmittedJobGraphListener listener = mock(SubmittedJobGraphListener.class); jobGraphs.start(listener); HashMap<JobID, SubmittedJobGraph> expected = new HashMap<>(); JobID[] jobIds = new JobID[] { new JobID(), new JobID(), new JobID() }; expected.put(jobIds[0], createSubmittedJobGraph(jobIds[0])); expected.put(jobIds[1], createSubmittedJobGraph(jobIds[1])); expected.put(jobIds[2], createSubmittedJobGraph(jobIds[2])); for (SubmittedJobGraph jobGraph : expected.values()) { jobGraphs.putJobGraph(jobGraph); } 
Collection<JobID> actual = jobGraphs.getJobIds(); assertEquals(expected.size(), actual.size()); for (JobID jobId : actual) { SubmittedJobGraph jobGraph = jobGraphs.recoverJobGraph(jobId); assertTrue(expected.containsKey(jobGraph.getJobId())); verifyJobGraphs(expected.get(jobGraph.getJobId()), jobGraph); jobGraphs.removeJobGraph(jobGraph.getJobId()); } assertEquals(0, jobGraphs.getJobIds().size()); verify(listener, atMost(expected.size())).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); } finally { jobGraphs.stop(); } } @Test public void testConcurrentAddJobGraph() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = null; ZooKeeperSubmittedJobGraphStore otherJobGraphs = null; try { jobGraphs = createZooKeeperSubmittedJobGraphStore("/testConcurrentAddJobGraph"); otherJobGraphs = createZooKeeperSubmittedJobGraphStore("/testConcurrentAddJobGraph"); SubmittedJobGraph jobGraph = createSubmittedJobGraph(new JobID()); SubmittedJobGraph otherJobGraph = createSubmittedJobGraph(new JobID()); SubmittedJobGraphListener listener = mock(SubmittedJobGraphListener.class); final JobID[] actualOtherJobId = new JobID[1]; final CountDownLatch sync = new CountDownLatch(1); doAnswer(new Answer<Void>() { @Override public Void answer(InvocationOnMock invocation) throws Throwable { actualOtherJobId[0] = (JobID) invocation.getArguments()[0]; sync.countDown(); return null; } }).when(listener).onAddedJobGraph(any(JobID.class)); jobGraphs.start(listener); otherJobGraphs.start(null); jobGraphs.putJobGraph(jobGraph); verify(listener, never()).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); otherJobGraphs.putJobGraph(otherJobGraph); sync.await(); verify(listener, times(1)).onAddedJobGraph(any(JobID.class)); verify(listener, never()).onRemovedJobGraph(any(JobID.class)); assertEquals(otherJobGraph.getJobId(), actualOtherJobId[0]); } finally { if (jobGraphs != null) { jobGraphs.stop(); } if 
(otherJobGraphs != null) { otherJobGraphs.stop(); } } } @Test(expected = IllegalStateException.class) public void testUpdateJobGraphYouDidNotGetOrAdd() throws Exception { ZooKeeperSubmittedJobGraphStore jobGraphs = createZooKeeperSubmittedJobGraphStore("/testUpdateJobGraphYouDidNotGetOrAdd"); ZooKeeperSubmittedJobGraphStore otherJobGraphs = createZooKeeperSubmittedJobGraphStore("/testUpdateJobGraphYouDidNotGetOrAdd"); jobGraphs.start(null); otherJobGraphs.start(null); SubmittedJobGraph jobGraph = createSubmittedJobGraph(new JobID()); jobGraphs.putJobGraph(jobGraph); otherJobGraphs.putJobGraph(jobGraph); } private SubmittedJobGraph createSubmittedJobGraph(JobID jobId) { return createSubmittedJobGraph(jobId, "Test JobGraph"); } private SubmittedJobGraph createSubmittedJobGraph(JobID jobId, String jobName) { final JobGraph jobGraph = new JobGraph(jobId, jobName); final JobVertex jobVertex = new JobVertex("Test JobVertex"); jobVertex.setParallelism(1); jobGraph.addVertex(jobVertex); return new SubmittedJobGraph(jobGraph); } private void verifyJobGraphs(SubmittedJobGraph expected, SubmittedJobGraph actual) { JobGraph expectedJobGraph = expected.getJobGraph(); JobGraph actualJobGraph = actual.getJobGraph(); assertEquals(expectedJobGraph.getName(), actualJobGraph.getName()); assertEquals(expectedJobGraph.getJobID(), actualJobGraph.getJobID()); } }
add data for type: `TINYINT`, `SMALLINT` and `FLOAT`. Also please cover the slow path of some special handled data type, like `long`, `double`, `float`
public void testSerDe() throws Exception { long id = 1238123899121L; String name = "asdlkjasjkdla998y1122"; byte[] bytes = new byte[1024]; ThreadLocalRandom.current().nextBytes(bytes); BigDecimal decimal = new BigDecimal("123.456789"); Double[] doubles = new Double[]{1.1, 2.2, 3.3}; LocalDate date = LocalDate.parse("1990-10-14"); LocalTime time = LocalTime.parse("12:12:43"); Timestamp timestamp3 = Timestamp.valueOf("1990-10-14 12:12:43.123"); Timestamp timestamp9 = Timestamp.valueOf("1990-10-14 12:12:43.123456789"); Map<String, Long> map = new HashMap<>(); map.put("flink", 123L); Map<String, Map<String, Integer>> nestedMap = new HashMap<>(); Map<String, Integer> innerMap = new HashMap<>(); innerMap.put("key", 234); nestedMap.put("inner_map", innerMap); ObjectMapper objectMapper = new ObjectMapper(); ArrayNode doubleNode = objectMapper.createArrayNode().add(1.1D).add(2.2D).add(3.3D); ObjectNode root = objectMapper.createObjectNode(); root.put("bool", true); root.put("id", id); root.put("name", name); root.put("bytes", bytes); root.put("decimal", decimal); root.set("doubles", doubleNode); root.put("date", "1990-10-14"); root.put("time", "12:12:43Z"); root.put("timestamp3", "1990-10-14T12:12:43.123Z"); root.put("timestamp9", "1990-10-14T12:12:43.123456789Z"); root.putObject("map").put("flink", 123); root.putObject("map2map").putObject("inner_map").put("key", 234); byte[] serializedJson = objectMapper.writeValueAsBytes(root); DataType dataType = ROW( FIELD("bool", BOOLEAN()), FIELD("id", BIGINT()), FIELD("name", STRING()), FIELD("bytes", BYTES()), FIELD("decimal", DECIMAL(9, 6)), FIELD("doubles", ARRAY(DOUBLE())), FIELD("date", DATE()), FIELD("time", TIME(0)), FIELD("timestamp3", TIMESTAMP(3)), FIELD("timestamp9", TIMESTAMP(9)), FIELD("map", MAP(STRING(), BIGINT())), FIELD("map2map", MAP(STRING(), MAP(STRING(), INT())))); RowType schema = (RowType) dataType.getLogicalType(); RowDataTypeInfo resultTypeInfo = new RowDataTypeInfo(schema); JsonRowDataDeserializationSchema 
deserializationSchema = JsonRowDataDeserializationSchema.builder() .schema(schema) .resultTypeInfo(resultTypeInfo) .build(); Row expected = new Row(12); expected.setField(0, true); expected.setField(1, id); expected.setField(2, name); expected.setField(3, bytes); expected.setField(4, decimal); expected.setField(5, doubles); expected.setField(6, date); expected.setField(7, time); expected.setField(8, timestamp3.toLocalDateTime()); expected.setField(9, timestamp9.toLocalDateTime()); expected.setField(10, map); expected.setField(11, nestedMap); RowData rowData = deserializationSchema.deserialize(serializedJson); Row actual = convertToExternal(rowData, dataType); assertEquals(expected, actual); JsonRowDataSerializationSchema serializationSchema = JsonRowDataSerializationSchema.builder() .schema(schema) .build(); byte[] actualBytes = serializationSchema.serialize(rowData); assertEquals(new String(serializedJson), new String(actualBytes)); }
long id = 1238123899121L;
public void testSerDe() throws Exception { byte tinyint = 'c'; short smallint = 128; int intValue = 45536; float floatValue = 33.333F; long bigint = 1238123899121L; String name = "asdlkjasjkdla998y1122"; byte[] bytes = new byte[1024]; ThreadLocalRandom.current().nextBytes(bytes); BigDecimal decimal = new BigDecimal("123.456789"); Double[] doubles = new Double[]{1.1, 2.2, 3.3}; LocalDate date = LocalDate.parse("1990-10-14"); LocalTime time = LocalTime.parse("12:12:43"); Timestamp timestamp3 = Timestamp.valueOf("1990-10-14 12:12:43.123"); Timestamp timestamp9 = Timestamp.valueOf("1990-10-14 12:12:43.123456789"); Map<String, Long> map = new HashMap<>(); map.put("flink", 123L); Map<String, Map<String, Integer>> nestedMap = new HashMap<>(); Map<String, Integer> innerMap = new HashMap<>(); innerMap.put("key", 234); nestedMap.put("inner_map", innerMap); ObjectMapper objectMapper = new ObjectMapper(); ArrayNode doubleNode = objectMapper.createArrayNode().add(1.1D).add(2.2D).add(3.3D); ObjectNode root = objectMapper.createObjectNode(); root.put("bool", true); root.put("tinyint", tinyint); root.put("smallint", smallint); root.put("int", intValue); root.put("bigint", bigint); root.put("float", floatValue); root.put("name", name); root.put("bytes", bytes); root.put("decimal", decimal); root.set("doubles", doubleNode); root.put("date", "1990-10-14"); root.put("time", "12:12:43Z"); root.put("timestamp3", "1990-10-14T12:12:43.123Z"); root.put("timestamp9", "1990-10-14T12:12:43.123456789Z"); root.putObject("map").put("flink", 123); root.putObject("map2map").putObject("inner_map").put("key", 234); byte[] serializedJson = objectMapper.writeValueAsBytes(root); DataType dataType = ROW( FIELD("bool", BOOLEAN()), FIELD("tinyint", TINYINT()), FIELD("smallint", SMALLINT()), FIELD("int", INT()), FIELD("bigint", BIGINT()), FIELD("float", FLOAT()), FIELD("name", STRING()), FIELD("bytes", BYTES()), FIELD("decimal", DECIMAL(9, 6)), FIELD("doubles", ARRAY(DOUBLE())), FIELD("date", DATE()), 
FIELD("time", TIME(0)), FIELD("timestamp3", TIMESTAMP(3)), FIELD("timestamp9", TIMESTAMP(9)), FIELD("map", MAP(STRING(), BIGINT())), FIELD("map2map", MAP(STRING(), MAP(STRING(), INT())))); RowType schema = (RowType) dataType.getLogicalType(); RowDataTypeInfo resultTypeInfo = new RowDataTypeInfo(schema); JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema( schema, resultTypeInfo, false, false); Row expected = new Row(16); expected.setField(0, true); expected.setField(1, tinyint); expected.setField(2, smallint); expected.setField(3, intValue); expected.setField(4, bigint); expected.setField(5, floatValue); expected.setField(6, name); expected.setField(7, bytes); expected.setField(8, decimal); expected.setField(9, doubles); expected.setField(10, date); expected.setField(11, time); expected.setField(12, timestamp3.toLocalDateTime()); expected.setField(13, timestamp9.toLocalDateTime()); expected.setField(14, map); expected.setField(15, nestedMap); RowData rowData = deserializationSchema.deserialize(serializedJson); Row actual = convertToExternal(rowData, dataType); assertEquals(expected, actual); JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(schema); byte[] actualBytes = serializationSchema.serialize(rowData); assertEquals(new String(serializedJson), new String(actualBytes)); }
class JsonRowDataSerDeSchemaTest { @Rule public ExpectedException thrown = ExpectedException.none(); @Test @Test public void testSerDeMultiRows() throws Exception { RowType rowType = (RowType) ROW( FIELD("f1", INT()), FIELD("f2", BOOLEAN()), FIELD("f3", STRING()) ).getLogicalType(); JsonRowDataDeserializationSchema deserializationSchema = JsonRowDataDeserializationSchema.builder() .schema(rowType) .resultTypeInfo(new RowDataTypeInfo(rowType)) .build(); JsonRowDataSerializationSchema serializationSchema = JsonRowDataSerializationSchema.builder() .schema(rowType) .build(); ObjectMapper objectMapper = new ObjectMapper(); { ObjectNode root = objectMapper.createObjectNode(); root.put("f1", 1); root.put("f2", true); root.put("f3", "str"); byte[] serializedJson = objectMapper.writeValueAsBytes(root); RowData rowData = deserializationSchema.deserialize(serializedJson); byte[] actual = serializationSchema.serialize(rowData); assertEquals(new String(serializedJson), new String(actual)); } { ObjectNode root = objectMapper.createObjectNode(); root.put("f1", 10); root.put("f2", false); root.put("f3", "newStr"); byte[] serializedJson = objectMapper.writeValueAsBytes(root); RowData rowData = deserializationSchema.deserialize(serializedJson); byte[] actual = serializationSchema.serialize(rowData); assertEquals(new String(serializedJson), new String(actual)); } } @Test public void testSerDeMultiRowsWithNullValues() throws Exception { String[] jsons = new String[] { "{\"svt\":\"2020-02-24T12:58:09.209+0800\"}", "{\"svt\":\"2020-02-24T12:58:09.209+0800\", \"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}, " + "\"ids\":[1, 2, 3]}", "{\"svt\":\"2020-02-24T12:58:09.209+0800\"}", }; String[] expected = new String[] { "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null}", "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}," + "\"ids\":[1,2,3]}", 
"{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null}", }; RowType rowType = (RowType) ROW( FIELD("svt", STRING()), FIELD("ops", ROW(FIELD("id", STRING()))), FIELD("ids", ARRAY(INT())) ).getLogicalType(); JsonRowDataDeserializationSchema deserializationSchema = JsonRowDataDeserializationSchema.builder() .schema(rowType) .resultTypeInfo(new RowDataTypeInfo(rowType)) .build(); JsonRowDataSerializationSchema serializationSchema = JsonRowDataSerializationSchema.builder() .schema(rowType) .build(); for (int i = 0; i < jsons.length; i++) { String json = jsons[i]; RowData row = deserializationSchema.deserialize(json.getBytes()); String result = new String(serializationSchema.serialize(row)); assertEquals(expected[i], result); } } @Test public void testDeserializationMissingNode() throws Exception { ObjectMapper objectMapper = new ObjectMapper(); ObjectNode root = objectMapper.createObjectNode(); root.put("id", 123123123); byte[] serializedJson = objectMapper.writeValueAsBytes(root); DataType dataType = ROW(FIELD("name", STRING())); RowType schema = (RowType) dataType.getLogicalType(); JsonRowDataDeserializationSchema deserializationSchema = JsonRowDataDeserializationSchema.builder() .schema(schema) .resultTypeInfo(new RowDataTypeInfo(schema)) .build(); Row expected = new Row(1); Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType); assertEquals(expected, actual); deserializationSchema = JsonRowDataDeserializationSchema.builder() .schema(schema) .resultTypeInfo(new RowDataTypeInfo(schema)) .failOnMissingField() .build(); thrown.expect(IOException.class); thrown.expectMessage("Failed to deserialize JSON '{\"id\":123123123}'"); deserializationSchema.deserialize(serializedJson); deserializationSchema = JsonRowDataDeserializationSchema.builder() .schema(schema) .resultTypeInfo(new RowDataTypeInfo(schema)) .ignoreParseErrors() .build(); actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType); 
assertEquals(expected, actual); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled"); JsonRowDataDeserializationSchema.builder() .schema(schema) .resultTypeInfo(new RowDataTypeInfo(schema)) .failOnMissingField() .ignoreParseErrors() .build(); } @Test public void testJsonParse() throws Exception { for (TestSpec spec : testData) { testIgnoreParseErrors(spec); if (spec.errorMessage != null) { testParseErrors(spec); } } } private void testIgnoreParseErrors(TestSpec spec) throws Exception { JsonRowDataDeserializationSchema ignoreErrorsSchema = JsonRowDataDeserializationSchema.builder() .schema(spec.rowType) .resultTypeInfo(new RowDataTypeInfo(spec.rowType)) .ignoreParseErrors() .build(); Row expected; if (spec.expected != null) { expected = spec.expected; } else { expected = new Row(1); } RowData rowData = ignoreErrorsSchema.deserialize(spec.json.getBytes()); Row actual = convertToExternal(rowData, fromLogicalToDataType(spec.rowType)); assertEquals("Test Ignore Parse Error: " + spec.json, expected, actual); } private void testParseErrors(TestSpec spec) throws Exception { JsonRowDataDeserializationSchema failingSchema = JsonRowDataDeserializationSchema.builder() .schema(spec.rowType) .resultTypeInfo(new RowDataTypeInfo(spec.rowType)) .build(); thrown.expectMessage(spec.errorMessage); failingSchema.deserialize(spec.json.getBytes()); } private static List<TestSpec> testData = Arrays.asList( TestSpec .json("{\"id\": \"trueA\"}") .rowType(ROW(FIELD("id", BOOLEAN()))) .expect(Row.of(false)), TestSpec .json("{\"id\": true}") .rowType(ROW(FIELD("id", BOOLEAN()))) .expect(Row.of(true)), TestSpec .json("{\"id\":\"abc\"}") .rowType(ROW(FIELD("id", INT()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'"), TestSpec .json("{\"id\":112.013}") .rowType(ROW(FIELD("id", BIGINT()))) .expect(Row.of(112L)), TestSpec .json("{\"id\":\"long\"}") .rowType(ROW(FIELD("id", 
BIGINT()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"long\"}'"), TestSpec .json("{\"id\":\"112.013.123\"}") .rowType(ROW(FIELD("id", FLOAT()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'"), TestSpec .json("{\"id\":\"112.013.123\"}") .rowType(ROW(FIELD("id", DOUBLE()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'"), TestSpec .json("{\"id\":\"18:00:243\"}") .rowType(ROW(FIELD("id", TIME()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"18:00:243\"}'"), TestSpec .json("{\"id\":\"20191112\"}") .rowType(ROW(FIELD("id", DATE()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"20191112\"}'"), TestSpec .json("{\"id\":\"2019-11-12 18:00:12\"}") .rowType(ROW(FIELD("id", TIMESTAMP(0)))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12 18:00:12\"}'"), TestSpec .json("{\"id\":\"abc\"}") .rowType(ROW(FIELD("id", DECIMAL(10, 3)))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'"), TestSpec .json("{\"row\":{\"id\":\"abc\"}}") .rowType(ROW(FIELD("row", ROW(FIELD("id", BOOLEAN()))))) .expect(Row.of(new Row(1))) .expectErrorMessage("Failed to deserialize JSON '{\"row\":{\"id\":\"abc\"}}'"), TestSpec .json("{\"array\":[123, \"abc\"]}") .rowType(ROW(FIELD("array", ARRAY(INT())))) .expect(Row.of((Object) new Integer[]{123, null})) .expectErrorMessage("Failed to deserialize JSON '{\"array\":[123, \"abc\"]}'"), TestSpec .json("{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}") .rowType(ROW(FIELD("map", MAP(STRING(), INT())))) .expect(Row.of(createHashMap("key1", 123, "key2", null))) .expectErrorMessage("Failed to deserialize JSON '{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}'") ); private static Map<String, Integer> createHashMap(String k1, Integer v1, String k2, Integer v2) { Map<String, Integer> map = new HashMap<>(); map.put(k1, v1); map.put(k2, v2); return map; } @SuppressWarnings("unchecked") private static Row convertToExternal(RowData 
rowData, DataType dataType) { return (Row) DataFormatConverters.getConverterForDataType(dataType).toExternal(rowData); } private static class TestSpec { private final String json; private RowType rowType; private Row expected; private String errorMessage; private TestSpec(String json) { this.json = json; } public static TestSpec json(String json) { return new TestSpec(json); } TestSpec expect(Row row) { this.expected = row; return this; } TestSpec rowType(DataType rowType) { this.rowType = (RowType) rowType.getLogicalType(); return this; } TestSpec expectErrorMessage(String errorMessage) { this.errorMessage = errorMessage; return this; } } }
class JsonRowDataSerDeSchemaTest { @Rule public ExpectedException thrown = ExpectedException.none(); @Test /** * Tests the deserialization slow path, * e.g. convert into string and use {@link Double */ @Test public void testSlowDeserialization() throws Exception { Random random = new Random(); boolean bool = random.nextBoolean(); int integer = random.nextInt(); long bigint = random.nextLong(); double doubleValue = random.nextDouble(); float floatValue = random.nextFloat(); ObjectMapper objectMapper = new ObjectMapper(); ObjectNode root = objectMapper.createObjectNode(); root.put("bool", String.valueOf(bool)); root.put("int", String.valueOf(integer)); root.put("bigint", String.valueOf(bigint)); root.put("double1", String.valueOf(doubleValue)); root.put("double2", new BigDecimal(doubleValue)); root.put("float1", String.valueOf(floatValue)); root.put("float2", new BigDecimal(floatValue)); byte[] serializedJson = objectMapper.writeValueAsBytes(root); DataType dataType = ROW( FIELD("bool", BOOLEAN()), FIELD("int", INT()), FIELD("bigint", BIGINT()), FIELD("double1", DOUBLE()), FIELD("double2", DOUBLE()), FIELD("float1", FLOAT()), FIELD("float2", FLOAT()) ); RowType rowType = (RowType) dataType.getLogicalType(); JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema( rowType, new RowDataTypeInfo(rowType), false, false); Row expected = new Row(7); expected.setField(0, bool); expected.setField(1, integer); expected.setField(2, bigint); expected.setField(3, doubleValue); expected.setField(4, doubleValue); expected.setField(5, floatValue); expected.setField(6, floatValue); RowData rowData = deserializationSchema.deserialize(serializedJson); Row actual = convertToExternal(rowData, dataType); assertEquals(expected, actual); } @Test public void testSerDeMultiRows() throws Exception { RowType rowType = (RowType) ROW( FIELD("f1", INT()), FIELD("f2", BOOLEAN()), FIELD("f3", STRING()) ).getLogicalType(); JsonRowDataDeserializationSchema 
deserializationSchema = new JsonRowDataDeserializationSchema( rowType, new RowDataTypeInfo(rowType), false, false); JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(rowType); ObjectMapper objectMapper = new ObjectMapper(); { ObjectNode root = objectMapper.createObjectNode(); root.put("f1", 1); root.put("f2", true); root.put("f3", "str"); byte[] serializedJson = objectMapper.writeValueAsBytes(root); RowData rowData = deserializationSchema.deserialize(serializedJson); byte[] actual = serializationSchema.serialize(rowData); assertEquals(new String(serializedJson), new String(actual)); } { ObjectNode root = objectMapper.createObjectNode(); root.put("f1", 10); root.put("f2", false); root.put("f3", "newStr"); byte[] serializedJson = objectMapper.writeValueAsBytes(root); RowData rowData = deserializationSchema.deserialize(serializedJson); byte[] actual = serializationSchema.serialize(rowData); assertEquals(new String(serializedJson), new String(actual)); } } @Test public void testSerDeMultiRowsWithNullValues() throws Exception { String[] jsons = new String[] { "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{\"k1\":10.01,\"k2\":\"invalid\"}}", "{\"svt\":\"2020-02-24T12:58:09.209+0800\", \"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}, " + "\"ids\":[1, 2, 3]}", "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{}}", }; String[] expected = new String[] { "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{\"k1\":10.01,\"k2\":null}}", "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}," + "\"ids\":[1,2,3],\"metrics\":null}", "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{}}", }; RowType rowType = (RowType) ROW( FIELD("svt", STRING()), FIELD("ops", ROW(FIELD("id", STRING()))), FIELD("ids", ARRAY(INT())), FIELD("metrics", MAP(STRING(), DOUBLE())) ).getLogicalType(); JsonRowDataDeserializationSchema 
deserializationSchema = new JsonRowDataDeserializationSchema( rowType, new RowDataTypeInfo(rowType), false, true); JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(rowType); for (int i = 0; i < jsons.length; i++) { String json = jsons[i]; RowData row = deserializationSchema.deserialize(json.getBytes()); String result = new String(serializationSchema.serialize(row)); assertEquals(expected[i], result); } } @Test public void testDeserializationMissingNode() throws Exception { ObjectMapper objectMapper = new ObjectMapper(); ObjectNode root = objectMapper.createObjectNode(); root.put("id", 123123123); byte[] serializedJson = objectMapper.writeValueAsBytes(root); DataType dataType = ROW(FIELD("name", STRING())); RowType schema = (RowType) dataType.getLogicalType(); JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema( schema, new RowDataTypeInfo(schema), false, false); Row expected = new Row(1); Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType); assertEquals(expected, actual); deserializationSchema = deserializationSchema = new JsonRowDataDeserializationSchema( schema, new RowDataTypeInfo(schema), true, false); thrown.expect(IOException.class); thrown.expectMessage("Failed to deserialize JSON '{\"id\":123123123}'"); deserializationSchema.deserialize(serializedJson); deserializationSchema = new JsonRowDataDeserializationSchema( schema, new RowDataTypeInfo(schema), false, true); actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType); assertEquals(expected, actual); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled"); new JsonRowDataDeserializationSchema( schema, new RowDataTypeInfo(schema), true, true); } @Test public void testJsonParse() throws Exception { for (TestSpec spec : testData) { testIgnoreParseErrors(spec); 
if (spec.errorMessage != null) { testParseErrors(spec); } } } private void testIgnoreParseErrors(TestSpec spec) throws Exception { JsonRowDataDeserializationSchema ignoreErrorsSchema = new JsonRowDataDeserializationSchema( spec.rowType, new RowDataTypeInfo(spec.rowType), false, true); Row expected; if (spec.expected != null) { expected = spec.expected; } else { expected = new Row(1); } RowData rowData = ignoreErrorsSchema.deserialize(spec.json.getBytes()); Row actual = convertToExternal(rowData, fromLogicalToDataType(spec.rowType)); assertEquals("Test Ignore Parse Error: " + spec.json, expected, actual); } private void testParseErrors(TestSpec spec) throws Exception { JsonRowDataDeserializationSchema failingSchema = new JsonRowDataDeserializationSchema( spec.rowType, new RowDataTypeInfo(spec.rowType), false, false); thrown.expectMessage(spec.errorMessage); failingSchema.deserialize(spec.json.getBytes()); } private static List<TestSpec> testData = Arrays.asList( TestSpec .json("{\"id\": \"trueA\"}") .rowType(ROW(FIELD("id", BOOLEAN()))) .expect(Row.of(false)), TestSpec .json("{\"id\": true}") .rowType(ROW(FIELD("id", BOOLEAN()))) .expect(Row.of(true)), TestSpec .json("{\"id\":\"abc\"}") .rowType(ROW(FIELD("id", INT()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'"), TestSpec .json("{\"id\":112.013}") .rowType(ROW(FIELD("id", BIGINT()))) .expect(Row.of(112L)), TestSpec .json("{\"id\":\"long\"}") .rowType(ROW(FIELD("id", BIGINT()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"long\"}'"), TestSpec .json("{\"id\":\"112.013.123\"}") .rowType(ROW(FIELD("id", FLOAT()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'"), TestSpec .json("{\"id\":\"112.013.123\"}") .rowType(ROW(FIELD("id", DOUBLE()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'"), TestSpec .json("{\"id\":\"18:00:243\"}") .rowType(ROW(FIELD("id", TIME()))) .expectErrorMessage("Failed to deserialize JSON 
'{\"id\":\"18:00:243\"}'"), TestSpec .json("{\"id\":\"20191112\"}") .rowType(ROW(FIELD("id", DATE()))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"20191112\"}'"), TestSpec .json("{\"id\":\"2019-11-12 18:00:12\"}") .rowType(ROW(FIELD("id", TIMESTAMP(0)))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12 18:00:12\"}'"), TestSpec .json("{\"id\":\"abc\"}") .rowType(ROW(FIELD("id", DECIMAL(10, 3)))) .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'"), TestSpec .json("{\"row\":{\"id\":\"abc\"}}") .rowType(ROW(FIELD("row", ROW(FIELD("id", BOOLEAN()))))) .expect(Row.of(new Row(1))) .expectErrorMessage("Failed to deserialize JSON '{\"row\":{\"id\":\"abc\"}}'"), TestSpec .json("{\"array\":[123, \"abc\"]}") .rowType(ROW(FIELD("array", ARRAY(INT())))) .expect(Row.of((Object) new Integer[]{123, null})) .expectErrorMessage("Failed to deserialize JSON '{\"array\":[123, \"abc\"]}'"), TestSpec .json("{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}") .rowType(ROW(FIELD("map", MAP(STRING(), INT())))) .expect(Row.of(createHashMap("key1", 123, "key2", null))) .expectErrorMessage("Failed to deserialize JSON '{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}'") ); private static Map<String, Integer> createHashMap(String k1, Integer v1, String k2, Integer v2) { Map<String, Integer> map = new HashMap<>(); map.put(k1, v1); map.put(k2, v2); return map; } @SuppressWarnings("unchecked") private static Row convertToExternal(RowData rowData, DataType dataType) { return (Row) DataFormatConverters.getConverterForDataType(dataType).toExternal(rowData); } private static class TestSpec { private final String json; private RowType rowType; private Row expected; private String errorMessage; private TestSpec(String json) { this.json = json; } public static TestSpec json(String json) { return new TestSpec(json); } TestSpec expect(Row row) { this.expected = row; return this; } TestSpec rowType(DataType rowType) { this.rowType = (RowType) 
rowType.getLogicalType(); return this; } TestSpec expectErrorMessage(String errorMessage) { this.errorMessage = errorMessage; return this; } } }
Can't/shouldn't we call `analyzeNode` instead of directly calling `accept`?
public void visit(BLangMappingMatchPattern mappingMatchPattern) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++), env.enclPkg.symbol.pkgID, null, env.scope.owner, mappingMatchPattern.pos, VIRTUAL); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); for (BLangFieldMatchPattern fieldMatchPattern : mappingMatchPattern.fieldMatchPatterns) { fieldMatchPattern.accept(this); String fieldName = fieldMatchPattern.fieldName.value; BVarSymbol fieldSymbol = new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID, fieldMatchPattern.matchPattern.type, recordSymbol, fieldMatchPattern.pos, COMPILED_SOURCE); BField field = new BField(names.fromString(fieldName), fieldMatchPattern.pos, fieldSymbol); fields.put(fieldName, field); mappingMatchPattern.declaredVars.putAll(fieldMatchPattern.declaredVars); } BRecordType recordVarType = new BRecordType(recordSymbol); recordVarType.fields = fields; recordVarType.restFieldType = symTable.anydataType; if (mappingMatchPattern.restMatchPattern != null) { BLangRestMatchPattern restMatchPattern = mappingMatchPattern.restMatchPattern; restMatchPattern.type = new BMapType(TypeTags.MAP, symTable.anydataType, null); restMatchPattern.accept(this); mappingMatchPattern.declaredVars.put(restMatchPattern.variableName.value, restMatchPattern.symbol); } mappingMatchPattern.type = types.resolvePatternTypeFromMatchExpr(mappingMatchPattern, recordVarType, env); assignTypesToMemberPatterns(mappingMatchPattern, mappingMatchPattern.type); }
fieldMatchPattern.accept(this);
public void visit(BLangMappingMatchPattern mappingMatchPattern) { EnumSet<Flag> flags = EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS); BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY, env.enclPkg.packageID, null, env.scope.owner, mappingMatchPattern.pos, VIRTUAL); recordSymbol.name = names.fromString(anonModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID)); LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); for (BLangFieldMatchPattern fieldMatchPattern : mappingMatchPattern.fieldMatchPatterns) { analyzeNode(fieldMatchPattern, env); String fieldName = fieldMatchPattern.fieldName.value; BVarSymbol fieldSymbol = new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID, fieldMatchPattern.matchPattern.type, recordSymbol, fieldMatchPattern.pos, COMPILED_SOURCE); BField field = new BField(names.fromString(fieldName), fieldMatchPattern.pos, fieldSymbol); fields.put(fieldName, field); mappingMatchPattern.declaredVars.putAll(fieldMatchPattern.declaredVars); } BRecordType recordVarType = new BRecordType(recordSymbol); recordVarType.fields = fields; recordVarType.restFieldType = symTable.anydataType; if (mappingMatchPattern.restMatchPattern != null) { BLangRestMatchPattern restMatchPattern = mappingMatchPattern.restMatchPattern; restMatchPattern.type = new BMapType(TypeTags.MAP, symTable.anydataType, null); analyzeNode(restMatchPattern, env); mappingMatchPattern.declaredVars.put(restMatchPattern.variableName.value, restMatchPattern.symbol); } mappingMatchPattern.type = types.resolvePatternTypeFromMatchExpr(mappingMatchPattern, recordVarType, env); assignTypesToMemberPatterns(mappingMatchPattern, mappingMatchPattern.type); }
class SemanticAnalyzer extends BLangNodeVisitor { private static final CompilerContext.Key<SemanticAnalyzer> SYMBOL_ANALYZER_KEY = new CompilerContext.Key<>(); private static final String ANONYMOUS_RECORD_NAME = "anonymous-record"; private static final String NULL_LITERAL = "null"; private static final String LEFT_BRACE = "{"; private static final String RIGHT_BRACE = "}"; private static final String SPACE = " "; public static final String COLON = ":"; private static final String LISTENER_TYPE_NAME = "lang.object:Listener"; private static final String LISTENER_NAME = "listener"; private SymbolTable symTable; private SymbolEnter symbolEnter; private Names names; private SymbolResolver symResolver; private TypeChecker typeChecker; private Types types; private BLangDiagnosticLog dlog; private TypeNarrower typeNarrower; private ConstantAnalyzer constantAnalyzer; private ConstantValueResolver constantValueResolver; private SymbolEnv env; private BType expType; private DiagnosticCode diagCode; private BType resType; private Map<BVarSymbol, BType.NarrowedTypes> narrowedTypeInfo; private Stack<SymbolEnv> prevEnvs = new Stack<>(); private int recordCount = 0; public static SemanticAnalyzer getInstance(CompilerContext context) { SemanticAnalyzer semAnalyzer = context.get(SYMBOL_ANALYZER_KEY); if (semAnalyzer == null) { semAnalyzer = new SemanticAnalyzer(context); } return semAnalyzer; } public SemanticAnalyzer(CompilerContext context) { context.put(SYMBOL_ANALYZER_KEY, this); this.symTable = SymbolTable.getInstance(context); this.symbolEnter = SymbolEnter.getInstance(context); this.names = Names.getInstance(context); this.symResolver = SymbolResolver.getInstance(context); this.typeChecker = TypeChecker.getInstance(context); this.types = Types.getInstance(context); this.dlog = BLangDiagnosticLog.getInstance(context); this.typeNarrower = TypeNarrower.getInstance(context); this.constantAnalyzer = ConstantAnalyzer.getInstance(context); this.constantValueResolver = 
ConstantValueResolver.getInstance(context); } public BLangPackage analyze(BLangPackage pkgNode) { this.dlog.setCurrentPackageId(pkgNode.packageID); pkgNode.accept(this); return pkgNode; } public void visit(BLangPackage pkgNode) { if (pkgNode.completedPhases.contains(CompilerPhase.TYPE_CHECK)) { return; } SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol); pkgNode.topLevelNodes.stream().filter(pkgLevelNode -> pkgLevelNode.getKind() == NodeKind.CONSTANT) .forEach(constant -> analyzeDef((BLangNode) constant, pkgEnv)); this.constantValueResolver.resolve(pkgNode.constants, pkgNode.packageID); for (int i = 0; i < pkgNode.topLevelNodes.size(); i++) { TopLevelNode pkgLevelNode = pkgNode.topLevelNodes.get(i); NodeKind kind = pkgLevelNode.getKind(); if (kind == NodeKind.CONSTANT || ((kind == NodeKind.FUNCTION && ((BLangFunction) pkgLevelNode).flagSet.contains(Flag.LAMBDA)))) { continue; } analyzeDef((BLangNode) pkgLevelNode, pkgEnv); } while (pkgNode.lambdaFunctions.peek() != null) { BLangLambdaFunction lambdaFunction = pkgNode.lambdaFunctions.poll(); BLangFunction function = lambdaFunction.function; lambdaFunction.type = function.symbol.type; analyzeDef(lambdaFunction.function, lambdaFunction.capturedClosureEnv); } pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage)); pkgNode.completedPhases.add(CompilerPhase.TYPE_CHECK); } public void visit(BLangXMLNS xmlnsNode) { xmlnsNode.type = symTable.stringType; if (xmlnsNode.symbol == null) { symbolEnter.defineNode(xmlnsNode, env); } typeChecker.checkExpr(xmlnsNode.namespaceURI, env, symTable.stringType); } public void visit(BLangXMLNSStatement xmlnsStmtNode) { analyzeNode(xmlnsStmtNode.xmlnsDecl, env); } public void visit(BLangFunction funcNode) { SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env); funcNode.symbol.params.forEach(param -> param.flags |= Flags.FUNCTION_FINAL); if (!funcNode.flagSet.contains(Flag.WORKER)) { 
funcNode.annAttachments.forEach(annotationAttachment -> { if (Symbols.isFlagOn(funcNode.symbol.flags, Flags.RESOURCE)) { annotationAttachment.attachPoints.add(AttachPoint.Point.RESOURCE); } else if (funcNode.attachedFunction) { annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT_METHOD); } annotationAttachment.attachPoints.add(AttachPoint.Point.FUNCTION); this.analyzeDef(annotationAttachment, funcEnv); }); validateAnnotationAttachmentCount(funcNode.annAttachments); } if (funcNode.returnTypeNode != null) { funcNode.returnTypeAnnAttachments.forEach(annotationAttachment -> { annotationAttachment.attachPoints.add(AttachPoint.Point.RETURN); this.analyzeDef(annotationAttachment, funcEnv); }); validateAnnotationAttachmentCount(funcNode.returnTypeAnnAttachments); } boolean inIsolatedFunction = funcNode.flagSet.contains(Flag.ISOLATED); for (BLangSimpleVariable param : funcNode.requiredParams) { symbolEnter.defineExistingVarSymbolInEnv(param.symbol, funcNode.clonedEnv); this.analyzeDef(param, funcNode.clonedEnv); if (param.expr != null) { funcNode.symbol.paramDefaultValTypes.put(param.symbol.name.value, param.expr.type); ((BInvokableTypeSymbol) funcNode.type.tsymbol).paramDefaultValTypes.put(param.symbol.name.value, param.expr.type); } validateIsolatedParamUsage(inIsolatedFunction, param, false); } BLangSimpleVariable restParam = funcNode.restParam; if (restParam != null) { symbolEnter.defineExistingVarSymbolInEnv(restParam.symbol, funcNode.clonedEnv); this.analyzeDef(restParam, funcNode.clonedEnv); validateIsolatedParamUsage(inIsolatedFunction, restParam, true); } validateObjectAttachedFunction(funcNode); if (funcNode.hasBody()) { analyzeNode(funcNode.body, funcEnv, funcNode.returnTypeNode.type, null); } if (funcNode.anonForkName != null) { funcNode.symbol.enclForkName = funcNode.anonForkName; } funcNode.symbol.annAttachments.addAll(funcNode.annAttachments); this.processWorkers(funcNode, funcEnv); } private void processWorkers(BLangInvokableNode invNode, 
SymbolEnv invEnv) { if (invNode.workers.size() > 0) { invEnv.scope.entries.putAll(invNode.body.scope.entries); for (BLangWorker worker : invNode.workers) { this.symbolEnter.defineNode(worker, invEnv); } for (BLangWorker e : invNode.workers) { analyzeNode(e, invEnv); } } } @Override public void visit(BLangBlockFunctionBody body) { env = SymbolEnv.createFuncBodyEnv(body, env); for (BLangStatement stmt : body.stmts) { analyzeStmt(stmt, env); } } @Override public void visit(BLangExprFunctionBody body) { env = SymbolEnv.createFuncBodyEnv(body, env); typeChecker.checkExpr(body.expr, env, expType); } @Override public void visit(BLangExternalFunctionBody body) { for (BLangAnnotationAttachment annotationAttachment : body.annAttachments) { annotationAttachment.attachPoints.add(AttachPoint.Point.EXTERNAL); this.analyzeDef(annotationAttachment, env); } validateAnnotationAttachmentCount(body.annAttachments); } @Override public void visit(BLangTypeDefinition typeDefinition) { if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE || typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE || typeDefinition.typeNode.getKind() == NodeKind.ERROR_TYPE || typeDefinition.typeNode.getKind() == NodeKind.FINITE_TYPE_NODE) { analyzeDef(typeDefinition.typeNode, env); } typeDefinition.annAttachments.forEach(annotationAttachment -> { if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) { annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT); } annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE); annotationAttachment.accept(this); }); validateAnnotationAttachmentCount(typeDefinition.annAttachments); validateBuiltinTypeAnnotationAttachment(typeDefinition.annAttachments); } @Override public void visit(BLangClassDefinition classDefinition) { classDefinition.annAttachments.forEach(annotationAttachment -> { annotationAttachment.attachPoints.add(AttachPoint.Point.CLASS); annotationAttachment.accept(this); }); 
validateAnnotationAttachmentCount(classDefinition.annAttachments); analyzeClassDefinition(classDefinition); } private void analyzeClassDefinition(BLangClassDefinition classDefinition) { SymbolEnv classEnv = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, env); for (BLangSimpleVariable field : classDefinition.fields) { analyzeDef(field, classEnv); } for (BLangFunction function : classDefinition.functions) { analyzeDef(function, env); if (function.flagSet.contains(Flag.RESOURCE) && function.flagSet.contains(Flag.NATIVE)) { this.dlog.error(function.pos, DiagnosticCode.RESOURCE_FUNCTION_CANNOT_BE_EXTERN, function.name); } } for (BAttachedFunction func : ((BObjectTypeSymbol) classDefinition.symbol).referencedFunctions) { validateReferencedFunction(classDefinition.pos, func, env); } analyzerClassInitMethod(classDefinition); } private void analyzerClassInitMethod(BLangClassDefinition classDefinition) { if (classDefinition.initFunction == null) { return; } if (classDefinition.initFunction.flagSet.contains(Flag.PRIVATE)) { this.dlog.error(classDefinition.initFunction.pos, DiagnosticCode.PRIVATE_OBJECT_CONSTRUCTOR, classDefinition.symbol.name); return; } if (classDefinition.initFunction.flagSet.contains(Flag.NATIVE)) { this.dlog.error(classDefinition.initFunction.pos, DiagnosticCode.OBJECT_INIT_FUNCTION_CANNOT_BE_EXTERN, classDefinition.symbol.name); return; } analyzeDef(classDefinition.initFunction, env); } public void visit(BLangTypeConversionExpr conversionExpr) { conversionExpr.annAttachments.forEach(annotationAttachment -> { annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE); if (conversionExpr.typeNode.getKind() == NodeKind.OBJECT_TYPE) { annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT); } annotationAttachment.accept(this); }); validateAnnotationAttachmentCount(conversionExpr.annAttachments); } @Override public void visit(BLangFiniteTypeNode finiteTypeNode) { finiteTypeNode.valueSpace.forEach(val -> { if (val.type.tag 
== TypeTags.NIL && NULL_LITERAL.equals(((BLangLiteral) val).originalValue)) { dlog.error(val.pos, DiagnosticCode.INVALID_USE_OF_NULL_LITERAL); } }); } @Override public void visit(BLangObjectTypeNode objectTypeNode) { SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env); objectTypeNode.fields.forEach(field -> { analyzeDef(field, objectEnv); if (field.flagSet.contains(Flag.PRIVATE)) { this.dlog.error(field.pos, DiagnosticCode.PRIVATE_FIELD_ABSTRACT_OBJECT, field.symbol.name); } }); objectTypeNode.functions.forEach(func -> { analyzeDef(func, env); if (func.flagSet.contains(Flag.PRIVATE)) { this.dlog.error(func.pos, DiagnosticCode.PRIVATE_FUNC_ABSTRACT_OBJECT, func.name, objectTypeNode.symbol.name); } if (func.flagSet.contains(Flag.NATIVE)) { this.dlog.error(func.pos, DiagnosticCode.EXTERN_FUNC_ABSTRACT_OBJECT, func.name, objectTypeNode.symbol.name); } if (func.flagSet.contains(Flag.RESOURCE) && func.flagSet.contains(Flag.NATIVE)) { this.dlog.error(func.pos, DiagnosticCode.RESOURCE_FUNCTION_CANNOT_BE_EXTERN, func.name); } }); ((BObjectTypeSymbol) objectTypeNode.symbol).referencedFunctions .forEach(func -> validateReferencedFunction(objectTypeNode.pos, func, env)); if (objectTypeNode.initFunction == null) { return; } if (objectTypeNode.initFunction.flagSet.contains(Flag.PRIVATE)) { this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.PRIVATE_OBJECT_CONSTRUCTOR, objectTypeNode.symbol.name); return; } this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.ABSTRACT_OBJECT_CONSTRUCTOR, objectTypeNode.symbol.name); } @Override public void visit(BLangRecordTypeNode recordTypeNode) { SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env); recordTypeNode.fields.forEach(field -> analyzeDef(field, recordEnv)); validateOptionalNeverTypedField(recordTypeNode); validateDefaultable(recordTypeNode); recordTypeNode.analyzed = true; } @Override public void visit(BLangErrorType 
errorType) { if (errorType.detailType == null) { return; } BType detailType = errorType.detailType.type; if (!types.isValidErrorDetailType(detailType)) { dlog.error(errorType.detailType.pos, DiagnosticCode.INVALID_ERROR_DETAIL_TYPE, errorType.detailType, symTable.detailType); } } public void visit(BLangAnnotation annotationNode) { annotationNode.annAttachments.forEach(annotationAttachment -> { annotationAttachment.attachPoints.add(AttachPoint.Point.ANNOTATION); annotationAttachment.accept(this); }); validateAnnotationAttachmentCount(annotationNode.annAttachments); } public void visit(BLangAnnotationAttachment annAttachmentNode) { BSymbol symbol = this.symResolver.resolveAnnotation(annAttachmentNode.pos, env, names.fromString(annAttachmentNode.pkgAlias.getValue()), names.fromString(annAttachmentNode.getAnnotationName().getValue())); if (symbol == this.symTable.notFoundSymbol) { this.dlog.error(annAttachmentNode.pos, DiagnosticCode.UNDEFINED_ANNOTATION, annAttachmentNode.getAnnotationName().getValue()); return; } BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) symbol; annAttachmentNode.annotationSymbol = annotationSymbol; if (annotationSymbol.maskedPoints > 0 && !Symbols.isAttachPointPresent(annotationSymbol.maskedPoints, AttachPoints.asMask(annAttachmentNode.attachPoints))) { String msg = annAttachmentNode.attachPoints.stream() .map(point -> point.name().toLowerCase()) .collect(Collectors.joining(", ")); this.dlog.error(annAttachmentNode.pos, DiagnosticCode.ANNOTATION_NOT_ALLOWED, annotationSymbol, msg); } validateAnnotationAttachmentExpr(annAttachmentNode, annotationSymbol); } public void visit(BLangSimpleVariable varNode) { if (varNode.isDeclaredWithVar) { validateWorkerAnnAttachments(varNode.expr); handleDeclaredWithVar(varNode); transferForkFlag(varNode); return; } if (shouldInferErrorType(varNode)) { validateWorkerAnnAttachments(varNode.expr); handleDeclaredWithVar(varNode); transferForkFlag(varNode); if (!types.isAssignable(varNode.type, 
symTable.errorType)) { dlog.error(varNode.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.errorType, varNode.type); } return; } int ownerSymTag = env.scope.owner.tag; if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymTag & SymTag.LET) == SymTag.LET) { if (varNode.symbol == null) { analyzeVarNode(varNode, env, AttachPoint.Point.VAR); } else { analyzeVarNode(varNode, env, AttachPoint.Point.PARAMETER); } } else if ((ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) { analyzeVarNode(varNode, env, AttachPoint.Point.OBJECT_FIELD, AttachPoint.Point.FIELD); } else if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD) { analyzeVarNode(varNode, env, AttachPoint.Point.RECORD_FIELD, AttachPoint.Point.FIELD); } else { varNode.annAttachments.forEach(annotationAttachment -> { if (Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER)) { annotationAttachment.attachPoints.add(AttachPoint.Point.LISTENER); } else if (Symbols.isFlagOn(varNode.symbol.flags, Flags.SERVICE)) { annotationAttachment.attachPoints.add(AttachPoint.Point.SERVICE); } else { annotationAttachment.attachPoints.add(AttachPoint.Point.VAR); } annotationAttachment.accept(this); }); } validateAnnotationAttachmentCount(varNode.annAttachments); validateWorkerAnnAttachments(varNode.expr); if (isIgnoredOrEmpty(varNode)) { varNode.symbol = new BVarSymbol(0, Names.IGNORE, env.enclPkg.packageID, symTable.anyType, env.scope.owner, varNode.pos, VIRTUAL); } BType lhsType = varNode.symbol.type; varNode.type = lhsType; BLangExpression rhsExpr = varNode.expr; if (rhsExpr == null) { if (lhsType.tag == TypeTags.ARRAY && typeChecker.isArrayOpenSealedType((BArrayType) lhsType)) { dlog.error(varNode.pos, DiagnosticCode.SEALED_ARRAY_TYPE_NOT_INITIALIZED); } return; } SymbolEnv varInitEnv = SymbolEnv.createVarInitEnv(varNode, env, varNode.symbol); typeChecker.checkExpr(rhsExpr, varInitEnv, lhsType); if (Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER) && !types.checkListenerCompatibility(varNode.symbol.type)) { 
dlog.error(varNode.pos, DiagnosticCode.INVALID_LISTENER_VARIABLE, varNode.name); } transferForkFlag(varNode); } private boolean shouldInferErrorType(BLangSimpleVariable varNode) { return varNode.typeNode != null && varNode.typeNode.getKind() == NodeKind.ERROR_TYPE && ((BLangErrorType) varNode.typeNode).inferErrorType; } private void analyzeVarNode(BLangSimpleVariable varNode, SymbolEnv env, AttachPoint.Point... attachPoints) { if (varNode.symbol == null) { symbolEnter.defineNode(varNode, env); } if (varNode.typeNode != null && varNode.typeNode.getKind() == NodeKind.RECORD_TYPE && !((BLangRecordTypeNode) varNode.typeNode).analyzed) { analyzeDef(varNode.typeNode, env); } List<AttachPoint.Point> attachPointsList = Arrays.asList(attachPoints); for (BLangAnnotationAttachment annotationAttachment : varNode.annAttachments) { annotationAttachment.attachPoints.addAll(attachPointsList); annotationAttachment.accept(this); } } private void transferForkFlag(BLangSimpleVariable varNode) { if (varNode.expr != null && varNode.expr.getKind() == NodeKind.INVOCATION && varNode.flagSet.contains(Flag.WORKER)) { BLangInvocation expr = (BLangInvocation) varNode.expr; if (expr.name.value.startsWith("0") && (expr.symbol.flags & Flags.FORKED) == Flags.FORKED) { varNode.symbol.flags |= Flags.FORKED; } } } /** * Validate annotation attachment of the `start` action or workers. * * @param expr expression to be validated. 
*/ private void validateWorkerAnnAttachments(BLangExpression expr) { if (expr != null && expr instanceof BLangInvocation.BLangActionInvocation && ((BLangInvocation.BLangActionInvocation) expr).async) { ((BLangInvocation) expr).annAttachments.forEach(annotationAttachment -> { annotationAttachment.attachPoints.add(AttachPoint.Point.WORKER); annotationAttachment.accept(this); }); validateAnnotationAttachmentCount(((BLangInvocation) expr).annAttachments); } } public void visit(BLangRecordVariable varNode) { if (varNode.isDeclaredWithVar) { handleDeclaredWithVar(varNode); return; } if (varNode.type == null) { varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env); } if (!validateRecordVariable(varNode)) { varNode.type = symTable.semanticError; return; } symbolEnter.defineNode(varNode, env); if (varNode.expr == null) { return; } typeChecker.checkExpr(varNode.expr, env, varNode.type); } public void visit(BLangTupleVariable varNode) { if (varNode.isDeclaredWithVar) { expType = resolveTupleType(varNode); handleDeclaredWithVar(varNode); return; } if (varNode.type == null) { varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env); } if (!(checkTypeAndVarCountConsistency(varNode))) { varNode.type = symTable.semanticError; return; } symbolEnter.defineNode(varNode, env); if (varNode.expr == null) { return; } typeChecker.checkExpr(varNode.expr, env, varNode.type); } private BType resolveTupleType(BLangTupleVariable varNode) { List<BType> memberTypes = new ArrayList<>(varNode.memberVariables.size()); for (BLangVariable memberVariable : varNode.memberVariables) { if (memberVariable.getKind() == NodeKind.TUPLE_VARIABLE) { memberTypes.add(resolveTupleType((BLangTupleVariable) memberVariable)); } else { memberTypes.add(symTable.noType); } } return new BTupleType(memberTypes); } public void visit(BLangErrorVariable varNode) { if (varNode.isDeclaredWithVar) { handleDeclaredWithVar(varNode); return; } if (varNode.type == null) { varNode.type = 
symResolver.resolveTypeNode(varNode.typeNode, env); } if (!varNode.reasonVarPrefixAvailable && varNode.type == null) { BErrorType errorType = new BErrorType(varNode.type.tsymbol, null); if (varNode.type.tag == TypeTags.UNION) { Set<BType> members = types.expandAndGetMemberTypesRecursive(varNode.type); List<BErrorType> errorMembers = members.stream() .filter(m -> m.tag == TypeTags.ERROR) .map(m -> (BErrorType) m) .collect(Collectors.toList()); if (errorMembers.isEmpty()) { dlog.error(varNode.pos, DiagnosticCode.INVALID_ERROR_MATCH_PATTERN); return; } else if (errorMembers.size() == 1) { errorType.detailType = errorMembers.get(0).detailType; } else { errorType.detailType = symTable.detailType; } varNode.type = errorType; } else if (varNode.type.tag == TypeTags.ERROR) { errorType.detailType = ((BErrorType) varNode.type).detailType; } } if (!validateErrorVariable(varNode)) { varNode.type = symTable.semanticError; return; } symbolEnter.defineNode(varNode, env); if (varNode.expr == null) { return; } typeChecker.checkExpr(varNode.expr, env, varNode.type); } private void handleDeclaredWithVar(BLangVariable variable) { BLangExpression varRefExpr = variable.expr; BType rhsType; if (varRefExpr == null) { rhsType = symTable.semanticError; variable.type = symTable.semanticError; dlog.error(variable.pos, DiagnosticCode.VARIABLE_DECL_WITH_VAR_WITHOUT_INITIALIZER); } else { rhsType = typeChecker.checkExpr(varRefExpr, this.env, expType); } switch (variable.getKind()) { case VARIABLE: case LET_VARIABLE: if (!validateObjectTypeInitInvocation(varRefExpr)) { rhsType = symTable.semanticError; } if (variable.flagSet.contains(Flag.LISTENER) && !types.checkListenerCompatibility(rhsType)) { dlog.error(varRefExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, LISTENER_TYPE_NAME, rhsType); return; } BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable; Name varName = names.fromIdNode(simpleVariable.name); if (varName == Names.IGNORE) { dlog.error(simpleVariable.pos, 
DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT); return; } simpleVariable.type = rhsType; int ownerSymTag = env.scope.owner.tag; if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymTag & SymTag.LET) == SymTag.LET) { if (simpleVariable.symbol == null) { symbolEnter.defineNode(simpleVariable, env); } } simpleVariable.symbol.type = rhsType; break; case TUPLE_VARIABLE: if (varRefExpr == null) { return; } if (variable.isDeclaredWithVar && variable.expr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) { List<String> bindingPatternVars = new ArrayList<>(); List<BLangVariable> members = ((BLangTupleVariable) variable).memberVariables; for (BLangVariable var : members) { bindingPatternVars.add(((BLangSimpleVariable) var).name.value); } dlog.error(varRefExpr.pos, DiagnosticCode.CANNOT_INFER_TYPES_FOR_TUPLE_BINDING, bindingPatternVars); variable.type = symTable.semanticError; return; } if (TypeTags.TUPLE != rhsType.tag) { dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_INFERENCE, rhsType); variable.type = symTable.semanticError; return; } BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; tupleVariable.type = rhsType; if (!(checkTypeAndVarCountConsistency(tupleVariable))) { tupleVariable.type = symTable.semanticError; return; } symbolEnter.defineNode(tupleVariable, env); break; case RECORD_VARIABLE: if (varRefExpr == null) { return; } if (TypeTags.RECORD != rhsType.tag && TypeTags.MAP != rhsType.tag && TypeTags.JSON != rhsType.tag) { dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_RECORD_VAR, rhsType); variable.type = symTable.semanticError; } BLangRecordVariable recordVariable = (BLangRecordVariable) variable; recordVariable.type = rhsType; if (!validateRecordVariable(recordVariable)) { recordVariable.type = symTable.semanticError; } break; case ERROR_VARIABLE: if (varRefExpr == null) { return; } if (TypeTags.ERROR != rhsType.tag) { dlog.error(variable.expr.pos, 
DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType); variable.type = symTable.semanticError; return; } BLangErrorVariable errorVariable = (BLangErrorVariable) variable; if (errorVariable.typeNode != null) { symResolver.resolveTypeNode(errorVariable.typeNode, env); } errorVariable.type = rhsType; if (!validateErrorVariable(errorVariable)) { errorVariable.type = symTable.semanticError; return; } symbolEnter.defineNode(errorVariable, env); break; } } void handleDeclaredVarInForeach(BLangVariable variable, BType rhsType, SymbolEnv blockEnv) { switch (variable.getKind()) { case VARIABLE: BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable; Name varName = names.fromIdNode(simpleVariable.name); if (varName == Names.IGNORE) { dlog.error(simpleVariable.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED); return; } simpleVariable.type = rhsType; int ownerSymTag = blockEnv.scope.owner.tag; if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) { if (simpleVariable.symbol == null) { symbolEnter.defineNode(simpleVariable, blockEnv); } } recursivelySetFinalFlag(simpleVariable); break; case TUPLE_VARIABLE: BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; if (TypeTags.TUPLE != rhsType.tag && TypeTags.UNION != rhsType.tag) { dlog.error(variable.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_INFERENCE, rhsType); recursivelyDefineVariables(tupleVariable, blockEnv); return; } tupleVariable.type = rhsType; if (rhsType.tag == TypeTags.TUPLE && !(checkTypeAndVarCountConsistency(tupleVariable, (BTupleType) tupleVariable.type, blockEnv))) { recursivelyDefineVariables(tupleVariable, blockEnv); return; } if (rhsType.tag == TypeTags.UNION && !(checkTypeAndVarCountConsistency(tupleVariable, null, blockEnv))) { recursivelyDefineVariables(tupleVariable, blockEnv); return; } symbolEnter.defineNode(tupleVariable, blockEnv); recursivelySetFinalFlag(tupleVariable); break; case RECORD_VARIABLE: BLangRecordVariable recordVariable = (BLangRecordVariable) 
variable; recordVariable.type = rhsType; validateRecordVariable(recordVariable, blockEnv); recursivelySetFinalFlag(recordVariable); break; case ERROR_VARIABLE: BLangErrorVariable errorVariable = (BLangErrorVariable) variable; if (TypeTags.ERROR != rhsType.tag) { dlog.error(variable.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType); recursivelyDefineVariables(errorVariable, blockEnv); return; } errorVariable.type = rhsType; validateErrorVariable(errorVariable); recursivelySetFinalFlag(errorVariable); break; } } private void recursivelyDefineVariables(BLangVariable variable, SymbolEnv blockEnv) { switch (variable.getKind()) { case VARIABLE: Name name = names.fromIdNode(((BLangSimpleVariable) variable).name); if (name == Names.IGNORE) { return; } variable.type = symTable.semanticError; symbolEnter.defineVarSymbol(variable.pos, variable.flagSet, variable.type, name, blockEnv, variable.internal); break; case TUPLE_VARIABLE: ((BLangTupleVariable) variable).memberVariables.forEach(memberVariable -> recursivelyDefineVariables(memberVariable, blockEnv)); break; case RECORD_VARIABLE: ((BLangRecordVariable) variable).variableList.forEach(value -> recursivelyDefineVariables(value.valueBindingPattern, blockEnv)); break; } } private void recursivelySetFinalFlag(BLangVariable variable) { if (variable == null) { return; } switch (variable.getKind()) { case VARIABLE: if (variable.symbol == null) { return; } variable.symbol.flags |= Flags.FINAL; break; case TUPLE_VARIABLE: BLangTupleVariable tupleVariable = (BLangTupleVariable) variable; tupleVariable.memberVariables.forEach(this::recursivelySetFinalFlag); recursivelySetFinalFlag(tupleVariable.restVariable); break; case RECORD_VARIABLE: BLangRecordVariable recordVariable = (BLangRecordVariable) variable; recordVariable.variableList.forEach(value -> recursivelySetFinalFlag(value.valueBindingPattern)); recursivelySetFinalFlag((BLangVariable) recordVariable.restParam); break; case ERROR_VARIABLE: BLangErrorVariable 
errorVariable = (BLangErrorVariable) variable; recursivelySetFinalFlag(errorVariable.message); recursivelySetFinalFlag(errorVariable.restDetail); errorVariable.detail.forEach(bLangErrorDetailEntry -> recursivelySetFinalFlag(bLangErrorDetailEntry.valueBindingPattern)); break; } } private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode) { return checkTypeAndVarCountConsistency(varNode, null, env); } private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode, BTupleType tupleTypeNode, SymbolEnv env) { if (tupleTypeNode == null) { /* This switch block will resolve the tuple type of the tuple variable. For example consider the following - [int, string]|[boolean, float] [a, b] = foo(); Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows: Type of 'a' will be (int | boolean) while the type of 'b' will be (string | float). Consider anydata (a, b) = foo(); Here, the type of 'a'and type of 'b' will be both anydata. */ switch (varNode.type.tag) { case TypeTags.UNION: Set<BType> unionType = types.expandAndGetMemberTypesRecursive(varNode.type); List<BType> possibleTypes = unionType.stream() .filter(type -> { if (TypeTags.TUPLE == type.tag && (varNode.memberVariables.size() == ((BTupleType) type).tupleTypes.size())) { return true; } return TypeTags.ANY == type.tag || TypeTags.ANYDATA == type.tag; }) .collect(Collectors.toList()); if (possibleTypes.isEmpty()) { dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_DECL, varNode.type); return false; } if (possibleTypes.size() > 1) { List<BType> memberTupleTypes = new ArrayList<>(); for (int i = 0; i < varNode.memberVariables.size(); i++) { LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); for (BType possibleType : possibleTypes) { if (possibleType.tag == TypeTags.TUPLE) { memberTypes.add(((BTupleType) possibleType).tupleTypes.get(i)); } else { memberTupleTypes.add(varNode.type); } } if (memberTypes.size() > 1) { 
memberTupleTypes.add(BUnionType.create(null, memberTypes)); } else { memberTupleTypes.addAll(memberTypes); } } tupleTypeNode = new BTupleType(memberTupleTypes); break; } if (possibleTypes.get(0).tag == TypeTags.TUPLE) { tupleTypeNode = (BTupleType) possibleTypes.get(0); break; } List<BType> memberTypes = new ArrayList<>(); for (int i = 0; i < varNode.memberVariables.size(); i++) { memberTypes.add(possibleTypes.get(0)); } tupleTypeNode = new BTupleType(memberTypes); break; case TypeTags.ANY: case TypeTags.ANYDATA: List<BType> memberTupleTypes = new ArrayList<>(); for (int i = 0; i < varNode.memberVariables.size(); i++) { memberTupleTypes.add(varNode.type); } tupleTypeNode = new BTupleType(memberTupleTypes); if (varNode.restVariable != null) { tupleTypeNode.restType = varNode.type; } break; case TypeTags.TUPLE: tupleTypeNode = (BTupleType) varNode.type; break; default: dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_DECL, varNode.type); return false; } } if (tupleTypeNode.tupleTypes.size() != varNode.memberVariables.size() || (tupleTypeNode.restType == null && varNode.restVariable != null) || (tupleTypeNode.restType != null && varNode.restVariable == null)) { dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN); return false; } int ignoredCount = 0; List<BLangVariable> memberVariables = new ArrayList<>(varNode.memberVariables); if (varNode.restVariable != null) { memberVariables.add(varNode.restVariable); } for (int i = 0; i < memberVariables.size(); i++) { BLangVariable var = memberVariables.get(i); BType type = (i <= tupleTypeNode.tupleTypes.size() - 1) ? 
tupleTypeNode.tupleTypes.get(i) : new BArrayType(tupleTypeNode.restType); if (var.getKind() == NodeKind.VARIABLE) { BLangSimpleVariable simpleVar = (BLangSimpleVariable) var; Name varName = names.fromIdNode(simpleVar.name); if (varName == Names.IGNORE) { ignoredCount++; simpleVar.type = symTable.anyType; types.checkType(varNode.pos, type, simpleVar.type, DiagnosticCode.INCOMPATIBLE_TYPES); continue; } } var.type = type; analyzeNode(var, env); } if (!varNode.memberVariables.isEmpty() && ignoredCount == varNode.memberVariables.size() && varNode.restVariable == null) { dlog.error(varNode.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT); return false; } return true; } private boolean validateRecordVariable(BLangRecordVariable recordVar) { return validateRecordVariable(recordVar, env); } private boolean validateRecordVariable(BLangRecordVariable recordVar, SymbolEnv env) { BRecordType recordVarType; /* This switch block will resolve the record type of the record variable. For example consider the following - type Foo record {int a, boolean b}; type Bar record {string a, float b}; Foo|Bar {a, b} = foo(); Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows: Type of 'a' will be a union of the types of field 'a' in both Foo and Bar. i.e. type of 'a' is (int | string) and type of 'b' is (boolean | float). Consider anydata {a, b} = foo(); Here, the type of 'a'and type of 'b' will be both anydata. 
*/ switch (recordVar.type.tag) { case TypeTags.UNION: BUnionType unionType = (BUnionType) recordVar.type; Set<BType> bTypes = types.expandAndGetMemberTypesRecursive(unionType); List<BType> possibleTypes = bTypes.stream() .filter(rec -> doesRecordContainKeys(rec, recordVar.variableList, recordVar.restParam != null)) .collect(Collectors.toList()); if (possibleTypes.isEmpty()) { dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type); return false; } if (possibleTypes.size() > 1) { BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, names.fromString(ANONYMOUS_RECORD_NAME), env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVar.pos, SOURCE); recordVarType = (BRecordType) symTable.recordType; LinkedHashMap<String, BField> fields = populateAndGetPossibleFieldsForRecVar(recordVar, possibleTypes, recordSymbol); if (recordVar.restParam != null) { LinkedHashSet<BType> memberTypes = possibleTypes.stream() .map(possibleType -> { if (possibleType.tag == TypeTags.RECORD) { return ((BRecordType) possibleType).restFieldType; } else if (possibleType.tag == TypeTags.MAP) { return ((BMapType) possibleType).constraint; } else { return possibleType; } }) .collect(Collectors.toCollection(LinkedHashSet::new)); recordVarType.restFieldType = memberTypes.size() > 1 ? 
BUnionType.create(null, memberTypes) : memberTypes.iterator().next(); } recordVarType.tsymbol = recordSymbol; recordVarType.fields = fields; recordSymbol.type = recordVarType; break; } if (possibleTypes.get(0).tag == TypeTags.RECORD) { recordVarType = (BRecordType) possibleTypes.get(0); break; } if (possibleTypes.get(0).tag == TypeTags.MAP) { recordVarType = createSameTypedFieldsRecordType(recordVar, ((BMapType) possibleTypes.get(0)).constraint); break; } recordVarType = createSameTypedFieldsRecordType(recordVar, possibleTypes.get(0)); break; case TypeTags.RECORD: recordVarType = (BRecordType) recordVar.type; break; case TypeTags.MAP: recordVarType = createSameTypedFieldsRecordType(recordVar, ((BMapType) recordVar.type).constraint); break; case TypeTags.ANY: case TypeTags.ANYDATA: recordVarType = createSameTypedFieldsRecordType(recordVar, recordVar.type); break; default: dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type); return false; } LinkedHashMap<String, BField> recordVarTypeFields = recordVarType.fields; boolean validRecord = true; int ignoredCount = 0; for (BLangRecordVariableKeyValue variable : recordVar.variableList) { if (names.fromIdNode(variable.getKey()) == Names.IGNORE) { dlog.error(recordVar.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED); continue; } BLangVariable value = variable.getValue(); if (value.getKind() == NodeKind.VARIABLE) { BLangSimpleVariable simpleVar = (BLangSimpleVariable) value; Name varName = names.fromIdNode(simpleVar.name); if (varName == Names.IGNORE) { ignoredCount++; simpleVar.type = symTable.anyType; if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) { continue; } types.checkType(variable.valueBindingPattern.pos, recordVarTypeFields.get((variable.getKey().getValue())).type, simpleVar.type, DiagnosticCode.INCOMPATIBLE_TYPES); continue; } } if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) { if (recordVarType.sealed) { validRecord = false; 
dlog.error(recordVar.pos, DiagnosticCode.INVALID_FIELD_IN_RECORD_BINDING_PATTERN, variable.getKey().getValue(), recordVar.type); } else { BType restType; if (recordVarType.restFieldType.tag == TypeTags.ANYDATA || recordVarType.restFieldType.tag == TypeTags.ANY) { restType = recordVarType.restFieldType; } else { restType = BUnionType.create(null, recordVarType.restFieldType, symTable.nilType); } value.type = restType; analyzeNode(value, env); } continue; } value.type = recordVarTypeFields.get((variable.getKey().getValue())).type; analyzeNode(value, env); } if (!recordVar.variableList.isEmpty() && ignoredCount == recordVar.variableList.size() && recordVar.restParam == null) { dlog.error(recordVar.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT); return false; } if (recordVar.restParam != null) { ((BLangVariable) recordVar.restParam).type = getRestParamType(recordVarType); symbolEnter.defineNode((BLangNode) recordVar.restParam, env); } return validRecord; } private boolean validateErrorVariable(BLangErrorVariable errorVariable) { BErrorType errorType; switch (errorVariable.type.tag) { case TypeTags.UNION: BUnionType unionType = ((BUnionType) errorVariable.type); List<BErrorType> possibleTypes = unionType.getMemberTypes().stream() .filter(type -> TypeTags.ERROR == type.tag) .map(BErrorType.class::cast) .collect(Collectors.toList()); if (possibleTypes.isEmpty()) { dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type); return false; } if (possibleTypes.size() > 1) { LinkedHashSet<BType> detailType = new LinkedHashSet<>(); for (BErrorType possibleErrType : possibleTypes) { detailType.add(possibleErrType.detailType); } BType errorDetailType = detailType.size() > 1 ? 
BUnionType.create(null, detailType) : detailType.iterator().next(); errorType = new BErrorType(null, errorDetailType); } else { errorType = possibleTypes.get(0); } break; case TypeTags.ERROR: errorType = (BErrorType) errorVariable.type; break; default: dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type); return false; } errorVariable.type = errorType; if (!errorVariable.isInMatchStmt) { errorVariable.message.type = symTable.stringType; errorVariable.message.accept(this); if (errorVariable.cause != null) { errorVariable.cause.type = symTable.errorOrNilType; errorVariable.cause.accept(this); } } if (errorVariable.detail == null || (errorVariable.detail.isEmpty() && !isRestDetailBindingAvailable(errorVariable))) { return validateErrorMessageMatchPatternSyntax(errorVariable); } if (errorType.detailType.getKind() == TypeKind.RECORD || errorType.detailType.getKind() == TypeKind.MAP) { return validateErrorVariable(errorVariable, errorType); } else if (errorType.detailType.getKind() == TypeKind.UNION) { BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR, env.enclPkg.packageID, symTable.errorType, env.scope.owner, errorVariable.pos, SOURCE); errorVariable.type = new BErrorType(errorTypeSymbol, symTable.detailType); return validateErrorVariable(errorVariable); } if (isRestDetailBindingAvailable(errorVariable)) { errorVariable.restDetail.type = symTable.detailType; errorVariable.restDetail.accept(this); } return true; } private boolean validateErrorVariable(BLangErrorVariable errorVariable, BErrorType errorType) { errorVariable.message.type = symTable.stringType; errorVariable.message.accept(this); BRecordType recordType = getDetailAsARecordType(errorType); LinkedHashMap<String, BField> detailFields = recordType.fields; Set<String> matchedDetailFields = new HashSet<>(); for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : errorVariable.detail) { String entryName = 
errorDetailEntry.key.getValue(); matchedDetailFields.add(entryName); BField entryField = detailFields.get(entryName); BLangVariable boundVar = errorDetailEntry.valueBindingPattern; if (entryField != null) { if ((entryField.symbol.flags & Flags.OPTIONAL) == Flags.OPTIONAL) { boundVar.type = BUnionType.create(null, entryField.type, symTable.nilType); } else { boundVar.type = entryField.type; } } else { if (recordType.sealed) { dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type); boundVar.type = symTable.semanticError; return false; } else { boundVar.type = BUnionType.create(null, recordType.restFieldType, symTable.nilType); } } boolean isIgnoredVar = boundVar.getKind() == NodeKind.VARIABLE && ((BLangSimpleVariable) boundVar).name.value.equals(Names.IGNORE.value); if (!isIgnoredVar) { boundVar.accept(this); } } if (isRestDetailBindingAvailable(errorVariable)) { BTypeSymbol typeSymbol = createTypeSymbol(SymTag.TYPE); BType constraint = getRestMapConstraintType(detailFields, matchedDetailFields, recordType); BMapType restType = new BMapType(TypeTags.MAP, constraint, typeSymbol); typeSymbol.type = restType; errorVariable.restDetail.type = restType; errorVariable.restDetail.accept(this); } return true; } private BRecordType getDetailAsARecordType(BErrorType errorType) { if (errorType.detailType.getKind() == TypeKind.RECORD) { return (BRecordType) errorType.detailType; } BRecordType detailRecord = new BRecordType(null); BMapType detailMap = (BMapType) errorType.detailType; detailRecord.sealed = false; detailRecord.restFieldType = detailMap.constraint; return detailRecord; } private BType getRestMapConstraintType(Map<String, BField> errorDetailFields, Set<String> matchedDetailFields, BRecordType recordType) { BUnionType restUnionType = BUnionType.create(null); if (!recordType.sealed) { restUnionType.add(recordType.restFieldType); } for (Map.Entry<String, BField> entry : errorDetailFields.entrySet()) { if 
(!matchedDetailFields.contains(entry.getKey())) { BType type = entry.getValue().getType(); if (!types.isAssignable(type, restUnionType)) { restUnionType.add(type); } } } Set<BType> memberTypes = restUnionType.getMemberTypes(); if (memberTypes.size() == 1) { return memberTypes.iterator().next(); } return restUnionType; } private boolean validateErrorMessageMatchPatternSyntax(BLangErrorVariable errorVariable) { if (errorVariable.isInMatchStmt && !errorVariable.reasonVarPrefixAvailable && errorVariable.reasonMatchConst == null && isReasonSpecified(errorVariable)) { BSymbol reasonConst = symResolver.lookupSymbolInMainSpace(this.env.enclEnv, names.fromString(errorVariable.message.name.value)); if ((reasonConst.tag & SymTag.CONSTANT) != SymTag.CONSTANT) { dlog.error(errorVariable.message.pos, DiagnosticCode.INVALID_ERROR_REASON_BINDING_PATTERN, errorVariable.message.name); } else { dlog.error(errorVariable.message.pos, DiagnosticCode.UNSUPPORTED_ERROR_REASON_CONST_MATCH); } return false; } return true; } private boolean isReasonSpecified(BLangErrorVariable errorVariable) { return !isIgnoredOrEmpty(errorVariable.message); } private boolean isIgnoredOrEmpty(BLangSimpleVariable varNode) { return varNode.name.value.equals(Names.IGNORE.value) || varNode.name.value.equals(""); } private boolean isRestDetailBindingAvailable(BLangErrorVariable errorVariable) { return errorVariable.restDetail != null && !errorVariable.restDetail.name.value.equals(Names.IGNORE.value); } private BTypeSymbol createTypeSymbol(int type) { return new BTypeSymbol(type, Flags.PUBLIC, Names.EMPTY, env.enclPkg.packageID, null, env.scope.owner, symTable.builtinPos, VIRTUAL); } /** * This method will resolve field types based on a list of possible types. * When a record variable has multiple possible assignable types, each field will be a union of the relevant * possible types field type. 
* * @param recordVar record variable whose fields types are to be resolved * @param possibleTypes list of possible types * @param recordSymbol symbol of the record type to be used in creating fields * @return the list of fields */ private LinkedHashMap<String, BField> populateAndGetPossibleFieldsForRecVar(BLangRecordVariable recordVar, List<BType> possibleTypes, BRecordTypeSymbol recordSymbol) { LinkedHashMap<String, BField> fields = new LinkedHashMap<>(); for (BLangRecordVariableKeyValue bLangRecordVariableKeyValue : recordVar.variableList) { String fieldName = bLangRecordVariableKeyValue.key.value; LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(); for (BType possibleType : possibleTypes) { if (possibleType.tag == TypeTags.RECORD) { BRecordType possibleRecordType = (BRecordType) possibleType; if (possibleRecordType.fields.containsKey(fieldName)) { BField field = possibleRecordType.fields.get(fieldName); if (Symbols.isOptional(field.symbol)) { memberTypes.add(symTable.nilType); } memberTypes.add(field.type); } else { memberTypes.add(possibleRecordType.restFieldType); memberTypes.add(symTable.nilType); } continue; } if (possibleType.tag == TypeTags.MAP) { BMapType possibleMapType = (BMapType) possibleType; memberTypes.add(possibleMapType.constraint); continue; } memberTypes.add(possibleType); } BType fieldType = memberTypes.size() > 1 ? BUnionType.create(null, memberTypes) : memberTypes.iterator().next(); BField field = new BField(names.fromString(fieldName), recordVar.pos, new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID, fieldType, recordSymbol, recordVar.pos, SOURCE)); fields.put(field.name.value, field); } return fields; }
class defined for an object-constructor-expression (OCE). This will be analyzed when continue; } analyzeDef((BLangNode) pkgLevelNode, pkgEnv); } while (pkgNode.lambdaFunctions.peek() != null) { BLangLambdaFunction lambdaFunction = pkgNode.lambdaFunctions.poll(); BLangFunction function = lambdaFunction.function; lambdaFunction.type = function.symbol.type; analyzeDef(lambdaFunction.function, lambdaFunction.capturedClosureEnv); }
The direct mode will also use the proxy configuration which is from the gateconnection configuration, then the gateconnection will have the high priority to apply.
protected void configureService(CosmosClientBuilder builder) { PropertyMapper map = new PropertyMapper(); map.from(this.cosmosProperties.getEndpoint()).to(builder::endpoint); map.from(this.cosmosProperties.getConsistencyLevel()).to(builder::consistencyLevel); map.from(this.cosmosProperties.getClientTelemetryEnabled()).to(builder::clientTelemetryEnabled); map.from(this.cosmosProperties.getConnectionSharingAcrossClientsEnabled()).to(builder::connectionSharingAcrossClientsEnabled); map.from(this.cosmosProperties.getContentResponseOnWriteEnabled()).to(builder::contentResponseOnWriteEnabled); map.from(this.cosmosProperties.getEndpointDiscoveryEnabled()).to(builder::endpointDiscoveryEnabled); map.from(this.cosmosProperties.getMultipleWriteRegionsEnabled()).to(builder::multipleWriteRegionsEnabled); map.from(this.cosmosProperties.getReadRequestsFallbackEnabled()).to(builder::readRequestsFallbackEnabled); map.from(this.cosmosProperties.getSessionCapturingOverrideEnabled()).to(builder::sessionCapturingOverrideEnabled); map.from(this.cosmosProperties.getPreferredRegions()).whenNot(List::isEmpty).to(builder::preferredRegions); map.from(this.cosmosProperties.getThrottlingRetryOptions()).to(builder::throttlingRetryOptions); map.from(this.cosmosProperties.getResourceToken()).to(builder::resourceToken); map.from(this.cosmosProperties.getPermissions()).whenNot(List::isEmpty).to(builder::permissions); builder.gatewayMode(this.cosmosProperties.getGatewayConnection()); if (ConnectionMode.DIRECT.equals(this.cosmosProperties.getConnectionMode())) { builder.directMode(this.cosmosProperties.getDirectConnection()); } }
builder.gatewayMode(this.cosmosProperties.getGatewayConnection());
protected void configureService(CosmosClientBuilder builder) { PropertyMapper map = new PropertyMapper(); map.from(this.cosmosProperties.getEndpoint()).to(builder::endpoint); map.from(this.cosmosProperties.getConsistencyLevel()).to(builder::consistencyLevel); map.from(this.cosmosProperties.getClientTelemetryEnabled()).to(builder::clientTelemetryEnabled); map.from(this.cosmosProperties.getConnectionSharingAcrossClientsEnabled()).to(builder::connectionSharingAcrossClientsEnabled); map.from(this.cosmosProperties.getContentResponseOnWriteEnabled()).to(builder::contentResponseOnWriteEnabled); map.from(this.cosmosProperties.getEndpointDiscoveryEnabled()).to(builder::endpointDiscoveryEnabled); map.from(this.cosmosProperties.getMultipleWriteRegionsEnabled()).to(builder::multipleWriteRegionsEnabled); map.from(this.cosmosProperties.getReadRequestsFallbackEnabled()).to(builder::readRequestsFallbackEnabled); map.from(this.cosmosProperties.getSessionCapturingOverrideEnabled()).to(builder::sessionCapturingOverrideEnabled); map.from(this.cosmosProperties.getPreferredRegions()).whenNot(List::isEmpty).to(builder::preferredRegions); map.from(this.cosmosProperties.getThrottlingRetryOptions()).to(builder::throttlingRetryOptions); map.from(this.cosmosProperties.getResourceToken()).to(builder::resourceToken); map.from(this.cosmosProperties.getPermissions()).whenNot(List::isEmpty).to(builder::permissions); if (ConnectionMode.DIRECT.equals(this.cosmosProperties.getConnectionMode())) { builder.directMode(this.cosmosProperties.getDirectConnection(), this.cosmosProperties.getGatewayConnection()); } else if (ConnectionMode.GATEWAY.equals(this.cosmosProperties.getConnectionMode())) { builder.gatewayMode(this.cosmosProperties.getGatewayConnection()); } }
class CosmosClientBuilderFactory extends AbstractAzureServiceClientBuilderFactory<CosmosClientBuilder> { private static final Logger LOGGER = LoggerFactory.getLogger(CosmosClientBuilderFactory.class); private final CosmosProperties cosmosProperties; public CosmosClientBuilderFactory(CosmosProperties cosmosProperties) { this.cosmosProperties = cosmosProperties; } @Override protected CosmosClientBuilder createBuilderInstance() { return new CosmosClientBuilder(); } @Override protected AzureProperties getAzureProperties() { return this.cosmosProperties; } @Override protected List<AuthenticationDescriptor<?>> getAuthenticationDescriptors(CosmosClientBuilder builder) { return Arrays.asList( new KeyAuthenticationDescriptor(provider -> builder.credential(provider.getCredential())), new TokenAuthenticationDescriptor(provider -> builder.credential(provider.getCredential())) ); } @Override protected void configureApplicationId(CosmosClientBuilder builder) { builder.userAgentSuffix(ApplicationId.AZURE_SPRING_COSMOS); } @Override protected void configureProxy(CosmosClientBuilder builder) { LOGGER.debug("No configureProxy for CosmosClientBuilder."); } @Override protected void configureRetry(CosmosClientBuilder builder) { LOGGER.debug("No configureRetry for CosmosClientBuilder."); } @Override @Override protected BiConsumer<CosmosClientBuilder, Configuration> consumeConfiguration() { LOGGER.warn("Configuration instance is not supported to configure in CosmosClientBuilder"); return (a, b) -> { }; } @Override protected BiConsumer<CosmosClientBuilder, TokenCredential> consumeDefaultTokenCredential() { return CosmosClientBuilder::credential; } @Override protected BiConsumer<CosmosClientBuilder, String> consumeConnectionString() { LOGGER.debug("Connection string is not supported to configure in CosmosClientBuilder"); return (a, b) -> { }; } }
class CosmosClientBuilderFactory extends AbstractAzureServiceClientBuilderFactory<CosmosClientBuilder> { private static final Logger LOGGER = LoggerFactory.getLogger(CosmosClientBuilderFactory.class); private final CosmosProperties cosmosProperties; public CosmosClientBuilderFactory(CosmosProperties cosmosProperties) { this.cosmosProperties = cosmosProperties; } @Override protected CosmosClientBuilder createBuilderInstance() { return new CosmosClientBuilder(); } @Override protected AzureProperties getAzureProperties() { return this.cosmosProperties; } @Override protected List<AuthenticationDescriptor<?>> getAuthenticationDescriptors(CosmosClientBuilder builder) { return Arrays.asList( new KeyAuthenticationDescriptor(provider -> builder.credential(provider.getCredential())), new TokenAuthenticationDescriptor(provider -> builder.credential(provider.getCredential())) ); } @Override protected void configureApplicationId(CosmosClientBuilder builder) { builder.userAgentSuffix(ApplicationId.AZURE_SPRING_COSMOS); } @Override protected void configureProxy(CosmosClientBuilder builder) { LOGGER.debug("No configureProxy for CosmosClientBuilder."); } @Override protected void configureRetry(CosmosClientBuilder builder) { LOGGER.debug("No configureRetry for CosmosClientBuilder."); } @Override @Override protected BiConsumer<CosmosClientBuilder, Configuration> consumeConfiguration() { LOGGER.warn("Configuration instance is not supported to configure in CosmosClientBuilder"); return (a, b) -> { }; } @Override protected BiConsumer<CosmosClientBuilder, TokenCredential> consumeDefaultTokenCredential() { return CosmosClientBuilder::credential; } @Override protected BiConsumer<CosmosClientBuilder, String> consumeConnectionString() { LOGGER.debug("Connection string is not supported to configure in CosmosClientBuilder"); return (a, b) -> { }; } }
Can be merged with the previous line.
public void testEncryptRsaEcbOAEPwithSHA512andMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA512andMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA512andMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); }
new BString("OAEPwithSHA512andMGF1")};
public void testEncryptRsaEcbOAEPwithSHA512andMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA512andMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA512andMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); }
class CryptoTest { private static final int KEY_SIZE = 16; private CompileResult compileResult; private String resourceRoot; private Path sourceRoot; private Path confRoot; @BeforeClass public void setup() { resourceRoot = Paths.get("src", "test", "resources").toAbsolutePath().toString(); sourceRoot = Paths.get(resourceRoot, "test-src"); confRoot = Paths.get(resourceRoot, "datafiles"); compileResult = BCompileUtil.compile(sourceRoot.resolve("crypto-test.bal").toString()); } @Test(description = "Test hmac generation functions") public void testHmac() throws DecoderException { byte[] message = "Ballerina HMAC test".getBytes(StandardCharsets.UTF_8); byte[] key = "abcdefghijk".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); byte[] expectedMD5Hash = Hex.decodeHex("3D5AC29160F2905A5C8153597798A4C1".toCharArray()); byte[] expectedSHA1Hash = Hex.decodeHex("13DD8D54D0EB702EDC6E8EDCAF616837D3A51499".toCharArray()); byte[] expectedSHA256Hash = Hex .decodeHex("2651203E18BF0088D3EF1215022D147E2534FD4BAD5689C9E5F12436E9758B15".toCharArray()); byte[] expectedSHA384Hash = Hex.decodeHex(("c27a281dffed3d4d176646d7261e9f6268a3d40a237cd274fc2f5970f637f1c" + "bc20a3835d7b7aa7401308737f23a9bf7").toCharArray()); byte[] expectedSHA512Hash = Hex.decodeHex(("78d99bf3e5277fc893af6cd6b0487c33ed3abc4f956fdd1fada302f135b012a" + "3c71cadaaeb462e51ff281202bdfa8807719b91f69742c3f71f036c469ac5b918").toCharArray()); BValue[] args = {messageValue, keyValue}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHmacWithMD5", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedMD5Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA1", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == 
null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA1Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA256", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA256Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA384", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA384Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA512", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA512Hash); } @Test(description = "Test hmac generation with an empty password", expectedExceptions = BLangRuntimeException.class) public void testHmacNegativeInvalidKey() { BValue[] args = {new BValueArray("Ballerina HMAC test".getBytes(StandardCharsets.UTF_8)), new BValueArray("".getBytes(StandardCharsets.UTF_8))}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA1", args); } @Test(description = "Test hashing functions") public void testHashing() throws DecoderException { byte[] expectedMd5Hash = Hex.decodeHex("3B12196DB784CD9F86CC635D32764FDF".toCharArray()); byte[] expectedSha1Hash = Hex.decodeHex("73FBC15DB28D52C03359EDE7A7DC40B4A83DF207".toCharArray()); byte[] expectedSha256Hash = Hex .decodeHex("68F6CA0B55B55099331BF4EAA659B8BDC94FBDCE2F54D94FD90DA8240797A5D7".toCharArray()); byte[] expectedSha384Hash = Hex.decodeHex(("F00B4A8C67B38E7E32FF8B1AB570345743878F7ADED9B5FA02518DDD84E16CBC" + "A344AF42CB60A1FD5C48C5FEDCFF7F24").toCharArray()); byte[] expectedSha512hash = 
Hex.decodeHex(("1C9BED7C87E7D17BA07ADD67F59B4A29AFD2B046409B65429E77D0CEE53A33C5" + "E26731DC1CB091FAADA8C5D6433CB1544690804CC046A55D6AFED8BE0B901062").toCharArray()); BValue[] args = {new BValueArray("Ballerina test".getBytes(StandardCharsets.UTF_8))}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHashWithMD5", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedMd5Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA1", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha1Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA256", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha256Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA384", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha384Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA512", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha512hash); } @Test(description = "Test CRC32b generation") public void testCRC32() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); String expectedCRC32Hash = "d37b9692"; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHashWithCRC32b", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(returnValues[0].stringValue(), 
expectedCRC32Hash); } @Test(description = "Test RSA-SHA1 siging") public void testSignRsaSha1() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("70728d6d37fd83704bcb2649d93cfd20dbadb83a9d2169965d2a241795a131f" + "cfdb8b1b4f35f5de3c1f6f1d71ea0c9f80e494627b4c01d6e670ae4698b774171e8a017d62847c92aa47e868c230532af" + "9fc3a681387eead94578d2287674940df2e2f4a28f59688257254dfaab81c17617357ae05b42898412136abed116d6b86" + "eab68ff4ace029b67c7e4c5784a9bad00129b69d5afb6a89cb596cad56e8c98a1642eab87cb337980cc987708800e62a4" + "27c6f61828437d5491549b05025e9a98bf27825dc6002068678dde1e7d365407881b2b1a4d4e522a53f69e5b43202299e" + "02f7840f8991b8c335b0332b3b4bd658030ec3007f6f36c190b8663d3b746") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha1", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha1", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-SHA256 siging") public void testSignRsaSha256() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("34477f0e0a5457ca1a95049da10d59baa33ee4fa9e1bb8be3d3c70d82b980850" + "fd017a1c9984a97384736aacfe33d39ff8d63e01b952972910c86135b7558a2274c6d772f0d2fcdc0ac4aabc75f3978edb" + 
"d4aabd17d6447fb88e83b055bbff24d8212125b760c8bf88e9e4908645434f53a2ab0e3d5517c8e3241d8ebabbc767e7d9" + "24b5481621831f3a63e06c393c9378d782406705cd8823e12d3b4042a3cb738b8a8bb5731ff2934394c928c4262d130af6" + "6a2b507fc538bd16bccabc2f3b95137370dcca31e80866533bf445cf7f63aec6a9fa596333abb3a59d9b327891c7e6016e" + "0c11ef2a0d32088d4683d915005c9dcc8137611e5bff9dc4a5db6f87") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha256", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha256", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-384 siging") public void testSignRsaSha384() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("4981CC5213F384E8DB7950BF76C97AE20FA2A34244A517FC585B2381B9E88" + "278E447B92F6F452332BCA65DD5D6CCE04B5AC51D92E7E820B6FB826870DFBA437BBDA7F0E5850C02F72A8644DA8382" + "237E8C1ABD50A4BAEE179C8C838EA4AC53D2223B3C57D7D463A8E1BBFFC43F3F3C44494850377A8668E156B2D23B6E0" + "D8132632E3D79D68A391F619EF2E1E986A455F8F27092C66029C98D001A81FFE3E4B00991E7F0C0141D0635275544FC" + "5BF70A40C12B7BC765F6209C9640A60B9E978AD8DEC551983F5773A72327DF1A6256BEB8DF50A03F89443123E1354A9" + "EF7D8F8BF0659E1D6B77916B4AEEC79989AFDAA2F5B8983DE476C1A0FFBB2B647DE449E") 
.toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha384", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha384", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-512 siging") public void testSignRsaSha512() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("6995ba8d2382a8c4f0ed513033126b2305df419a8b105ee60483243229d2c496" + "b7f670783c52068cd2b4b8c2392f2932c682f30057cb4d8d616ba3a142356b0394747b2a3642da4d23447bb997eacb086f" + "173b4045ee8ee014e1e667e34522defb7a4ac1b5b3f175d40a409d947d562fcf7b2b2631d273751a0f8c658bd8c1d1d23a" + "0dbe685b15e13abf45f998114577c85a6478d915a445645a6360944e4962c56bee79d2363931c77f8040c620692debc747" + "4c1e62d9d4b0b39fa664b8c3a32155c7c1966ef3d55993ad8f7f3bf4d929cf047ab91344facefeba944b043e1e31496753" + "9cb2e6e669ec3352073a8933a2a0cac6056b4997b3628132f7a7e553") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha512", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == 
null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha512", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-MD5 siging") public void testSignRsaMd5() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("457050eca794baf2149f53631f373525fbc7b40de83e0af5b03473e7b726064b" + "3eb6a8b7ce48218e4adaf2b598429236192a458ad5cef1ab2f456164f2646ba57a1ce6b858403504ddc49915bf8bf34558" + "0366bd9f7d1d777572fcacd3aa935267af6cf5dc988668b8cea0f57cd0e286658f0ca7c060d7a68b6330bc590b6db59489" + "aa676b1c539e5bb0116c64a963f8a03789b9fd7e689bac5576eea15d93d45be3547aef7c7dc26251dfa7bdf23b47c6a346" + "ae3603c158cbd32ff9298df71f930cebdda8564199e948f1ac03173e9f9d425240c7f99857d5f469dd0b23c0248b4fa42e" + "67145ec0e6e8abfc3f7f10122cc278b5469eb970034483839f290eec") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaMd5", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaMd5", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); 
Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-SHA1 siging with an invalid private key") public void testSignRsaSha1WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha1WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-SHA256 siging with an invalid private key") public void testSignRsaSha256WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha256WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-SHA384 siging with an invalid private key") public void testSignRsaSha384WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha384WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-SHA512 siging with an invalid private key") public void testSignRsaSha512WithInvalidKey() { byte[] payload = "Ballerina 
test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha512WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-MD5 siging with an invalid private key") public void testSignRsaMd5WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaMd5WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding") public void testEncryptAesCbcNoPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("NONE")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesCbc", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || 
returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding using invalid key size") public void testEncryptAesCbcNoPaddingWithInvalidKeySize() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[31]; for (int i = 0; i < 31; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid key size. 
valid key sizes in bytes: [16, 24, 32]"); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding using invalid IV length") public void testEncryptAesCbcNoPaddingWithInvalidIvLength() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[15]; for (int i = 0; i < 15; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "Wrong IV length: must be 16 bytes long"); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding using invalid input length") public void testEncryptAesCbcNoPaddingWithInvalidInputLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) 
returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "Input length not multiple of 16 bytes"); } @Test(description = "Test encrypt and decrypt with AES CBC PKCS5") public void testEncryptAesCbcPkcs5() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS5")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("PKCS5")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesCbc", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES CBC PKCS1") public void testEncryptAesCbcPkcs1() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); 
Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "unsupported algorithm: AES CBC PKCS1"); } @Test(description = "Test encrypt and decrypt with AES ECB NoPadding") public void testEncryptAesEcbNoPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, new BString("NONE")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES ECB NoPadding using invalid key size") public void testEncryptAesEcbNoPaddingWithInvalidKeySize() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[31]; for (int i = 0; i < 31; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) 
returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid key size. valid key sizes in bytes: [16, 24, 32]"); } @Test(description = "Test encrypt and decrypt with AES ECB NoPadding using invalid input length") public void testEncryptAesEcbNoPaddingWithInvalidInputLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "Input length not multiple of 16 bytes"); } @Test(description = "Test encrypt and decrypt with AES ECB PKCS5") public void testEncryptAesEcbPkcs5() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("PKCS5")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, new BString("PKCS5")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } 
@Test(description = "Test encrypt and decrypt with AES ECB PKCS1") public void testEncryptAesEcbPkcs1() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "unsupported algorithm: AES ECB PKCS1"); } @Test(description = "Test encrypt and decrypt with AES GCM NoPadding") public void testEncryptAesGcmNoPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("NONE"), new BInteger(128)}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesGcm", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = 
"Test encrypt and decrypt with AES GCM NoPadding using invalid key size") public void testEncryptAesGcmNoPaddingWithInvalidKeySize() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[31]; for (int i = 0; i < 31; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid key size. valid key sizes in bytes: [16, 24, 32]"); } @Test(description = "Test encrypt and decrypt with AES GCM NoPadding using invalid input length") public void testEncryptAesGcmNoPaddingWithInvalidInputLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("NONE"), new BInteger(128)}; returnValues = 
BRunUtil.invoke(compileResult, "testDecryptAesGcm", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES GCM PKCS5") public void testEncryptAesGcmPkcs5() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS5"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("PKCS5"), new BInteger(128)}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesGcm", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES GCM PKCS1") public void testEncryptAesGcmPkcs1() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS1"), new BInteger(128)}; BValue[] returnValues = 
BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "unsupported algorithm: AES GCM PKCS1"); } @Test(description = "Test encrypt and decrypt with AES GCM PKCS5 with invalid tag value") public void testEncryptAesGcmPkcs5WithInvalidTagLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS5"), new BInteger(500)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertTrue(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue() .startsWith("invalid tag size. 
valid tag sizes in bytes:")); } @Test(description = "Test encrypt and decrypt with RSA ECB PKCS1") public void testEncryptRsaEcbPkcs1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPwithMD5andMGF1") public void testEncryptRsaEcbOAEPwithMD5andMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithMD5andMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new 
BString("OAEPwithMD5andMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPWithSHA1AndMGF1") public void testEncryptRsaEcbOAEPWithSHA1AndMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA1AndMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA1AndMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPWithSHA256AndMGF1") public void testEncryptRsaEcbOAEPWithSHA256AndMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA256AndMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null 
|| returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA256AndMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPwithSHA384andMGF1") public void testEncryptRsaEcbOAEPwithSHA384andMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA384andMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA384andMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPwithSHA512andMGF1") @Test(description = "Test encrypt with private key and decrypt with public key using RSA ECB PKCS1") public void testEncryptRsaEcbWithPrivateKeyPkcs1() { byte[] message = "Ballerina 
crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcbWithPrivateKey", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcbWithPublicKey", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB PKCS1 with an invalid key") public void testEncryptRsaEcbWithPrivateKeyPkcs1WithInvalidKey() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcbWithInvalidKey", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test encrypt and decrypt with RSA ECB PKCS1 with invalid padding", expectedExceptions = BLangRuntimeException.class) public void testEncryptRsaEcbWithPrivateKeyPkcs1WithInvalidPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] 
args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("PKCS99")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); } }
class CryptoTest { private static final int KEY_SIZE = 16; private CompileResult compileResult; private String resourceRoot; private Path sourceRoot; private Path confRoot; @BeforeClass public void setup() { resourceRoot = Paths.get("src", "test", "resources").toAbsolutePath().toString(); sourceRoot = Paths.get(resourceRoot, "test-src"); confRoot = Paths.get(resourceRoot, "datafiles"); compileResult = BCompileUtil.compile(sourceRoot.resolve("crypto-test.bal").toString()); } @Test(description = "Test hmac generation functions") public void testHmac() throws DecoderException { byte[] message = "Ballerina HMAC test".getBytes(StandardCharsets.UTF_8); byte[] key = "abcdefghijk".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); byte[] expectedMD5Hash = Hex.decodeHex("3D5AC29160F2905A5C8153597798A4C1".toCharArray()); byte[] expectedSHA1Hash = Hex.decodeHex("13DD8D54D0EB702EDC6E8EDCAF616837D3A51499".toCharArray()); byte[] expectedSHA256Hash = Hex .decodeHex("2651203E18BF0088D3EF1215022D147E2534FD4BAD5689C9E5F12436E9758B15".toCharArray()); byte[] expectedSHA384Hash = Hex.decodeHex(("c27a281dffed3d4d176646d7261e9f6268a3d40a237cd274fc2f5970f637f1c" + "bc20a3835d7b7aa7401308737f23a9bf7").toCharArray()); byte[] expectedSHA512Hash = Hex.decodeHex(("78d99bf3e5277fc893af6cd6b0487c33ed3abc4f956fdd1fada302f135b012a" + "3c71cadaaeb462e51ff281202bdfa8807719b91f69742c3f71f036c469ac5b918").toCharArray()); BValue[] args = {messageValue, keyValue}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHmacWithMD5", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedMD5Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA1", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == 
null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA1Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA256", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA256Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA384", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA384Hash); args = new BValue[]{messageValue, keyValue}; returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA512", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSHA512Hash); } @Test(description = "Test hmac generation with an empty password", expectedExceptions = BLangRuntimeException.class) public void testHmacNegativeInvalidKey() { BValue[] args = {new BValueArray("Ballerina HMAC test".getBytes(StandardCharsets.UTF_8)), new BValueArray("".getBytes(StandardCharsets.UTF_8))}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHmacWithSHA1", args); } @Test(description = "Test hashing functions") public void testHashing() throws DecoderException { byte[] expectedMd5Hash = Hex.decodeHex("3B12196DB784CD9F86CC635D32764FDF".toCharArray()); byte[] expectedSha1Hash = Hex.decodeHex("73FBC15DB28D52C03359EDE7A7DC40B4A83DF207".toCharArray()); byte[] expectedSha256Hash = Hex .decodeHex("68F6CA0B55B55099331BF4EAA659B8BDC94FBDCE2F54D94FD90DA8240797A5D7".toCharArray()); byte[] expectedSha384Hash = Hex.decodeHex(("F00B4A8C67B38E7E32FF8B1AB570345743878F7ADED9B5FA02518DDD84E16CBC" + "A344AF42CB60A1FD5C48C5FEDCFF7F24").toCharArray()); byte[] expectedSha512hash = 
Hex.decodeHex(("1C9BED7C87E7D17BA07ADD67F59B4A29AFD2B046409B65429E77D0CEE53A33C5" + "E26731DC1CB091FAADA8C5D6433CB1544690804CC046A55D6AFED8BE0B901062").toCharArray()); BValue[] args = {new BValueArray("Ballerina test".getBytes(StandardCharsets.UTF_8))}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHashWithMD5", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedMd5Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA1", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha1Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA256", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha256Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA384", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha384Hash); returnValues = BRunUtil.invoke(compileResult, "testHashWithSHA512", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSha512hash); } @Test(description = "Test CRC32b generation") public void testCRC32() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); String expectedCRC32Hash = "d37b9692"; BValue[] returnValues = BRunUtil.invoke(compileResult, "testHashWithCRC32b", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(returnValues[0].stringValue(), 
expectedCRC32Hash); } @Test(description = "Test RSA-SHA1 siging") public void testSignRsaSha1() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("70728d6d37fd83704bcb2649d93cfd20dbadb83a9d2169965d2a241795a131f" + "cfdb8b1b4f35f5de3c1f6f1d71ea0c9f80e494627b4c01d6e670ae4698b774171e8a017d62847c92aa47e868c230532af" + "9fc3a681387eead94578d2287674940df2e2f4a28f59688257254dfaab81c17617357ae05b42898412136abed116d6b86" + "eab68ff4ace029b67c7e4c5784a9bad00129b69d5afb6a89cb596cad56e8c98a1642eab87cb337980cc987708800e62a4" + "27c6f61828437d5491549b05025e9a98bf27825dc6002068678dde1e7d365407881b2b1a4d4e522a53f69e5b43202299e" + "02f7840f8991b8c335b0332b3b4bd658030ec3007f6f36c190b8663d3b746") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha1", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha1", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-SHA256 siging") public void testSignRsaSha256() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("34477f0e0a5457ca1a95049da10d59baa33ee4fa9e1bb8be3d3c70d82b980850" + "fd017a1c9984a97384736aacfe33d39ff8d63e01b952972910c86135b7558a2274c6d772f0d2fcdc0ac4aabc75f3978edb" + 
"d4aabd17d6447fb88e83b055bbff24d8212125b760c8bf88e9e4908645434f53a2ab0e3d5517c8e3241d8ebabbc767e7d9" + "24b5481621831f3a63e06c393c9378d782406705cd8823e12d3b4042a3cb738b8a8bb5731ff2934394c928c4262d130af6" + "6a2b507fc538bd16bccabc2f3b95137370dcca31e80866533bf445cf7f63aec6a9fa596333abb3a59d9b327891c7e6016e" + "0c11ef2a0d32088d4683d915005c9dcc8137611e5bff9dc4a5db6f87") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha256", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha256", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-384 siging") public void testSignRsaSha384() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("4981CC5213F384E8DB7950BF76C97AE20FA2A34244A517FC585B2381B9E88" + "278E447B92F6F452332BCA65DD5D6CCE04B5AC51D92E7E820B6FB826870DFBA437BBDA7F0E5850C02F72A8644DA8382" + "237E8C1ABD50A4BAEE179C8C838EA4AC53D2223B3C57D7D463A8E1BBFFC43F3F3C44494850377A8668E156B2D23B6E0" + "D8132632E3D79D68A391F619EF2E1E986A455F8F27092C66029C98D001A81FFE3E4B00991E7F0C0141D0635275544FC" + "5BF70A40C12B7BC765F6209C9640A60B9E978AD8DEC551983F5773A72327DF1A6256BEB8DF50A03F89443123E1354A9" + "EF7D8F8BF0659E1D6B77916B4AEEC79989AFDAA2F5B8983DE476C1A0FFBB2B647DE449E") 
.toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha384", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha384", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-512 siging") public void testSignRsaSha512() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("6995ba8d2382a8c4f0ed513033126b2305df419a8b105ee60483243229d2c496" + "b7f670783c52068cd2b4b8c2392f2932c682f30057cb4d8d616ba3a142356b0394747b2a3642da4d23447bb997eacb086f" + "173b4045ee8ee014e1e667e34522defb7a4ac1b5b3f175d40a409d947d562fcf7b2b2631d273751a0f8c658bd8c1d1d23a" + "0dbe685b15e13abf45f998114577c85a6478d915a445645a6360944e4962c56bee79d2363931c77f8040c620692debc747" + "4c1e62d9d4b0b39fa664b8c3a32155c7c1966ef3d55993ad8f7f3bf4d929cf047ab91344facefeba944b043e1e31496753" + "9cb2e6e669ec3352073a8933a2a0cac6056b4997b3628132f7a7e553") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha512", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == 
null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaSha512", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-MD5 siging") public void testSignRsaMd5() throws DecoderException { byte[] expectedSignature = Hex.decodeHex(("457050eca794baf2149f53631f373525fbc7b40de83e0af5b03473e7b726064b" + "3eb6a8b7ce48218e4adaf2b598429236192a458ad5cef1ab2f456164f2646ba57a1ce6b858403504ddc49915bf8bf34558" + "0366bd9f7d1d777572fcacd3aa935267af6cf5dc988668b8cea0f57cd0e286658f0ca7c060d7a68b6330bc590b6db59489" + "aa676b1c539e5bb0116c64a963f8a03789b9fd7e689bac5576eea15d93d45be3547aef7c7dc26251dfa7bdf23b47c6a346" + "ae3603c158cbd32ff9298df71f930cebdda8564199e948f1ac03173e9f9d425240c7f99857d5f469dd0b23c0248b4fa42e" + "67145ec0e6e8abfc3f7f10122cc278b5469eb970034483839f290eec") .toCharArray()); byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaMd5", new BValue[]{new BValueArray(payload), new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina")}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), expectedSignature); returnValues = BRunUtil.invoke(compileResult, "testVerifyRsaMd5", new BValue[]{new BValueArray(payload), returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina")}); 
Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BBoolean) returnValues[0]).booleanValue(), true); } @Test(description = "Test RSA-SHA1 siging with an invalid private key") public void testSignRsaSha1WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha1WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-SHA256 siging with an invalid private key") public void testSignRsaSha256WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha256WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-SHA384 siging with an invalid private key") public void testSignRsaSha384WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha384WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-SHA512 siging with an invalid private key") public void testSignRsaSha512WithInvalidKey() { byte[] payload = "Ballerina 
test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaSha512WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test RSA-MD5 siging with an invalid private key") public void testSignRsaMd5WithInvalidKey() { byte[] payload = "Ballerina test".getBytes(StandardCharsets.UTF_8); BValue[] returnValues = BRunUtil.invoke(compileResult, "testSignRsaMd5WithInvalidKey", new BValue[]{new BValueArray(payload)}); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding") public void testEncryptAesCbcNoPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("NONE")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesCbc", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || 
returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding using invalid key size") public void testEncryptAesCbcNoPaddingWithInvalidKeySize() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[31]; for (int i = 0; i < 31; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid key size. 
valid key sizes in bytes: [16, 24, 32]"); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding using invalid IV length") public void testEncryptAesCbcNoPaddingWithInvalidIvLength() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[15]; for (int i = 0; i < 15; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "Wrong IV length: must be 16 bytes long"); } @Test(description = "Test encrypt and decrypt with AES CBC NoPadding using invalid input length") public void testEncryptAesCbcNoPaddingWithInvalidInputLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) 
returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "Input length not multiple of 16 bytes"); } @Test(description = "Test encrypt and decrypt with AES CBC PKCS5") public void testEncryptAesCbcPkcs5() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS5")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("PKCS5")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesCbc", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES CBC PKCS1") public void testEncryptAesCbcPkcs1() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesCbc", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); 
Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "unsupported algorithm: AES CBC PKCS1"); } @Test(description = "Test encrypt and decrypt with AES ECB NoPadding") public void testEncryptAesEcbNoPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, new BString("NONE")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES ECB NoPadding using invalid key size") public void testEncryptAesEcbNoPaddingWithInvalidKeySize() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[31]; for (int i = 0; i < 31; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) 
returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid key size. valid key sizes in bytes: [16, 24, 32]"); } @Test(description = "Test encrypt and decrypt with AES ECB NoPadding using invalid input length") public void testEncryptAesEcbNoPaddingWithInvalidInputLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("NONE")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "Input length not multiple of 16 bytes"); } @Test(description = "Test encrypt and decrypt with AES ECB PKCS5") public void testEncryptAesEcbPkcs5() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("PKCS5")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, new BString("PKCS5")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } 
@Test(description = "Test encrypt and decrypt with AES ECB PKCS1") public void testEncryptAesEcbPkcs1() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "unsupported algorithm: AES ECB PKCS1"); } @Test(description = "Test encrypt and decrypt with AES GCM NoPadding") public void testEncryptAesGcmNoPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("NONE"), new BInteger(128)}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesGcm", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = 
"Test encrypt and decrypt with AES GCM NoPadding using invalid key size") public void testEncryptAesGcmNoPaddingWithInvalidKeySize() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[31]; for (int i = 0; i < 31; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid key size. valid key sizes in bytes: [16, 24, 32]"); } @Test(description = "Test encrypt and decrypt with AES GCM NoPadding using invalid input length") public void testEncryptAesGcmNoPaddingWithInvalidInputLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("NONE"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("NONE"), new BInteger(128)}; returnValues = 
BRunUtil.invoke(compileResult, "testDecryptAesGcm", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES GCM PKCS5") public void testEncryptAesGcmPkcs5() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[KEY_SIZE]; for (int i = 0; i < KEY_SIZE; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS5"), new BInteger(128)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], keyValue, ivValue, new BString("PKCS5"), new BInteger(128)}; returnValues = BRunUtil.invoke(compileResult, "testDecryptAesGcm", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with AES GCM PKCS1") public void testEncryptAesGcmPkcs1() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS1"), new BInteger(128)}; BValue[] returnValues = 
BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "unsupported algorithm: AES GCM PKCS1"); } @Test(description = "Test encrypt and decrypt with AES GCM PKCS5 with invalid tag value") public void testEncryptAesGcmPkcs5WithInvalidTagLength() { byte[] message = "Ballerina crypto test".getBytes(StandardCharsets.UTF_8); byte[] key = new byte[32]; for (int i = 0; i < 32; i++) { key[i] = (byte) i; } byte[] iv = new byte[16]; for (int i = 0; i < 16; i++) { iv[i] = (byte) i; } BValueArray ivValue = new BValueArray(iv); BValueArray messageValue = new BValueArray(message); BValueArray keyValue = new BValueArray(key); BValue[] args = {messageValue, keyValue, ivValue, new BString("PKCS5"), new BInteger(500)}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptAesGcm", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BError); Assert.assertTrue(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue() .startsWith("invalid tag size. 
valid tag sizes in bytes:")); } @Test(description = "Test encrypt and decrypt with RSA ECB PKCS1") public void testEncryptRsaEcbPkcs1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPwithMD5andMGF1") public void testEncryptRsaEcbOAEPwithMD5andMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithMD5andMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new 
BString("OAEPwithMD5andMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPWithSHA1AndMGF1") public void testEncryptRsaEcbOAEPWithSHA1AndMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA1AndMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA1AndMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPWithSHA256AndMGF1") public void testEncryptRsaEcbOAEPWithSHA256AndMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA256AndMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null 
|| returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPWithSHA256AndMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPwithSHA384andMGF1") public void testEncryptRsaEcbOAEPwithSHA384andMGF1() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA384andMGF1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("OAEPwithSHA384andMGF1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcb", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB OAEPwithSHA512andMGF1") @Test(description = "Test encrypt with private key and decrypt with public key using RSA ECB PKCS1") public void testEncryptRsaEcbWithPrivateKeyPkcs1() { byte[] message = "Ballerina 
crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcbWithPrivateKey", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertTrue(returnValues[0] instanceof BValueArray); BValue[] args1 = {returnValues[0], new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("PKCS1")}; returnValues = BRunUtil.invoke(compileResult, "testDecryptRsaEcbWithPublicKey", args1); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BValueArray) returnValues[0]).getBytes(), message); } @Test(description = "Test encrypt and decrypt with RSA ECB PKCS1 with an invalid key") public void testEncryptRsaEcbWithPrivateKeyPkcs1WithInvalidKey() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] args = {messageValue, new BString("PKCS1")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcbWithInvalidKey", args); Assert.assertFalse(returnValues == null || returnValues.length == 0 || returnValues[0] == null); Assert.assertEquals(((BMap) ((BError) returnValues[0]).getDetails()).get(Constants.MESSAGE).stringValue(), "invalid uninitialized key"); } @Test(description = "Test encrypt and decrypt with RSA ECB PKCS1 with invalid padding", expectedExceptions = BLangRuntimeException.class) public void testEncryptRsaEcbWithPrivateKeyPkcs1WithInvalidPadding() { byte[] message = "Ballerina crypto test ".getBytes(StandardCharsets.UTF_8); BValueArray messageValue = new BValueArray(message); BValue[] 
args = {messageValue, new BString(confRoot.resolve("testKeystore.p12").toString()), new BString("ballerina"), new BString("ballerina"), new BString("PKCS99")}; BValue[] returnValues = BRunUtil.invoke(compileResult, "testEncryptRsaEcb", args); } }
+1. We can remove that variable.
private boolean checkUnionHasSameType(BUnionType unionType, BType baseType) { LinkedHashSet<BType> memberTypes = unionType.getMemberTypes(); boolean hasSameOrderedType = false; for (BType type : memberTypes) { type = getReferredType(type); if (type.tag == TypeTags.FINITE) { for (BLangExpression expr : ((BFiniteType) type).getValueSpace()) { hasSameOrderedType = isSameOrderedType(expr.getBType(), baseType); if (!hasSameOrderedType) { return false; } } } else if (type.tag == TypeTags.UNION) { if (!checkUnionHasSameType((BUnionType) type, baseType)) { return false; } hasSameOrderedType = true; } else if (type.tag == TypeTags.TUPLE || type.tag == TypeTags.ARRAY) { if (!isSameOrderedType(type, baseType)) { return false; } hasSameOrderedType = true; } else if (type.tag == TypeTags.INTERSECTION) { if (!isSameOrderedType(getEffectiveTypeForIntersection(type), baseType)) { return false; } hasSameOrderedType = true; } else if (isSimpleBasicType(type.tag)) { hasSameOrderedType = isSameOrderedType(type, baseType) || isNil(type); if (!hasSameOrderedType) { return false; } } } return hasSameOrderedType; }
boolean hasSameOrderedType = false;
private boolean checkUnionHasSameType(BUnionType unionType, BType baseType) { LinkedHashSet<BType> memberTypes = unionType.getMemberTypes(); for (BType type : memberTypes) { type = getReferredType(type); if (type.tag == TypeTags.FINITE) { for (BLangExpression expr : ((BFiniteType) type).getValueSpace()) { if (!isSameOrderedType(expr.getBType(), baseType)) { return false; } } } else if (type.tag == TypeTags.TUPLE || type.tag == TypeTags.ARRAY) { if (!isSameOrderedType(type, baseType)) { return false; } } else if (type.tag == TypeTags.INTERSECTION) { if (!isSameOrderedType(getEffectiveTypeForIntersection(type), baseType)) { return false; } } else if (isSimpleBasicType(type.tag)) { if (!isSameOrderedType(type, baseType) && !isNil(type)) { return false; } } } return true; }
class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType target, BType source) { BType sourceType = getReferredType(source); BType targetType = getReferredType(target); int sourceTag = sourceType.tag; int targetTag = targetType.tag; if (sourceTag == TypeTags.INTERSECTION || targetTag == TypeTags.INTERSECTION) { sourceTag = getEffectiveTypeForIntersection(sourceType).tag; targetTag = getEffectiveTypeForIntersection(targetType).tag; } if (isSimpleBasicType(sourceTag) && isSimpleBasicType(targetTag)) { return (source == target) || sourceTag == TypeTags.NIL || targetTag == TypeTags.NIL || isIntOrStringType(sourceTag, targetTag); } if (sourceTag == TypeTags.FINITE) { return checkValueSpaceHasSameType(((BFiniteType) sourceType), targetType); } return isSameOrderedType(targetType, sourceType, this.unresolvedTypes); } @Override public Boolean visit(BArrayType target, BType source) { source = getReferredType(source); if (source.tag == TypeTags.INTERSECTION) { source = getEffectiveTypeForIntersection(source); } if (source.tag != TypeTags.ARRAY) { if (source.tag == TypeTags.TUPLE || source.tag == TypeTags.UNION) { return isSameOrderedType(target, source); } return false; } return isSameOrderedType(target.eType, ((BArrayType) source).eType, unresolvedTypes); } @Override public Boolean visit(BTupleType target, BType source) { source = getReferredType(source); if (source.tag == TypeTags.INTERSECTION) { source = getEffectiveTypeForIntersection(source); } if (source.tag == TypeTags.UNION) { return isSameOrderedType(target, source); } if (source.tag != TypeTags.TUPLE && source.tag != TypeTags.ARRAY) { return false; } List<BType> targetTupleTypes = target.tupleTypes; BType targetRestType = target.restType; if (source.tag == TypeTags.ARRAY) { BType eType = ((BArrayType) source).eType; for (BType memberType : 
targetTupleTypes) { if (!isSameOrderedType(eType, memberType, this.unresolvedTypes)) { return false; } } if (targetRestType == null) { return true; } return isSameOrderedType(targetRestType, eType, this.unresolvedTypes); } BTupleType sourceT = (BTupleType) source; List<BType> sourceTupleTypes = sourceT.tupleTypes; BType sourceRestType = sourceT.restType; int sourceTupleCount = sourceTupleTypes.size(); int targetTupleCount = targetTupleTypes.size(); int len = Math.min(sourceTupleCount, targetTupleCount); for (int i = 0; i < len; i++) { if (!isSameOrderedType(sourceTupleTypes.get(i), targetTupleTypes.get(i), this.unresolvedTypes)) { return false; } } if (sourceTupleCount > targetTupleCount) { BType baseType = sourceTupleTypes.get(targetTupleCount); if (!hasCommonOrderedTypeForTuples(baseType, sourceTupleTypes, targetTupleCount + 1)) { return false; } } else if (sourceTupleCount < targetTupleCount) { BType baseType = targetTupleTypes.get(sourceTupleCount); if (!hasCommonOrderedTypeForTuples(baseType, targetTupleTypes, sourceTupleCount + 1)) { return false; } } if (sourceTupleCount == targetTupleCount) { if (sourceRestType == null || targetRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } if (sourceTupleCount > targetTupleCount) { return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType, targetRestType); } return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType, sourceRestType); } private boolean hasCommonOrderedTypeForTuples(BType baseType, List<BType> typeList, int startIndex) { for (int i = startIndex; i < typeList.size(); i++) { if (isNil(baseType)) { baseType = typeList.get(i); continue; } if (!isSameOrderedType(baseType, typeList.get(i), this.unresolvedTypes)) { return false; } } return true; } private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount, int targetTupleCount, BType 
sourceRestType, BType targetRestType) { if (targetRestType == null) { return true; } for (int i = targetTupleCount; i < sourceTupleCount; i++) { if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) { return false; } } if (sourceRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } @Override public Boolean visit(BUnionType target, BType source) { if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) { return checkUnionHasSameType(target, source); } BUnionType sUnionType = (BUnionType) source; LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes(); LinkedHashSet<BType> targetTypes = target.getMemberTypes(); if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) && checkUnionHasAllFiniteOrNilMembers(targetTypes)) { BType type = target.getMemberTypes().iterator().next(); return checkValueSpaceHasSameType(((BFiniteType) getReferredType(type)), sUnionType.getMemberTypes().iterator().next()); } return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes); } private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes, LinkedHashSet<BType> targetTypes) { for (BType sourceT : sourceTypes) { boolean foundSameOrderedType = false; if (isNil(sourceT)) { continue; } for (BType targetT : targetTypes) { if (isNil(targetT)) { foundSameOrderedType = true; continue; } if (isSameOrderedType(targetT, sourceT, this.unresolvedTypes)) { foundSameOrderedType = true; } else { return false; } } if (!foundSameOrderedType) { return false; } } return true; } @Override public Boolean visit(BFiniteType t, BType s) { return checkValueSpaceHasSameType(t, s); } private boolean hasSameReadonlyFlag(BType source, BType target) { return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY); } @Override public Boolean visit(BBuiltInRefType t, BType s) { return false; } @Override public Boolean 
visit(BAnyType t, BType s) { return false; } @Override public Boolean visit(BAnydataType t, BType s) { return false; } @Override public Boolean visit(BMapType t, BType s) { return false; } @Override public Boolean visit(BFutureType t, BType s) { return false; } @Override public Boolean visit(BXMLType t, BType s) { return false; } @Override public Boolean visit(BJSONType t, BType s) { return false; } @Override public Boolean visit(BObjectType t, BType s) { return false; } @Override public Boolean visit(BRecordType t, BType s) { return false; } @Override public Boolean visit(BStreamType t, BType s) { return false; } @Override public Boolean visit(BTableType t, BType s) { return false; } @Override public Boolean visit(BInvokableType t, BType s) { return false; } @Override public Boolean visit(BIntersectionType tIntersectionType, BType s) { return this.visit(getEffectiveTypeForIntersection(tIntersectionType), s); } @Override public Boolean visit(BErrorType t, BType s) { return false; } @Override public Boolean visit(BTypedescType t, BType s) { return false; } public Boolean visit(BTypeReferenceType t, BType s) { return this.visit(getReferredType(t), t); } @Override public Boolean visit(BParameterizedType t, BType s) { return false; } }
class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> { Set<TypePair> unresolvedTypes; BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) { this.unresolvedTypes = unresolvedTypes; } @Override public Boolean visit(BType target, BType source) { int sourceTag = source.tag; int targetTag = target.tag; if (isSimpleBasicType(sourceTag) && isSimpleBasicType(targetTag)) { return (source == target) || sourceTag == TypeTags.NIL || targetTag == TypeTags.NIL || isIntOrStringType(sourceTag, targetTag); } if (sourceTag == TypeTags.FINITE) { return checkValueSpaceHasSameType(((BFiniteType) source), target); } return isSameOrderedType(target, source, this.unresolvedTypes); } @Override public Boolean visit(BArrayType target, BType source) { if (source.tag != TypeTags.ARRAY) { if (source.tag == TypeTags.TUPLE || source.tag == TypeTags.UNION) { return isSameOrderedType(target, source); } return false; } return isSameOrderedType(target.eType, ((BArrayType) source).eType, unresolvedTypes); } @Override public Boolean visit(BTupleType target, BType source) { if (source.tag == TypeTags.UNION) { return isSameOrderedType(target, source); } if (source.tag != TypeTags.TUPLE && source.tag != TypeTags.ARRAY) { return false; } List<BType> targetTupleTypes = target.tupleTypes; BType targetRestType = target.restType; if (source.tag == TypeTags.ARRAY) { BType eType = ((BArrayType) source).eType; for (BType memberType : targetTupleTypes) { if (!isSameOrderedType(eType, memberType, this.unresolvedTypes)) { return false; } } if (targetRestType == null) { return true; } return isSameOrderedType(targetRestType, eType, this.unresolvedTypes); } BTupleType sourceT = (BTupleType) source; List<BType> sourceTupleTypes = sourceT.tupleTypes; BType sourceRestType = sourceT.restType; int sourceTupleCount = sourceTupleTypes.size(); int targetTupleCount = targetTupleTypes.size(); int len = Math.min(sourceTupleCount, targetTupleCount); for (int i = 0; i < len; i++) { if 
(!isSameOrderedType(sourceTupleTypes.get(i), targetTupleTypes.get(i), this.unresolvedTypes)) { return false; } } if (sourceTupleCount == targetTupleCount) { if (sourceRestType == null || targetRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } else if (sourceTupleCount > targetTupleCount) { if (!hasCommonOrderedTypeForTuples(sourceTupleTypes, targetTupleCount + 1)) { return false; } return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType, targetRestType); } else { if (!hasCommonOrderedTypeForTuples(targetTupleTypes, sourceTupleCount + 1)) { return false; } return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType, sourceRestType); } } private boolean hasCommonOrderedTypeForTuples(List<BType> typeList, int startIndex) { BType baseType = typeList.get(startIndex - 1); for (int i = startIndex; i < typeList.size(); i++) { if (isNil(baseType)) { baseType = typeList.get(i); continue; } if (!isSameOrderedType(baseType, typeList.get(i), this.unresolvedTypes)) { return false; } } return true; } private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount, int targetTupleCount, BType sourceRestType, BType targetRestType) { if (targetRestType == null) { return true; } for (int i = targetTupleCount; i < sourceTupleCount; i++) { if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) { return false; } } if (sourceRestType == null) { return true; } return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes); } @Override public Boolean visit(BUnionType target, BType source) { if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) { return checkUnionHasSameType(target, source); } BUnionType sUnionType = (BUnionType) source; LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes(); LinkedHashSet<BType> targetTypes = 
target.getMemberTypes(); if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) && checkUnionHasAllFiniteOrNilMembers(targetTypes)) { BType type = target.getMemberTypes().iterator().next(); return checkValueSpaceHasSameType(((BFiniteType) getReferredType(type)), sUnionType.getMemberTypes().iterator().next()); } return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes); } private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes, LinkedHashSet<BType> targetTypes) { for (BType sourceT : sourceTypes) { boolean foundSameOrderedType = false; if (isNil(sourceT)) { continue; } for (BType targetT : targetTypes) { if (isNil(targetT)) { foundSameOrderedType = true; continue; } if (isSameOrderedType(targetT, sourceT, this.unresolvedTypes)) { foundSameOrderedType = true; } else { return false; } } if (!foundSameOrderedType) { return false; } } return true; } @Override public Boolean visit(BFiniteType t, BType s) { return checkValueSpaceHasSameType(t, s); } private boolean hasSameReadonlyFlag(BType source, BType target) { return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY); } @Override public Boolean visit(BBuiltInRefType t, BType s) { return false; } @Override public Boolean visit(BAnyType t, BType s) { return false; } @Override public Boolean visit(BAnydataType t, BType s) { return false; } @Override public Boolean visit(BMapType t, BType s) { return false; } @Override public Boolean visit(BFutureType t, BType s) { return false; } @Override public Boolean visit(BXMLType t, BType s) { return false; } @Override public Boolean visit(BJSONType t, BType s) { return false; } @Override public Boolean visit(BObjectType t, BType s) { return false; } @Override public Boolean visit(BRecordType t, BType s) { return false; } @Override public Boolean visit(BStreamType t, BType s) { return false; } @Override public Boolean visit(BTableType t, BType s) { return false; } @Override public Boolean 
visit(BInvokableType t, BType s) { return false; } @Override public Boolean visit(BIntersectionType tIntersectionType, BType s) { return this.visit(getEffectiveTypeForIntersection(tIntersectionType), s); } @Override public Boolean visit(BErrorType t, BType s) { return false; } @Override public Boolean visit(BTypedescType t, BType s) { return false; } public Boolean visit(BTypeReferenceType t, BType s) { return this.visit(getReferredType(t), t); } @Override public Boolean visit(BParameterizedType t, BType s) { return false; } }
Done, though the purpose of this empty line was to separate the "given" from the "when" in the test
void assertReviserReturnsRevisedIndex() {
    // Given: an index whose name carries the "_tableName" suffix used for single tables
    IndexMetaData originalMetaData = new IndexMetaData("test_idx_tableName");
    originalMetaData.getColumns().add("column1");
    originalMetaData.getColumns().add("column2");
    originalMetaData.setUnique(true);
    SingleIndexReviser reviser = new SingleIndexReviser();
    // When: the reviser rewrites the index for table "tableName"
    Optional<IndexMetaData> optionalRevised = reviser.revise("tableName", originalMetaData, null);
    // Then: uniqueness and columns are preserved and the table suffix is stripped from the name.
    assertTrue(optionalRevised.isPresent());
    IndexMetaData actual = optionalRevised.get();
    // Hamcrest's assertThat takes (actual, matcher) — the original had the arguments
    // reversed, which yields misleading failure messages on mismatch.
    assertThat(actual.isUnique(), is(originalMetaData.isUnique()));
    assertThat(actual.getColumns(), is(originalMetaData.getColumns()));
    assertThat(actual.getName(), is("test_idx"));
}
void assertReviserReturnsRevisedIndex() {
    // Given: an index whose name carries the "_tableName" suffix used for single tables
    IndexMetaData originalMetaData = new IndexMetaData("test_idx_tableName");
    originalMetaData.getColumns().add("column1");
    originalMetaData.getColumns().add("column2");
    originalMetaData.setUnique(true);
    SingleIndexReviser reviser = new SingleIndexReviser();
    // When: the reviser rewrites the index for table "tableName"
    Optional<IndexMetaData> optionalRevised = reviser.revise("tableName", originalMetaData, null);
    // Then: uniqueness and columns are preserved and the table suffix is stripped from the name.
    assertTrue(optionalRevised.isPresent());
    IndexMetaData actual = optionalRevised.get();
    // Hamcrest's assertThat takes (actual, matcher) — the original had the arguments
    // reversed, which yields misleading failure messages on mismatch.
    assertThat(actual.isUnique(), is(originalMetaData.isUnique()));
    assertThat(actual.getColumns(), is(originalMetaData.getColumns()));
    assertThat(actual.getName(), is("test_idx"));
}
class SingleIndexReviserTest { @Test }
class SingleIndexReviserTest { @Test }
Add an endpoint/region check for S3: at least one of the two must be non-empty — it is valid to set either one, but not to leave both empty.
/**
 * Validates the storage-related configuration items for the configured
 * cloud-native storage type (S3, HDFS or Azure Blob).
 *
 * @throws InvalidConfException if a required configuration item is missing or invalid
 */
public void validateStorageVolumeConfig() throws InvalidConfException {
    switch (Config.cloud_native_storage_type.toLowerCase()) {
        case "s3":
            String[] bucketAndPrefix = getBucketAndPrefix();
            String bucket = bucketAndPrefix[0];
            if (bucket.isEmpty()) {
                throw new InvalidConfException(
                        String.format("The configuration item \"aws_s3_path = %s\" is invalid, s3 bucket is empty.",
                                Config.aws_s3_path));
            }
            // Either a region or an explicit endpoint must be provided;
            // leaving both empty makes the S3 client unusable.
            if (Config.aws_s3_region.isEmpty() && Config.aws_s3_endpoint.isEmpty()) {
                throw new InvalidConfException(
                        "Both configuration item \"aws_s3_region\" and \"aws_s3_endpoint\" are empty");
            }
            String credentialType = getAwsCredentialType();
            if (credentialType == null) {
                throw new InvalidConfException("Invalid aws credential configuration.");
            }
            break;
        case "hdfs":
            if (Config.cloud_native_hdfs_url.isEmpty()) {
                throw new InvalidConfException("The configuration item \"cloud_native_hdfs_url\" is empty.");
            }
            break;
        case "azblob":
            if (Config.azure_blob_endpoint.isEmpty()) {
                throw new InvalidConfException("The configuration item \"azure_blob_endpoint\" is empty.");
            }
            if (Config.azure_blob_path.isEmpty()) {
                throw new InvalidConfException("The configuration item \"azure_blob_path\" is empty.");
            }
            break;
        default:
            throw new InvalidConfException(String.format(
                    "The configuration item \"cloud_native_storage_type = %s\" is invalid, must be HDFS or S3 or AZBLOB.",
                    Config.cloud_native_storage_type));
    }
}
case "hdfs":
public void validateStorageVolumeConfig() throws InvalidConfException { switch (Config.cloud_native_storage_type.toLowerCase()) { case "s3": String[] bucketAndPrefix = getBucketAndPrefix(); String bucket = bucketAndPrefix[0]; if (bucket.isEmpty()) { throw new InvalidConfException( String.format("The configuration item \"aws_s3_path = %s\" is invalid, s3 bucket is empty.", Config.aws_s3_path)); } if (Config.aws_s3_region.isEmpty() && Config.aws_s3_endpoint.isEmpty()) { throw new InvalidConfException( "Both configuration item \"aws_s3_region\" and \"aws_s3_endpoint\" are empty"); } String credentialType = getAwsCredentialType(); if (credentialType == null) { throw new InvalidConfException("Invalid aws credential configuration."); } break; case "hdfs": if (Config.cloud_native_hdfs_url.isEmpty()) { throw new InvalidConfException("The configuration item \"cloud_native_hdfs_url\" is empty."); } break; case "azblob": if (Config.azure_blob_endpoint.isEmpty()) { throw new InvalidConfException("The configuration item \"azure_blob_endpoint\" is empty."); } if (Config.azure_blob_path.isEmpty()) { throw new InvalidConfException("The configuration item \"azure_blob_path\" is empty."); } break; default: throw new InvalidConfException(String.format( "The configuration item \"cloud_native_storage_type = %s\" is invalid, must be HDFS or S3 or AZBLOB.", Config.cloud_native_storage_type)); } }
class SharedDataStorageVolumeMgr extends StorageVolumeMgr { @Override public StorageVolume getStorageVolumeByName(String svName) { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { try { FileStoreInfo fileStoreInfo = GlobalStateMgr.getCurrentState().getStarOSAgent().getFileStoreByName(svName); if (fileStoreInfo == null) { return null; } return StorageVolume.fromFileStoreInfo(fileStoreInfo); } catch (DdlException e) { throw new SemanticException(e.getMessage()); } } } @Override public StorageVolume getStorageVolume(String storageVolumeId) { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { try { FileStoreInfo fileStoreInfo = GlobalStateMgr.getCurrentState().getStarOSAgent().getFileStore(storageVolumeId); if (fileStoreInfo == null) { return null; } return StorageVolume.fromFileStoreInfo(fileStoreInfo); } catch (DdlException e) { throw new SemanticException(e.getMessage()); } } } @Override public List<String> listStorageVolumeNames() throws DdlException { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { return GlobalStateMgr.getCurrentState().getStarOSAgent().listFileStore() .stream().map(FileStoreInfo::getFsName).collect(Collectors.toList()); } } @Override protected String createInternalNoLock(String name, String svType, List<String> locations, Map<String, String> params, Optional<Boolean> enabled, String comment) throws DdlException { FileStoreInfo fileStoreInfo = StorageVolume.createFileStoreInfo(name, svType, locations, params, enabled.orElse(true), comment); return GlobalStateMgr.getCurrentState().getStarOSAgent().addFileStore(fileStoreInfo); } @Override protected void updateInternalNoLock(StorageVolume sv) throws DdlException { GlobalStateMgr.getCurrentState().getStarOSAgent().updateFileStore(sv.toFileStoreInfo()); } @Override protected void removeInternalNoLock(StorageVolume sv) throws DdlException { GlobalStateMgr.getCurrentState().getStarOSAgent().removeFileStoreByName(sv.getName()); } @Override public 
StorageVolume getDefaultStorageVolume() { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { if (defaultStorageVolumeId.isEmpty()) { return getStorageVolumeByName(BUILTIN_STORAGE_VOLUME); } return getStorageVolume(getDefaultStorageVolumeId()); } } private String getStorageVolumeIdOfDb(String svKey) throws DdlException { StorageVolume sv = null; if (svKey.equals(StorageVolumeMgr.DEFAULT)) { sv = getDefaultStorageVolume(); if (sv == null) { throw new DdlException("Default storage volume not exists, it should be created first"); } } else { sv = getStorageVolumeByName(svKey); if (sv == null) { throw new DdlException("Unknown storage volume \"" + svKey + "\""); } } return sv.getId(); } private boolean bindDbToStorageVolume(String svId, long dbId, boolean isReplay) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!isReplay && !storageVolumeToDbs.containsKey(svId) && getStorageVolume(svId) == null) { return false; } Set<Long> dbs = storageVolumeToDbs.getOrDefault(svId, new HashSet<>()); dbs.add(dbId); storageVolumeToDbs.put(svId, dbs); dbToStorageVolume.put(dbId, svId); return true; } } @Override public boolean bindDbToStorageVolume(String svKey, long dbId) throws DdlException { String svId = getStorageVolumeIdOfDb(svKey); return bindDbToStorageVolume(svId, dbId, false); } @Override public void replayBindDbToStorageVolume(String svId, long dbId) { bindDbToStorageVolume(svId, dbId, true); } @Override public void unbindDbToStorageVolume(long dbId) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!dbToStorageVolume.containsKey(dbId)) { return; } String svId = dbToStorageVolume.remove(dbId); Set<Long> dbs = storageVolumeToDbs.get(svId); dbs.remove(dbId); if (dbs.isEmpty()) { storageVolumeToDbs.remove(svId); } } } private String getStorageVolumeIdOfTable(String svKey, long dbId) throws DdlException { StorageVolume sv = null; if (svKey.isEmpty()) { String dbStorageVolumeId = getStorageVolumeIdOfDb(dbId); if 
(dbStorageVolumeId != null) { return dbStorageVolumeId; } else { sv = getStorageVolumeByName(BUILTIN_STORAGE_VOLUME); if (sv == null) { throw new DdlException("Builtin storage volume not exists, please check the params in config"); } } } else if (svKey.equals(StorageVolumeMgr.DEFAULT)) { sv = getDefaultStorageVolume(); if (sv == null) { throw new DdlException("Default storage volume not exists, it should be created first"); } } else { sv = getStorageVolumeByName(svKey); if (sv == null) { throw new DdlException("Unknown storage volume \"" + svKey + "\""); } } return sv.getId(); } @Override public boolean bindTableToStorageVolume(String svKey, long dbId, long tableId) throws DdlException { String svId = getStorageVolumeIdOfTable(svKey, dbId); return bindTableToStorageVolume(svId, tableId, false); } @Override public void replayBindTableToStorageVolume(String svId, long tableId) { bindTableToStorageVolume(svId, tableId, true); } private boolean bindTableToStorageVolume(String svId, long tableId, boolean isReplay) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!isReplay && !storageVolumeToDbs.containsKey(svId) && !storageVolumeToTables.containsKey(svId) && getStorageVolume(svId) == null) { return false; } Set<Long> tables = storageVolumeToTables.getOrDefault(svId, new HashSet<>()); tables.add(tableId); storageVolumeToTables.put(svId, tables); tableToStorageVolume.put(tableId, svId); return true; } } @Override public void unbindTableToStorageVolume(long tableId) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!tableToStorageVolume.containsKey(tableId)) { return; } String svId = tableToStorageVolume.remove(tableId); Set<Long> tables = storageVolumeToTables.get(svId); tables.remove(tableId); if (tables.isEmpty()) { storageVolumeToTables.remove(svId); } } } @Override public String createBuiltinStorageVolume() throws DdlException, AlreadyExistsException { if (!Config.enable_volume_from_conf) { return ""; } List<String> 
locations = parseLocationsFromConfig(); Map<String, String> params = parseParamsFromConfig(); try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { StorageVolume sv = getStorageVolumeByName(BUILTIN_STORAGE_VOLUME); if (sv != null) { return sv.getId(); } String svId = createStorageVolume(BUILTIN_STORAGE_VOLUME, Config.cloud_native_storage_type, locations, params, Optional.of(true), ""); if (getDefaultStorageVolumeId().isEmpty()) { setDefaultStorageVolume(BUILTIN_STORAGE_VOLUME); } return svId; } } private String[] getBucketAndPrefix() { int index = Config.aws_s3_path.indexOf('/'); if (index < 0) { return new String[] {Config.aws_s3_path, ""}; } return new String[] {Config.aws_s3_path.substring(0, index), Config.aws_s3_path.substring(index + 1)}; } private String getAwsCredentialType() { if (Config.aws_s3_use_aws_sdk_default_behavior) { return "default"; } if (Config.aws_s3_use_instance_profile) { if (Config.aws_s3_iam_role_arn.isEmpty()) { return "instance_profile"; } return "assume_role"; } if (Config.aws_s3_access_key.isEmpty() || Config.aws_s3_secret_key.isEmpty()) { return null; } if (Config.aws_s3_iam_role_arn.isEmpty()) { return "simple"; } return null; } private List<String> parseLocationsFromConfig() { List<String> locations = new ArrayList<>(); switch (Config.cloud_native_storage_type.toLowerCase()) { case "s3": locations.add("s3: break; case "hdfs": locations.add(Config.cloud_native_hdfs_url); break; case "azblob": locations.add("azblob: break; default: return locations; } return locations; } private Map<String, String> parseParamsFromConfig() { Map<String, String> params = new HashMap<>(); switch (Config.cloud_native_storage_type.toLowerCase()) { case "s3": params.put(CloudConfigurationConstants.AWS_S3_ACCESS_KEY, Config.aws_s3_access_key); params.put(CloudConfigurationConstants.AWS_S3_SECRET_KEY, Config.aws_s3_secret_key); params.put(CloudConfigurationConstants.AWS_S3_REGION, Config.aws_s3_region); 
params.put(CloudConfigurationConstants.AWS_S3_ENDPOINT, Config.aws_s3_endpoint); params.put(CloudConfigurationConstants.AWS_S3_EXTERNAL_ID, Config.aws_s3_external_id); params.put(CloudConfigurationConstants.AWS_S3_IAM_ROLE_ARN, Config.aws_s3_iam_role_arn); params.put(CloudConfigurationConstants.AWS_S3_USE_AWS_SDK_DEFAULT_BEHAVIOR, String.valueOf(Config.aws_s3_use_aws_sdk_default_behavior)); params.put(CloudConfigurationConstants.AWS_S3_USE_INSTANCE_PROFILE, String.valueOf(Config.aws_s3_use_instance_profile)); break; case "hdfs": break; case "azblob": params.put(CloudConfigurationConstants.AZURE_BLOB_SHARED_KEY, Config.azure_blob_shared_key); params.put(CloudConfigurationConstants.AZURE_BLOB_SAS_TOKEN, Config.azure_blob_sas_token); params.put(CloudConfigurationConstants.AZURE_BLOB_ENDPOINT, Config.azure_blob_endpoint); break; default: return params; } return params; } }
class SharedDataStorageVolumeMgr extends StorageVolumeMgr { @Override public StorageVolume getStorageVolumeByName(String svName) { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { try { FileStoreInfo fileStoreInfo = GlobalStateMgr.getCurrentState().getStarOSAgent().getFileStoreByName(svName); if (fileStoreInfo == null) { return null; } return StorageVolume.fromFileStoreInfo(fileStoreInfo); } catch (DdlException e) { throw new SemanticException(e.getMessage()); } } } @Override public StorageVolume getStorageVolume(String storageVolumeId) { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { try { FileStoreInfo fileStoreInfo = GlobalStateMgr.getCurrentState().getStarOSAgent().getFileStore(storageVolumeId); if (fileStoreInfo == null) { return null; } return StorageVolume.fromFileStoreInfo(fileStoreInfo); } catch (DdlException e) { throw new SemanticException(e.getMessage()); } } } @Override public List<String> listStorageVolumeNames() throws DdlException { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { return GlobalStateMgr.getCurrentState().getStarOSAgent().listFileStore() .stream().map(FileStoreInfo::getFsName).collect(Collectors.toList()); } } @Override protected String createInternalNoLock(String name, String svType, List<String> locations, Map<String, String> params, Optional<Boolean> enabled, String comment) throws DdlException { FileStoreInfo fileStoreInfo = StorageVolume.createFileStoreInfo(name, svType, locations, params, enabled.orElse(true), comment); return GlobalStateMgr.getCurrentState().getStarOSAgent().addFileStore(fileStoreInfo); } @Override protected void updateInternalNoLock(StorageVolume sv) throws DdlException { GlobalStateMgr.getCurrentState().getStarOSAgent().updateFileStore(sv.toFileStoreInfo()); } @Override protected void removeInternalNoLock(StorageVolume sv) throws DdlException { GlobalStateMgr.getCurrentState().getStarOSAgent().removeFileStoreByName(sv.getName()); } @Override public 
StorageVolume getDefaultStorageVolume() { try (LockCloseable lock = new LockCloseable(rwLock.readLock())) { if (defaultStorageVolumeId.isEmpty()) { return getStorageVolumeByName(BUILTIN_STORAGE_VOLUME); } return getStorageVolume(getDefaultStorageVolumeId()); } } private String getStorageVolumeIdOfDb(String svKey) throws DdlException { StorageVolume sv = null; if (svKey.equals(StorageVolumeMgr.DEFAULT)) { sv = getDefaultStorageVolume(); if (sv == null) { throw new DdlException("Default storage volume not exists, it should be created first"); } } else { sv = getStorageVolumeByName(svKey); if (sv == null) { throw new DdlException("Unknown storage volume \"" + svKey + "\""); } } return sv.getId(); } private boolean bindDbToStorageVolume(String svId, long dbId, boolean isReplay) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!isReplay && !storageVolumeToDbs.containsKey(svId) && getStorageVolume(svId) == null) { return false; } Set<Long> dbs = storageVolumeToDbs.getOrDefault(svId, new HashSet<>()); dbs.add(dbId); storageVolumeToDbs.put(svId, dbs); dbToStorageVolume.put(dbId, svId); return true; } } @Override public boolean bindDbToStorageVolume(String svKey, long dbId) throws DdlException { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { String svId = getStorageVolumeIdOfDb(svKey); return bindDbToStorageVolume(svId, dbId, false); } } @Override public void replayBindDbToStorageVolume(String svId, long dbId) { bindDbToStorageVolume(svId, dbId, true); } @Override public void unbindDbToStorageVolume(long dbId) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!dbToStorageVolume.containsKey(dbId)) { return; } String svId = dbToStorageVolume.remove(dbId); Set<Long> dbs = storageVolumeToDbs.get(svId); dbs.remove(dbId); if (dbs.isEmpty()) { storageVolumeToDbs.remove(svId); } } } private String getStorageVolumeIdOfTable(String svKey, long dbId) throws DdlException { StorageVolume sv = null; if (svKey.isEmpty()) { 
String dbStorageVolumeId = getStorageVolumeIdOfDb(dbId); if (dbStorageVolumeId != null) { return dbStorageVolumeId; } else { sv = getStorageVolumeByName(BUILTIN_STORAGE_VOLUME); if (sv == null) { throw new DdlException("Builtin storage volume not exists, please check the params in config"); } } } else if (svKey.equals(StorageVolumeMgr.DEFAULT)) { sv = getDefaultStorageVolume(); if (sv == null) { throw new DdlException("Default storage volume not exists, it should be created first"); } } else { sv = getStorageVolumeByName(svKey); if (sv == null) { throw new DdlException("Unknown storage volume \"" + svKey + "\""); } } return sv.getId(); } @Override public boolean bindTableToStorageVolume(String svKey, long dbId, long tableId) throws DdlException { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { String svId = getStorageVolumeIdOfTable(svKey, dbId); return bindTableToStorageVolume(svId, tableId, false); } } @Override public void replayBindTableToStorageVolume(String svId, long tableId) { bindTableToStorageVolume(svId, tableId, true); } private boolean bindTableToStorageVolume(String svId, long tableId, boolean isReplay) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!isReplay && !storageVolumeToDbs.containsKey(svId) && !storageVolumeToTables.containsKey(svId) && getStorageVolume(svId) == null) { return false; } Set<Long> tables = storageVolumeToTables.getOrDefault(svId, new HashSet<>()); tables.add(tableId); storageVolumeToTables.put(svId, tables); tableToStorageVolume.put(tableId, svId); return true; } } @Override public void unbindTableToStorageVolume(long tableId) { try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { if (!tableToStorageVolume.containsKey(tableId)) { return; } String svId = tableToStorageVolume.remove(tableId); Set<Long> tables = storageVolumeToTables.get(svId); tables.remove(tableId); if (tables.isEmpty()) { storageVolumeToTables.remove(svId); } } } @Override public String 
createBuiltinStorageVolume() throws DdlException, AlreadyExistsException { if (!Config.enable_load_volume_from_conf) { return ""; } try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) { StorageVolume sv = getStorageVolumeByName(BUILTIN_STORAGE_VOLUME); if (sv != null) { return sv.getId(); } validateStorageVolumeConfig(); List<String> locations = parseLocationsFromConfig(); Map<String, String> params = parseParamsFromConfig(); String svId = createStorageVolume(BUILTIN_STORAGE_VOLUME, Config.cloud_native_storage_type, locations, params, Optional.of(true), ""); if (getDefaultStorageVolumeId().isEmpty()) { setDefaultStorageVolume(BUILTIN_STORAGE_VOLUME); } return svId; } } private String[] getBucketAndPrefix() { int index = Config.aws_s3_path.indexOf('/'); if (index < 0) { return new String[] {Config.aws_s3_path, ""}; } return new String[] {Config.aws_s3_path.substring(0, index), Config.aws_s3_path.substring(index + 1)}; } private String getAwsCredentialType() { if (Config.aws_s3_use_aws_sdk_default_behavior) { return "default"; } if (Config.aws_s3_use_instance_profile) { if (Config.aws_s3_iam_role_arn.isEmpty()) { return "instance_profile"; } return "assume_role"; } if (Config.aws_s3_access_key.isEmpty() || Config.aws_s3_secret_key.isEmpty()) { return null; } if (Config.aws_s3_iam_role_arn.isEmpty()) { return "simple"; } return null; } private List<String> parseLocationsFromConfig() { List<String> locations = new ArrayList<>(); switch (Config.cloud_native_storage_type.toLowerCase()) { case "s3": locations.add("s3: break; case "hdfs": locations.add(Config.cloud_native_hdfs_url); break; case "azblob": locations.add("azblob: break; default: return locations; } return locations; } private Map<String, String> parseParamsFromConfig() { Map<String, String> params = new HashMap<>(); switch (Config.cloud_native_storage_type.toLowerCase()) { case "s3": params.put(CloudConfigurationConstants.AWS_S3_ACCESS_KEY, Config.aws_s3_access_key); 
params.put(CloudConfigurationConstants.AWS_S3_SECRET_KEY, Config.aws_s3_secret_key); params.put(CloudConfigurationConstants.AWS_S3_REGION, Config.aws_s3_region); params.put(CloudConfigurationConstants.AWS_S3_ENDPOINT, Config.aws_s3_endpoint); params.put(CloudConfigurationConstants.AWS_S3_EXTERNAL_ID, Config.aws_s3_external_id); params.put(CloudConfigurationConstants.AWS_S3_IAM_ROLE_ARN, Config.aws_s3_iam_role_arn); params.put(CloudConfigurationConstants.AWS_S3_USE_AWS_SDK_DEFAULT_BEHAVIOR, String.valueOf(Config.aws_s3_use_aws_sdk_default_behavior)); params.put(CloudConfigurationConstants.AWS_S3_USE_INSTANCE_PROFILE, String.valueOf(Config.aws_s3_use_instance_profile)); break; case "hdfs": break; case "azblob": params.put(CloudConfigurationConstants.AZURE_BLOB_SHARED_KEY, Config.azure_blob_shared_key); params.put(CloudConfigurationConstants.AZURE_BLOB_SAS_TOKEN, Config.azure_blob_sas_token); params.put(CloudConfigurationConstants.AZURE_BLOB_ENDPOINT, Config.azure_blob_endpoint); break; default: return params; } return params; } }
Wondering if we should make things a bit more complex. E.g. either the package should be `my.package` or it should start with `my.package.` (with a final dot). I don't think we want to catch `my.package2` and if I'm not mistaken, we catch it with what you did.
/**
 * Builds one predicate per exclusion expression. An expression ending in ".*"
 * matches the named package itself and any of its sub-packages; any other
 * expression matches a package name exactly.
 *
 * @param exclusions exclusion expressions, e.g. {@code "my.package.*"} or {@code "my.package"}
 * @return predicates that decide whether a package name is excluded
 */
private List<Predicate<String>> initPredicates(Collection<String> exclusions) {
    final String packMatch = ".*";
    List<Predicate<String>> predicates = new ArrayList<>();
    for (String exclusionExpression : exclusions) {
        if (exclusionExpression.endsWith(packMatch)) {
            final String pack = exclusionExpression.substring(0, exclusionExpression.length() - packMatch.length());
            predicates.add(new Predicate<String>() {
                @Override
                public boolean test(String packageName) {
                    // Match the package itself or a true sub-package ("my.package" or
                    // "my.package.x"), but not a sibling that merely shares the prefix
                    // (e.g. "my.package2"), which a bare startsWith(pack) would catch.
                    return packageName.equals(pack) || packageName.startsWith(pack + ".");
                }
            });
        } else {
            predicates.add(new Predicate<String>() {
                @Override
                public boolean test(String packageName) {
                    return packageName.equals(exclusionExpression);
                }
            });
        }
    }
    return predicates;
}
final String pack = exclusionExpression.substring(0, exclusionExpression.length() - packMatch.length());
Override public boolean test(String packageName) { return packageName.equals(pack) || packageName.startsWith(pack + "."); }
class in each archive for (ClassInfo classInfo : archive.getIndex().getKnownClasses()) { String packageName = DotNames.packageName(classInfo.name()); packageToArchiveMap.compute(packageName, (key, val) -> { Set<ApplicationArchive> returnValue = val == null ? new HashSet<>() : val; returnValue.add(archive); return returnValue; }); }
class in each archive for (ClassInfo classInfo : archive.getIndex().getKnownClasses()) { String packageName = DotNames.packageName(classInfo.name()); packageToArchiveMap.compute(packageName, (key, val) -> { Set<ApplicationArchive> returnValue = val == null ? new HashSet<>() : val; returnValue.add(archive); return returnValue; }); }
It is required because when we run the non-native image (a standard Java jar) we do not want the TSM (transaction status manager) to be started.
// Disables the Narayana transaction status manager (TSM) at runtime: when running
// as a standard (non-native) jar the TSM must not be started.
// NOTE(review): this appears to mirror the build-time system property
// "CoordinatorEnvironmentBean.transactionStatusManagerEnable=false" set for
// native images — confirm against the processor's build steps.
public void configureRuntimeProperties(NarayanaSTMRecorder recorder) {
    recorder.disableTransactionStatusManager();
}
recorder.disableTransactionStatusManager();
// Disables the Narayana transaction status manager (TSM) at runtime: when running
// as a standard (non-native) jar the TSM must not be started.
// NOTE(review): this appears to mirror the build-time system property
// "CoordinatorEnvironmentBean.transactionStatusManagerEnable=false" set for
// native images — confirm against the processor's build steps.
public void configureRuntimeProperties(NarayanaSTMRecorder recorder) {
    recorder.disableTransactionStatusManager();
}
class NarayanaSTMProcessor { private static final Logger log = Logger.getLogger(NarayanaSTMProcessor.class.getName()); @Inject CombinedIndexBuildItem combinedIndexBuildItem; @Inject BuildProducer<ReflectiveHierarchyBuildItem> reflectiveHierarchyClass; @Inject BuildProducer<ReflectiveClassBuildItem> reflectiveClass; @BuildStep ReflectiveClassBuildItem registerFeature(BuildProducer<FeatureBuildItem> feature) { feature.produce(new FeatureBuildItem(FeatureBuildItem.NARAYANA_STM)); return new ReflectiveClassBuildItem(true, false, ShadowNoFileLockStore.class.getName(), CheckedActionFactoryImple.class.getName(), Lock.class.getName()); } @BuildStep() public NativeImageSystemPropertyBuildItem substrateSystemPropertyBuildItem() { return new NativeImageSystemPropertyBuildItem("CoordinatorEnvironmentBean.transactionStatusManagerEnable", "false"); } @BuildStep() @Record(RUNTIME_INIT) @BuildStep NativeImageProxyDefinitionBuildItem stmProxies() { final DotName TRANSACTIONAL = DotName.createSimple(Transactional.class.getName()); IndexView index = combinedIndexBuildItem.getIndex(); Collection<String> proxies = new ArrayList<>(); for (AnnotationInstance stm : index.getAnnotations(TRANSACTIONAL)) { if (AnnotationTarget.Kind.CLASS.equals(stm.target().kind())) { DotName name = stm.target().asClass().name(); proxies.add(name.toString()); log.debugf("Registering transactional interface %s%n", name); for (ClassInfo ci : index.getAllKnownImplementors(name)) { reflectiveHierarchyClass.produce( new ReflectiveHierarchyBuildItem(Type.create(ci.name(), Type.Kind.CLASS))); } } } String[] classNames = proxies.toArray(new String[0]); reflectiveClass.produce(new ReflectiveClassBuildItem(true, true, classNames)); return new NativeImageProxyDefinitionBuildItem(classNames); } }
class NarayanaSTMProcessor { private static final Logger log = Logger.getLogger(NarayanaSTMProcessor.class.getName()); @Inject CombinedIndexBuildItem combinedIndexBuildItem; @Inject BuildProducer<ReflectiveHierarchyBuildItem> reflectiveHierarchyClass; @Inject BuildProducer<ReflectiveClassBuildItem> reflectiveClass; @BuildStep ReflectiveClassBuildItem registerFeature(BuildProducer<FeatureBuildItem> feature) { feature.produce(new FeatureBuildItem(FeatureBuildItem.NARAYANA_STM)); return new ReflectiveClassBuildItem(true, false, ShadowNoFileLockStore.class.getName(), CheckedActionFactoryImple.class.getName(), Lock.class.getName()); } @BuildStep() public NativeImageSystemPropertyBuildItem substrateSystemPropertyBuildItem() { return new NativeImageSystemPropertyBuildItem("CoordinatorEnvironmentBean.transactionStatusManagerEnable", "false"); } @BuildStep() @Record(RUNTIME_INIT) @BuildStep NativeImageProxyDefinitionBuildItem stmProxies() { final DotName TRANSACTIONAL = DotName.createSimple(Transactional.class.getName()); IndexView index = combinedIndexBuildItem.getIndex(); Collection<String> proxies = new ArrayList<>(); for (AnnotationInstance stm : index.getAnnotations(TRANSACTIONAL)) { if (AnnotationTarget.Kind.CLASS.equals(stm.target().kind())) { DotName name = stm.target().asClass().name(); proxies.add(name.toString()); log.debugf("Registering transactional interface %s%n", name); for (ClassInfo ci : index.getAllKnownImplementors(name)) { reflectiveHierarchyClass.produce( new ReflectiveHierarchyBuildItem(Type.create(ci.name(), Type.Kind.CLASS))); } } } String[] classNames = proxies.toArray(new String[0]); reflectiveClass.produce(new ReflectiveClassBuildItem(true, true, classNames)); return new NativeImageProxyDefinitionBuildItem(classNames); } }
```suggestion String.format("The jni reader fields' size {%s} is not matched with paimon fields' size {%s}. Please refresh table and try again", ```
private void initReader() throws IOException { ReadBuilder readBuilder = table.newReadBuilder(); if (this.fields.length != this.paimonAllFieldNames.size()) { throw new IOException( String.format("The jni reader fields' size {%s} is not matched with paimon fields' size {%s}", fields.length, paimonAllFieldNames.size())); } readBuilder.withProjection(getProjected()); readBuilder.withFilter(getPredicates()); reader = readBuilder.newRead().createReader(getSplit()); }
String.format("The jni reader fields' size {%s} is not matched with paimon fields' size {%s}",
private void initReader() throws IOException { ReadBuilder readBuilder = table.newReadBuilder(); if (this.fields.length != this.paimonAllFieldNames.size()) { throw new IOException( String.format( "The jni reader fields' size {%s} is not matched with paimon fields' size {%s}." + " Please refresh table and try again", fields.length, paimonAllFieldNames.size())); } readBuilder.withProjection(getProjected()); readBuilder.withFilter(getPredicates()); reader = readBuilder.newRead().createReader(getSplit()); }
class PaimonJniScanner extends JniScanner { private static final Logger LOG = LoggerFactory.getLogger(PaimonJniScanner.class); private static final String PAIMON_OPTION_PREFIX = "paimon_option_prefix."; private final Map<String, String> params; private final Map<String, String> paimonOptionParams; private final String dbName; private final String tblName; private final String paimonSplit; private final String paimonPredicate; private Table table; private RecordReader<InternalRow> reader; private final PaimonColumnValue columnValue = new PaimonColumnValue(); private List<String> paimonAllFieldNames; private long ctlId; private long dbId; private long tblId; private long lastUpdateTime; private RecordReader.RecordIterator<InternalRow> recordIterator = null; private final ClassLoader classLoader; public PaimonJniScanner(int batchSize, Map<String, String> params) { this.classLoader = this.getClass().getClassLoader(); if (LOG.isDebugEnabled()) { LOG.debug("params:{}", params); } this.params = params; String[] requiredFields = params.get("required_fields").split(","); String[] requiredTypes = params.get("columns_types").split(" ColumnType[] columnTypes = new ColumnType[requiredTypes.length]; for (int i = 0; i < requiredTypes.length; i++) { columnTypes[i] = ColumnType.parseType(requiredFields[i], requiredTypes[i]); } paimonSplit = params.get("paimon_split"); paimonPredicate = params.get("paimon_predicate"); dbName = params.get("db_name"); tblName = params.get("table_name"); ctlId = Long.parseLong(params.get("ctl_id")); dbId = Long.parseLong(params.get("db_id")); tblId = Long.parseLong(params.get("tbl_id")); lastUpdateTime = Long.parseLong(params.get("last_update_time")); initTableInfo(columnTypes, requiredFields, batchSize); paimonOptionParams = params.entrySet().stream() .filter(kv -> kv.getKey().startsWith(PAIMON_OPTION_PREFIX)) .collect(Collectors .toMap(kv1 -> kv1.getKey().substring(PAIMON_OPTION_PREFIX.length()), kv1 -> kv1.getValue())); } @Override public void 
open() throws IOException { try { Thread.currentThread().setContextClassLoader(classLoader); initTable(); initReader(); resetDatetimeV2Precision(); } catch (Exception e) { LOG.warn("Failed to open paimon_scanner: " + e.getMessage(), e); throw e; } } private int[] getProjected() { return Arrays.stream(fields).mapToInt(paimonAllFieldNames::indexOf).toArray(); } private List<Predicate> getPredicates() { List<Predicate> predicates = PaimonScannerUtils.decodeStringToObject(paimonPredicate); if (LOG.isDebugEnabled()) { LOG.debug("predicates:{}", predicates); } return predicates; } private Split getSplit() { Split split = PaimonScannerUtils.decodeStringToObject(paimonSplit); if (LOG.isDebugEnabled()) { LOG.debug("split:{}", split); } return split; } private void resetDatetimeV2Precision() { for (int i = 0; i < types.length; i++) { if (types[i].isDateTimeV2()) { int index = paimonAllFieldNames.indexOf(fields[i]); if (index != -1) { DataType dataType = table.rowType().getTypeAt(index); if (dataType instanceof TimestampType) { types[i].setPrecision(((TimestampType) dataType).getPrecision()); } } } } } @Override public void close() throws IOException { if (reader != null) { reader.close(); } } @Override protected int getNext() throws IOException { int rows = 0; try { if (recordIterator == null) { recordIterator = reader.readBatch(); } while (recordIterator != null) { InternalRow record; while ((record = recordIterator.next()) != null) { columnValue.setOffsetRow(record); for (int i = 0; i < fields.length; i++) { columnValue.setIdx(i, types[i]); appendData(i, columnValue); } rows++; if (rows >= batchSize) { return rows; } } recordIterator.releaseBatch(); recordIterator = reader.readBatch(); } } catch (Exception e) { close(); LOG.warn("Failed to get the next batch of paimon. 
" + "split: {}, requiredFieldNames: {}, paimonAllFieldNames: {}", getSplit(), params.get("required_fields"), paimonAllFieldNames, e); throw new IOException(e); } return rows; } @Override protected TableSchema parseTableSchema() throws UnsupportedOperationException { return null; } private void initTable() { PaimonTableCacheKey key = new PaimonTableCacheKey(ctlId, dbId, tblId, paimonOptionParams, dbName, tblName); TableExt tableExt = PaimonTableCache.getTable(key); if (tableExt.getCreateTime() < lastUpdateTime) { LOG.warn("invalidate cache table:{}, localTime:{}, remoteTime:{}", key, tableExt.getCreateTime(), lastUpdateTime); PaimonTableCache.invalidateTableCache(key); tableExt = PaimonTableCache.getTable(key); } this.table = tableExt.getTable(); paimonAllFieldNames = PaimonScannerUtils.fieldNames(this.table.rowType()); if (LOG.isDebugEnabled()) { LOG.debug("paimonAllFieldNames:{}", paimonAllFieldNames); } } }
class PaimonJniScanner extends JniScanner { private static final Logger LOG = LoggerFactory.getLogger(PaimonJniScanner.class); private static final String PAIMON_OPTION_PREFIX = "paimon_option_prefix."; private final Map<String, String> params; private final Map<String, String> paimonOptionParams; private final String dbName; private final String tblName; private final String paimonSplit; private final String paimonPredicate; private Table table; private RecordReader<InternalRow> reader; private final PaimonColumnValue columnValue = new PaimonColumnValue(); private List<String> paimonAllFieldNames; private long ctlId; private long dbId; private long tblId; private long lastUpdateTime; private RecordReader.RecordIterator<InternalRow> recordIterator = null; private final ClassLoader classLoader; public PaimonJniScanner(int batchSize, Map<String, String> params) { this.classLoader = this.getClass().getClassLoader(); if (LOG.isDebugEnabled()) { LOG.debug("params:{}", params); } this.params = params; String[] requiredFields = params.get("required_fields").split(","); String[] requiredTypes = params.get("columns_types").split(" ColumnType[] columnTypes = new ColumnType[requiredTypes.length]; for (int i = 0; i < requiredTypes.length; i++) { columnTypes[i] = ColumnType.parseType(requiredFields[i], requiredTypes[i]); } paimonSplit = params.get("paimon_split"); paimonPredicate = params.get("paimon_predicate"); dbName = params.get("db_name"); tblName = params.get("table_name"); ctlId = Long.parseLong(params.get("ctl_id")); dbId = Long.parseLong(params.get("db_id")); tblId = Long.parseLong(params.get("tbl_id")); lastUpdateTime = Long.parseLong(params.get("last_update_time")); initTableInfo(columnTypes, requiredFields, batchSize); paimonOptionParams = params.entrySet().stream() .filter(kv -> kv.getKey().startsWith(PAIMON_OPTION_PREFIX)) .collect(Collectors .toMap(kv1 -> kv1.getKey().substring(PAIMON_OPTION_PREFIX.length()), kv1 -> kv1.getValue())); } @Override public void 
open() throws IOException { try { Thread.currentThread().setContextClassLoader(classLoader); initTable(); initReader(); resetDatetimeV2Precision(); } catch (Exception e) { LOG.warn("Failed to open paimon_scanner: " + e.getMessage(), e); throw e; } } private int[] getProjected() { return Arrays.stream(fields).mapToInt(paimonAllFieldNames::indexOf).toArray(); } private List<Predicate> getPredicates() { List<Predicate> predicates = PaimonScannerUtils.decodeStringToObject(paimonPredicate); if (LOG.isDebugEnabled()) { LOG.debug("predicates:{}", predicates); } return predicates; } private Split getSplit() { Split split = PaimonScannerUtils.decodeStringToObject(paimonSplit); if (LOG.isDebugEnabled()) { LOG.debug("split:{}", split); } return split; } private void resetDatetimeV2Precision() { for (int i = 0; i < types.length; i++) { if (types[i].isDateTimeV2()) { int index = paimonAllFieldNames.indexOf(fields[i]); if (index != -1) { DataType dataType = table.rowType().getTypeAt(index); if (dataType instanceof TimestampType) { types[i].setPrecision(((TimestampType) dataType).getPrecision()); } } } } } @Override public void close() throws IOException { if (reader != null) { reader.close(); } } @Override protected int getNext() throws IOException { int rows = 0; try { if (recordIterator == null) { recordIterator = reader.readBatch(); } while (recordIterator != null) { InternalRow record; while ((record = recordIterator.next()) != null) { columnValue.setOffsetRow(record); for (int i = 0; i < fields.length; i++) { columnValue.setIdx(i, types[i]); appendData(i, columnValue); } rows++; if (rows >= batchSize) { return rows; } } recordIterator.releaseBatch(); recordIterator = reader.readBatch(); } } catch (Exception e) { close(); LOG.warn("Failed to get the next batch of paimon. 
" + "split: {}, requiredFieldNames: {}, paimonAllFieldNames: {}", getSplit(), params.get("required_fields"), paimonAllFieldNames, e); throw new IOException(e); } return rows; } @Override protected TableSchema parseTableSchema() throws UnsupportedOperationException { return null; } private void initTable() { PaimonTableCacheKey key = new PaimonTableCacheKey(ctlId, dbId, tblId, paimonOptionParams, dbName, tblName); TableExt tableExt = PaimonTableCache.getTable(key); if (tableExt.getCreateTime() < lastUpdateTime) { LOG.warn("invalidate cache table:{}, localTime:{}, remoteTime:{}", key, tableExt.getCreateTime(), lastUpdateTime); PaimonTableCache.invalidateTableCache(key); tableExt = PaimonTableCache.getTable(key); } this.table = tableExt.getTable(); paimonAllFieldNames = PaimonScannerUtils.fieldNames(this.table.rowType()); if (LOG.isDebugEnabled()) { LOG.debug("paimonAllFieldNames:{}", paimonAllFieldNames); } } }
No, it's just important to not accept all exceptions. Otherwise the AssertionError thrown by `snsWriterFnLogs.verifyWarn` fullfills this, which is kind of pointless. But that's exactly what happened, because logs for the wrong logger were captured. Even more, the message of the assertion error contained exactly that string we expected to be thrown because it's also the string we were checking for in the logs.
public void testRetries() throws Throwable { thrown.expect(IOException.class); thrown.expectMessage("Error writing to SNS"); thrown.expectMessage("No more attempts allowed"); final PublishRequest request1 = createSampleMessage("my message that will not be published"); final TupleTag<PublishResult> results = new TupleTag<>(); final AmazonSNS amazonSnsErrors = getAmazonSnsMockErrors(); p.apply(Create.of(request1)) .apply( SnsIO.write() .withTopicName(topicName) .withRetryConfiguration( SnsIO.RetryConfiguration.create(4, standardSeconds(10), millis(1))) .withAWSClientsProvider(new Provider(amazonSnsErrors)) .withResultOutputTag(results)); try { p.run(); } catch (final Pipeline.PipelineExecutionException e) { snsWriterFnLogs.verifyWarn(String.format(SnsIO.Write.SnsWriterFn.RETRY_ATTEMPT_LOG, 1)); snsWriterFnLogs.verifyWarn(String.format(SnsIO.Write.SnsWriterFn.RETRY_ATTEMPT_LOG, 2)); snsWriterFnLogs.verifyWarn(String.format(SnsIO.Write.SnsWriterFn.RETRY_ATTEMPT_LOG, 3)); throw e.getCause(); } }
thrown.expect(IOException.class);
public void testRetries() throws Throwable { thrown.expect(IOException.class); thrown.expectMessage("Error writing to SNS"); thrown.expectMessage("No more attempts allowed"); final PublishRequest request1 = createSampleMessage("my message that will not be published"); final TupleTag<PublishResult> results = new TupleTag<>(); final AmazonSNS amazonSnsErrors = getAmazonSnsMockErrors(); p.apply(Create.of(request1)) .apply( SnsIO.write() .withTopicName(topicName) .withRetryConfiguration( SnsIO.RetryConfiguration.create(4, standardSeconds(10), millis(1))) .withAWSClientsProvider(new Provider(amazonSnsErrors)) .withResultOutputTag(results)); try { p.run(); } catch (final Pipeline.PipelineExecutionException e) { snsWriterFnLogs.verifyWarn(String.format(SnsIO.Write.SnsWriterFn.RETRY_ATTEMPT_LOG, 1)); snsWriterFnLogs.verifyWarn(String.format(SnsIO.Write.SnsWriterFn.RETRY_ATTEMPT_LOG, 2)); snsWriterFnLogs.verifyWarn(String.format(SnsIO.Write.SnsWriterFn.RETRY_ATTEMPT_LOG, 3)); throw e.getCause(); } }
class Provider implements AwsClientsProvider { private static AmazonSNS publisher; public Provider(AmazonSNS pub) { publisher = pub; } @Override public AmazonCloudWatch getCloudWatchClient() { return Mockito.mock(AmazonCloudWatch.class); } @Override public AmazonSNS createSnsPublisher() { return publisher; } }
class Provider implements AwsClientsProvider { private static AmazonSNS publisher; public Provider(AmazonSNS pub) { publisher = pub; } @Override public AmazonCloudWatch getCloudWatchClient() { return Mockito.mock(AmazonCloudWatch.class); } @Override public AmazonSNS createSnsPublisher() { return publisher; } }
Can you add some comment highlighting and justifying that you are iterating here over all of the state handles?
public String toString() { synchronized (registeredStates) { return "SharedStateRegistry{" + "registeredStates=" + registeredStates + '}'; } }
synchronized (registeredStates) {
public String toString() { synchronized (registeredStates) { return "SharedStateRegistry{" + "registeredStates=" + registeredStates + '}'; } }
class SharedStateRegistryImpl implements SharedStateRegistry { private static final Logger LOG = LoggerFactory.getLogger(SharedStateRegistryImpl.class); /** All registered state objects by an artificial key */ private final Map<SharedStateRegistryKey, SharedStateEntry> registeredStates; /** This flag indicates whether or not the registry is open or if close() was called */ private boolean open; /** Executor for async state deletion */ private final Executor asyncDisposalExecutor; /** Default uses direct executor to delete unreferenced state */ public SharedStateRegistryImpl() { this(Executors.directExecutor()); } public SharedStateRegistryImpl(Executor asyncDisposalExecutor) { this.registeredStates = new HashMap<>(); this.asyncDisposalExecutor = checkNotNull(asyncDisposalExecutor); this.open = true; } public StreamStateHandle registerReference( SharedStateRegistryKey registrationKey, StreamStateHandle state, long checkpointID) { checkNotNull(state); StreamStateHandle scheduledStateDeletion = null; SharedStateEntry entry; synchronized (registeredStates) { checkState(open, "Attempt to register state to closed SharedStateRegistry."); entry = registeredStates.get(registrationKey); if (entry == null) { checkState( !isPlaceholder(state), "Attempt to reference unknown state: " + registrationKey); entry = new SharedStateEntry(state, checkpointID); registeredStates.put(registrationKey, entry); } else { if (!Objects.equals(state, entry.stateHandle)) { scheduledStateDeletion = state; LOG.trace( "Identified duplicate state registration under key {}. 
New state {} was determined to " + "be an unnecessary copy of existing state {} and will be dropped.", registrationKey, state, entry.stateHandle); } entry.lastUsedCheckpointID = Math.max(checkpointID, entry.lastUsedCheckpointID); } } scheduleAsyncDelete(scheduledStateDeletion); LOG.trace("Registered shared state {} under key {}.", entry, registrationKey); return entry.stateHandle; } @Override public void unregisterUnusedState(long lowestCheckpointID) { LOG.debug( "Discard state created before checkpoint {} and not used afterwards", lowestCheckpointID); List<StreamStateHandle> subsumed = new ArrayList<>(); synchronized (registeredStates) { Iterator<SharedStateEntry> it = registeredStates.values().iterator(); while (it.hasNext()) { SharedStateEntry entry = it.next(); if (entry.lastUsedCheckpointID < lowestCheckpointID) { subsumed.add(entry.stateHandle); it.remove(); } } } LOG.trace("Discard {} state asynchronously", subsumed.size()); for (StreamStateHandle handle : subsumed) { scheduleAsyncDelete(handle); } } @Override public void registerAll( Iterable<? 
extends CompositeStateHandle> stateHandles, long checkpointID) { if (stateHandles == null) { return; } synchronized (registeredStates) { for (CompositeStateHandle stateHandle : stateHandles) { stateHandle.registerSharedStates(this, checkpointID); } } } @Override private void scheduleAsyncDelete(StreamStateHandle streamStateHandle) { if (streamStateHandle != null && !isPlaceholder(streamStateHandle)) { LOG.trace("Scheduled delete of state handle {}.", streamStateHandle); AsyncDisposalRunnable asyncDisposalRunnable = new AsyncDisposalRunnable(streamStateHandle); try { asyncDisposalExecutor.execute(asyncDisposalRunnable); } catch (RejectedExecutionException ex) { asyncDisposalRunnable.run(); } } } private boolean isPlaceholder(StreamStateHandle stateHandle) { return stateHandle instanceof PlaceholderStreamStateHandle; } @Override public void close() { synchronized (registeredStates) { open = false; } } /** Encapsulates the operation the delete state handles asynchronously. */ private static final class AsyncDisposalRunnable implements Runnable { private final StateObject toDispose; public AsyncDisposalRunnable(StateObject toDispose) { this.toDispose = checkNotNull(toDispose); } @Override public void run() { try { toDispose.discardState(); } catch (Exception e) { LOG.warn( "A problem occurred during asynchronous disposal of a shared state object: {}", toDispose, e); } } } /** An entry in the registry, tracking the handle and the corresponding reference count. */ private static final class SharedStateEntry { /** The shared state handle */ final StreamStateHandle stateHandle; private long lastUsedCheckpointID; SharedStateEntry(StreamStateHandle value, long checkpointID) { this.stateHandle = value; this.lastUsedCheckpointID = checkpointID; } @Override public String toString() { return "SharedStateEntry{" + "stateHandle=" + stateHandle + ", lastUsedCheckpointID=" + lastUsedCheckpointID + '}'; } } }
class SharedStateRegistryImpl implements SharedStateRegistry { private static final Logger LOG = LoggerFactory.getLogger(SharedStateRegistryImpl.class); /** All registered state objects by an artificial key */ private final Map<SharedStateRegistryKey, SharedStateEntry> registeredStates; /** This flag indicates whether or not the registry is open or if close() was called */ private boolean open; /** Executor for async state deletion */ private final Executor asyncDisposalExecutor; /** Default uses direct executor to delete unreferenced state */ public SharedStateRegistryImpl() { this(Executors.directExecutor()); } public SharedStateRegistryImpl(Executor asyncDisposalExecutor) { this.registeredStates = new HashMap<>(); this.asyncDisposalExecutor = checkNotNull(asyncDisposalExecutor); this.open = true; } public StreamStateHandle registerReference( SharedStateRegistryKey registrationKey, StreamStateHandle state, long checkpointID) { checkNotNull(state); StreamStateHandle scheduledStateDeletion = null; SharedStateEntry entry; synchronized (registeredStates) { checkState(open, "Attempt to register state to closed SharedStateRegistry."); entry = registeredStates.get(registrationKey); if (entry == null) { checkState( !isPlaceholder(state), "Attempt to reference unknown state: " + registrationKey); entry = new SharedStateEntry(state, checkpointID); registeredStates.put(registrationKey, entry); LOG.trace("Registered new shared state {} under key {}.", entry, registrationKey); } else { if (!Objects.equals(state, entry.stateHandle)) { if (entry.confirmed || isPlaceholder(state)) { scheduledStateDeletion = state; } else { scheduledStateDeletion = entry.stateHandle; entry.stateHandle = state; } LOG.trace( "Identified duplicate state registration under key {}. 
New state {} was determined to " + "be an unnecessary copy of existing state {} and will be dropped.", registrationKey, state, entry.stateHandle); } LOG.trace( "Updating last checkpoint for {} from {} to {}", registrationKey, entry.lastUsedCheckpointID, checkpointID); entry.advanceLastUsingCheckpointID(checkpointID); } } scheduleAsyncDelete(scheduledStateDeletion); return entry.stateHandle; } @Override public void unregisterUnusedState(long lowestCheckpointID) { LOG.debug( "Discard state created before checkpoint {} and not used afterwards", lowestCheckpointID); List<StreamStateHandle> subsumed = new ArrayList<>(); synchronized (registeredStates) { Iterator<SharedStateEntry> it = registeredStates.values().iterator(); while (it.hasNext()) { SharedStateEntry entry = it.next(); if (entry.lastUsedCheckpointID < lowestCheckpointID) { subsumed.add(entry.stateHandle); it.remove(); } } } LOG.trace("Discard {} state asynchronously", subsumed.size()); for (StreamStateHandle handle : subsumed) { scheduleAsyncDelete(handle); } } @Override public void registerAll( Iterable<? 
extends CompositeStateHandle> stateHandles, long checkpointID) { if (stateHandles == null) { return; } synchronized (registeredStates) { for (CompositeStateHandle stateHandle : stateHandles) { stateHandle.registerSharedStates(this, checkpointID); } } } @Override public void checkpointCompleted(long checkpointId) { for (SharedStateEntry entry : registeredStates.values()) { if (entry.lastUsedCheckpointID == checkpointId) { entry.confirmed = true; } } } @Override private void scheduleAsyncDelete(StreamStateHandle streamStateHandle) { if (streamStateHandle != null && !isPlaceholder(streamStateHandle)) { LOG.trace("Scheduled delete of state handle {}.", streamStateHandle); AsyncDisposalRunnable asyncDisposalRunnable = new AsyncDisposalRunnable(streamStateHandle); try { asyncDisposalExecutor.execute(asyncDisposalRunnable); } catch (RejectedExecutionException ex) { asyncDisposalRunnable.run(); } } } private boolean isPlaceholder(StreamStateHandle stateHandle) { return stateHandle instanceof PlaceholderStreamStateHandle; } @Override public void close() { synchronized (registeredStates) { open = false; } } /** Encapsulates the operation the delete state handles asynchronously. */ private static final class AsyncDisposalRunnable implements Runnable { private final StateObject toDispose; public AsyncDisposalRunnable(StateObject toDispose) { this.toDispose = checkNotNull(toDispose); } @Override public void run() { try { toDispose.discardState(); } catch (Exception e) { LOG.warn( "A problem occurred during asynchronous disposal of a shared state object: {}", toDispose, e); } } } /** An entry in the registry, tracking the handle and the corresponding reference count. */ private static final class SharedStateEntry { /** The shared state handle */ StreamStateHandle stateHandle; private long lastUsedCheckpointID; /** Whether this entry is included into a confirmed checkpoint. 
*/ private boolean confirmed; SharedStateEntry(StreamStateHandle value, long checkpointID) { this.stateHandle = value; this.lastUsedCheckpointID = checkpointID; } @Override public String toString() { return "SharedStateEntry{" + "stateHandle=" + stateHandle + ", lastUsedCheckpointID=" + lastUsedCheckpointID + '}'; } private void advanceLastUsingCheckpointID(long checkpointID) { lastUsedCheckpointID = Math.max(checkpointID, lastUsedCheckpointID); } } }
Shall we add tests for the not-covered cases?
private boolean isOverridden(Method method1, Method method2, Class<?> clazz) { if ((Modifier.isStatic(method1.getModifiers()) ^ Modifier.isStatic(method2.getModifiers())) || method1.getParameterCount() != method2.getParameterCount()) { throw new JInteropException( OVERLOADED_METHODS, "Overloaded methods cannot be differentiated. Please specify the " + "parameterTypes for each parameter in 'paramTypes' field in the annotation"); } Method currentMethod; Method otherMethod; if (method2.getReturnType().isAssignableFrom(method1.getReturnType())) { currentMethod = method1; otherMethod = method2; } else if (method1.getReturnType().isAssignableFrom(method2.getReturnType())) { currentMethod = method2; otherMethod = method1; } else { return false; } try { Method superMethod = clazz.getSuperclass().getDeclaredMethod(currentMethod.getName(), currentMethod.getParameterTypes()); if (Modifier.isStatic(currentMethod.getModifiers())) { return superMethod.equals(otherMethod); } return Arrays.equals(superMethod.getParameterTypes(), otherMethod.getParameterTypes()) && superMethod.getReturnType().equals(otherMethod.getReturnType()); } catch (NoSuchMethodException e) { return false; } }
return false;
private boolean isOverridden(Method method1, Method method2, Class<?> clazz) { if (method1.getParameterCount() != method2.getParameterCount()) { throw new JInteropException(OVERLOADED_METHODS, "Overloaded methods cannot be differentiated. " + "Please specify the parameter types for each parameter in 'paramTypes' field in the annotation"); } Method currentMethod; Method otherMethod; if (method2.getReturnType().isAssignableFrom(method1.getReturnType())) { currentMethod = method1; otherMethod = method2; } else if (method1.getReturnType().isAssignableFrom(method2.getReturnType())) { currentMethod = method2; otherMethod = method1; } else { return false; } try { Method superMethod = clazz.getSuperclass() .getDeclaredMethod(currentMethod.getName(), currentMethod.getParameterTypes()); return Arrays.equals(superMethod.getParameterTypes(), otherMethod.getParameterTypes()) && superMethod.getReturnType().equals(otherMethod.getReturnType()); } catch (NoSuchMethodException e) { return false; } }
class names for each parameter " + "with 'paramTypes' field in the annotation"); } } JMethod jMethod = resolveExactMethod(jMethodRequest.declaringClass, jMethodRequest.methodName, jMethodRequest.kind, jMethodRequest.paramTypeConstraints, jMethodRequest.receiverType); if (jMethod == JMethod.NO_SUCH_METHOD) { return resolveMatchingMethod(jMethodRequest, jMethods); }
class '" + jMethodRequest.declaringClass.getName() + "'"); } } jMethods = resolveByParamCount(jMethods, jMethodRequest); if (jMethods.isEmpty()) { throwMethodNotFoundError(jMethodRequest); }
Shall we add the test function name too here, so that it's easier to identify the exact error?
private static void resolveFunctions(TestSuite suite) { List<TesterinaFunction> functions = suite.getTestUtilityFunctions(); List<String> functionNames = functions.stream().map(testerinaFunction -> testerinaFunction.getName()).collect (Collectors.toList()); for (Test test : suite.getTests()) { if (test.getTestName() != null && functionNames.contains(test.getTestName())) { test.setTestFunction(functions.stream().filter(e -> e.getName().equals(test .getTestName())).findFirst().get()); } if (test.getBeforeTestFunction() != null && functionNames.contains(test.getBeforeTestFunction())) { test.setBeforeTestFunctionObj(functions.stream().filter(e -> e.getName().equals(test .getBeforeTestFunction())).findFirst().get()); } if (test.getAfterTestFunction() != null && functionNames.contains(test.getAfterTestFunction())) { test.setAfterTestFunctionObj(functions.stream().filter(e -> e.getName().equals(test .getAfterTestFunction())).findFirst().get()); } if (test.getDataProvider() != null && functionNames.contains(test.getDataProvider())) { String dataProvider = test.getDataProvider(); test.setDataProviderFunction(functions.stream().filter(e -> e.getName().equals(test.getDataProvider() )).findFirst().map(func -> { if (func.getbFunction().getRetParamTypes().length == 1) { BType bType = func.getbFunction().getRetParamTypes()[0]; if (bType.getTag() == TypeTags.ARRAY_TAG) { BArrayType bArrayType = (BArrayType) bType; if (bArrayType.getElementType().getTag() != TypeTags.ARRAY_TAG) { String message = String.format("Data provider function [%s] should return an array of" + " arrays.", dataProvider); throw new BallerinaException(message); } } else { String message = String.format("Data provider function [%s] should return an array of " + "arrays.", dataProvider); throw new BallerinaException(message); } } else { String message = String.format("Data provider function [%s] should have only one return type" + ".", dataProvider); throw new BallerinaException(message); } return func; 
}).get()); if (test.getDataProviderFunction() == null) { String message = String.format("Data provider function [%s] cannot be found.", dataProvider); throw new BallerinaException(message); } } for (String dependsOnFn : test.getDependsOnTestFunctions()) { if (!functions.stream().anyMatch(func -> func.getName().equals(dependsOnFn))) { throw new BallerinaException("Cannot find the specified dependsOn function : " + dependsOnFn); } test.addDependsOnTestFunction(functions.stream().filter(e -> e.getName().equals(dependsOnFn)) .findFirst().get()); } } suite.getMockFunctionNamesMap().forEach((id, functionName) -> { TesterinaFunction function = suite.getTestUtilityFunctions().stream().filter(e -> e.getName().equals (functionName)).findFirst().get(); suite.addMockFunctionObj(id, function); }); suite.getBeforeSuiteFunctionNames().forEach(functionName -> { TesterinaFunction function = suite.getTestUtilityFunctions().stream().filter(e -> e.getName().equals (functionName)).findFirst().get(); suite.addBeforeSuiteFunctionObj(function); }); suite.getAfterSuiteFunctionNames().forEach(functionName -> { TesterinaFunction function = suite.getTestUtilityFunctions().stream().filter(e -> e.getName().equals (functionName)).findFirst().get(); suite.addAfterSuiteFunctionObj(function); }); suite.getBeforeEachFunctionNames().forEach(functionName -> { TesterinaFunction function = suite.getTestUtilityFunctions().stream().filter(e -> e.getName().equals (functionName)).findFirst().get(); suite.addBeforeEachFunctionObj(function); }); suite.getAfterEachFunctionNames().forEach(functionName -> { TesterinaFunction function = suite.getTestUtilityFunctions().stream().filter(e -> e.getName().equals (functionName)).findFirst().get(); suite.addAfterEachFunctionObj(function); }); }
throw new BallerinaException("Cannot find the specified dependsOn function : " + dependsOnFn);
/**
 * Resolves every function name recorded on the given {@link TestSuite} to its
 * {@link TesterinaFunction} object: per-test functions (test body, before/after
 * hooks, data provider, dependsOn) and suite-level fixtures (mock,
 * before/after suite, before/after each).
 *
 * @param suite the test suite whose function references are resolved in place
 * @throws BallerinaException if a data provider has an invalid signature or a
 *         referenced dependsOn function cannot be found
 */
private static void resolveFunctions(TestSuite suite) {
    List<TesterinaFunction> functions = suite.getTestUtilityFunctions();
    List<String> functionNames = functions.stream()
            .map(testerinaFunction -> testerinaFunction.getName())
            .collect(Collectors.toList());
    for (Test test : suite.getTests()) {
        // Test body function.
        if (test.getTestName() != null && functionNames.contains(test.getTestName())) {
            test.setTestFunction(functions.stream()
                    .filter(e -> e.getName().equals(test.getTestName()))
                    .findFirst().get());
        }
        // Optional per-test before/after hooks.
        if (test.getBeforeTestFunction() != null && functionNames.contains(test.getBeforeTestFunction())) {
            test.setBeforeTestFunctionObj(functions.stream()
                    .filter(e -> e.getName().equals(test.getBeforeTestFunction()))
                    .findFirst().get());
        }
        if (test.getAfterTestFunction() != null && functionNames.contains(test.getAfterTestFunction())) {
            test.setAfterTestFunctionObj(functions.stream()
                    .filter(e -> e.getName().equals(test.getAfterTestFunction()))
                    .findFirst().get());
        }
        // Data provider: must return exactly one value of type array-of-arrays.
        if (test.getDataProvider() != null && functionNames.contains(test.getDataProvider())) {
            String dataProvider = test.getDataProvider();
            test.setDataProviderFunction(functions.stream()
                    .filter(e -> e.getName().equals(test.getDataProvider()))
                    .findFirst()
                    .map(func -> {
                        if (func.getbFunction().getRetParamTypes().length == 1) {
                            BType bType = func.getbFunction().getRetParamTypes()[0];
                            if (bType.getTag() == TypeTags.ARRAY_TAG) {
                                BArrayType bArrayType = (BArrayType) bType;
                                if (bArrayType.getElementType().getTag() != TypeTags.ARRAY_TAG) {
                                    String message = String.format(
                                            "Data provider function [%s] should return an array of arrays.",
                                            dataProvider);
                                    throw new BallerinaException(message);
                                }
                            } else {
                                String message = String.format(
                                        "Data provider function [%s] should return an array of arrays.",
                                        dataProvider);
                                throw new BallerinaException(message);
                            }
                        } else {
                            String message = String.format(
                                    "Data provider function [%s] should have only one return type.",
                                    dataProvider);
                            throw new BallerinaException(message);
                        }
                        return func;
                    }).get());
            // NOTE(review): this null check is unreachable — Optional.get() above
            // would already have thrown for an absent function. Kept as a safety net.
            if (test.getDataProviderFunction() == null) {
                String message = String.format("Data provider function [%s] cannot be found.", dataProvider);
                throw new BallerinaException(message);
            }
        }
        // dependsOn references must resolve to known functions.
        for (String dependsOnFn : test.getDependsOnTestFunctions()) {
            // FIX: dropped parallel() — fork/join overhead is pointless for a small
            // in-memory list — and replaced !anyMatch(...) with noneMatch(...).
            if (functions.stream().noneMatch(func -> func.getName().equals(dependsOnFn))) {
                throw new BallerinaException("Cannot find the specified dependsOn function : " + dependsOnFn);
            }
            test.addDependsOnTestFunction(functions.stream()
                    .filter(e -> e.getName().equals(dependsOnFn))
                    .findFirst().get());
        }
    }
    // Suite-level fixture functions; names were recorded during annotation
    // processing, so each lookup is expected to succeed.
    suite.getMockFunctionNamesMap().forEach((id, functionName) -> {
        TesterinaFunction function = suite.getTestUtilityFunctions().stream()
                .filter(e -> e.getName().equals(functionName)).findFirst().get();
        suite.addMockFunctionObj(id, function);
    });
    suite.getBeforeSuiteFunctionNames().forEach(functionName -> {
        TesterinaFunction function = suite.getTestUtilityFunctions().stream()
                .filter(e -> e.getName().equals(functionName)).findFirst().get();
        suite.addBeforeSuiteFunctionObj(function);
    });
    suite.getAfterSuiteFunctionNames().forEach(functionName -> {
        TesterinaFunction function = suite.getTestUtilityFunctions().stream()
                .filter(e -> e.getName().equals(functionName)).findFirst().get();
        suite.addAfterSuiteFunctionObj(function);
    });
    suite.getBeforeEachFunctionNames().forEach(functionName -> {
        TesterinaFunction function = suite.getTestUtilityFunctions().stream()
                .filter(e -> e.getName().equals(functionName)).findFirst().get();
        suite.addBeforeEachFunctionObj(function);
    });
    suite.getAfterEachFunctionNames().forEach(functionName -> {
        TesterinaFunction function = suite.getTestUtilityFunctions().stream()
                .filter(e -> e.getName().equals(functionName)).findFirst().get();
        suite.addAfterEachFunctionObj(function);
    });
}
class TestAnnotationProcessor extends AbstractCompilerPlugin { private static final String TEST_ANNOTATION_NAME = "Config"; private static final String BEFORE_SUITE_ANNOTATION_NAME = "BeforeSuite"; private static final String AFTER_SUITE_ANNOTATION_NAME = "AfterSuite"; private static final String BEFORE_EACH_ANNOTATION_NAME = "BeforeEach"; private static final String AFTER_EACH_ANNOTATION_NAME = "AfterEach"; private static final String MOCK_ANNOTATION_NAME = "Mock"; private static final String BEFORE_FUNCTION = "before"; private static final String AFTER_FUNCTION = "after"; private static final String DEPENDS_ON_FUNCTIONS = "dependsOn"; private static final String PACKAGE = "packageName"; private static final String FUNCTION = "functionName"; private static final String GROUP_ANNOTATION_NAME = "groups"; private static final String VALUE_SET_ANNOTATION_NAME = "dataProvider"; private static final String TEST_ENABLE_ANNOTATION_NAME = "enable"; private static final String MOCK_ANNOTATION_DELIMITER = " private TesterinaRegistry registry = TesterinaRegistry.getInstance(); private TestSuite suite; private boolean enabled = true; /** * this property is used as a work-around to initialize test suites only once for a package as Compiler * Annotation currently emits package import events too to the process method. */ private boolean packageInit; @Override public void init(DiagnosticLog diagnosticLog) { if (registry.getInstance().isTestSuitesCompiled()) { enabled = false; } } @Override public void process(PackageNode packageNode) { if (!enabled) { return; } if (!packageInit) { String packageName = ((BLangPackage) packageNode).packageID == null ? "." 
: ((BLangPackage) packageNode) .packageID.getName().getValue(); suite = registry.getTestSuites().computeIfAbsent(packageName, func -> new TestSuite(packageName)); packageInit = true; } } @Override public void process(FunctionNode functionNode, List<AnnotationAttachmentNode> annotations) { if (!enabled) { return; } if (!suite.getSuiteName().equals(functionNode.getPosition().getSource().getPackageName())) { return; } for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); String functionName = functionNode.getName().getValue(); if (BEFORE_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeSuiteFunction(functionName); } else if (AFTER_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterSuiteFunction(functionName); } else if (BEFORE_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeEachFunction(functionName); } else if (AFTER_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterEachFunction(functionName); } else if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String[] vals = new String[2]; vals[0] = "."; if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<BLangRecordLiteral.BLangRecordKeyValue> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getKeyValuePairs(); attributes.forEach(attributeNode -> { String name = attributeNode.getKey().toString(); String value = attributeNode.getValue().toString(); if (PACKAGE.equals(name)) { vals[0] = value; } else if (FUNCTION.equals(name)) { vals[1] = value; } }); suite.addMockFunction(vals[0] + MOCK_ANNOTATION_DELIMITER + vals[1], functionName); } } else if (TEST_ANNOTATION_NAME.equals(annotationName)) { Test test = new Test(); test.setTestName(functionName); AtomicBoolean shouldSkip = new AtomicBoolean(); AtomicBoolean groupsFound = new AtomicBoolean(); List<String> groups = registry.getGroups(); boolean shouldIncludeGroups = registry.shouldIncludeGroups(); if 
(attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<BLangRecordLiteral.BLangRecordKeyValue> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getKeyValuePairs(); attributes.forEach(attributeNode -> { String name = attributeNode.getKey().toString(); if (TEST_ENABLE_ANNOTATION_NAME.equals(name) && "false".equals(attributeNode.getValue() .toString())) { shouldSkip.set(true); return; } if (GROUP_ANNOTATION_NAME.equals(name)) { if (attributeNode.getValue() instanceof BLangArrayLiteral) { BLangArrayLiteral values = (BLangArrayLiteral) attributeNode.getValue(); test.setGroups(values.exprs.stream().map(node -> node.toString()) .collect(Collectors.toList())); if (groups != null && !groups.isEmpty()) { boolean isGroupPresent = isGroupAvailable(groups, test.getGroups()); if (shouldIncludeGroups) { if (!isGroupPresent) { shouldSkip.set(true); return; } } else { if (isGroupPresent) { shouldSkip.set(true); return; } } groupsFound.set(true); } } } if (VALUE_SET_ANNOTATION_NAME.equals(name)) { test.setDataProvider(attributeNode.getValue().toString()); } if (BEFORE_FUNCTION.equals(name)) { test.setBeforeTestFunction(attributeNode.getValue().toString()); } if (AFTER_FUNCTION.equals(name)) { test.setAfterTestFunction(attributeNode.getValue().toString()); } if (DEPENDS_ON_FUNCTIONS.equals(name)) { if (attributeNode.getValue() instanceof BLangArrayLiteral) { BLangArrayLiteral values = (BLangArrayLiteral) attributeNode.getValue(); values.exprs.stream().map(node -> node.toString()).forEach (test::addDependsOnTestFunction); } } }); } if (groups != null && !groups.isEmpty() && !groupsFound.get() && shouldIncludeGroups) { shouldSkip.set(true); } if (!shouldSkip.get()) { suite.addTests(test); } } else { } } } /** * TODO this is a temporary solution, till we get a proper API from Ballerina Core. * This method will get executed at the completion of the processing of a ballerina package. 
* * @param programFile {@link ProgramFile} corresponds to the current ballerina package */ public void packageProcessed(ProgramFile programFile) { if (!enabled) { return; } packageInit = false; suite = TesterinaRegistry.getInstance().getTestSuites().get(programFile.getEntryPkgName()); if (suite == null) { throw new BallerinaException("No test suite found for [package]: " + programFile.getEntryPkgName()); } suite.setInitFunction(new TesterinaFunction(programFile, programFile.getEntryPackage().getInitFunctionInfo(), TesterinaFunction.Type.INIT)); Arrays.stream(programFile.getEntryPackage().getFunctionInfoEntries()).forEach(functionInfo -> { suite.addTestUtilityFunction(new TesterinaFunction(programFile, functionInfo, TesterinaFunction.Type.UTIL)); }); resolveFunctions(suite); int[] testExecutionOrder = checkCyclicDependencies(suite.getTests()); List<Test> sortedTests = orderTests(suite.getTests(), testExecutionOrder); suite.setTests(sortedTests); suite.setProgramFile(programFile); } /** * Process a given {@link TestSuite} and inject the user defined mock functions. * * @param suite a @{@link TestSuite} */ public static void injectMocks(TestSuite suite) { ProgramFile programFile = suite.getProgramFile(); Map<String, TesterinaFunction> mockFunctions = suite.getMockFunctionsMap(); mockFunctions.forEach((k, v) -> { String[] info = k.split(MOCK_ANNOTATION_DELIMITER); if (info.length != 2) { return; } for (PackageInfo packageInfo : programFile.getPackageInfoEntries()) { for (Instruction ins : packageInfo.getInstructions()) { if (ins instanceof Instruction.InstructionCALL) { Instruction.InstructionCALL call = (Instruction.InstructionCALL) ins; if (call.functionInfo.getPkgPath().equals(info[0]) && call.functionInfo.getName().equals (info[1])) { suite.addMockedRealFunction(k, call.functionInfo); call.functionInfo = v.getbFunction(); } } } } }); } /** * Process a given {@link TestSuite} and reset the mock functions with their original pointers. 
* * @param suite a @{@link TestSuite} */ public static void resetMocks(TestSuite suite) { ProgramFile programFile = suite.getProgramFile(); Map<String, TesterinaFunction> mockFunctions = suite.getMockFunctionsMap(); Map<String, FunctionInfo> mockedRealFunctionsMap = suite.getMockedRealFunctionsMap(); mockFunctions.forEach((k, v) -> { String[] info = k.split(MOCK_ANNOTATION_DELIMITER); if (info.length != 2) { return; } for (PackageInfo packageInfo : programFile.getPackageInfoEntries()) { for (Instruction ins : packageInfo.getInstructions()) { if (ins instanceof Instruction.InstructionCALL) { Instruction.InstructionCALL call = (Instruction.InstructionCALL) ins; if (call.functionInfo.getPkgPath().equals(info[0]) && call.functionInfo.getName().equals (info[1])) { call.functionInfo = mockedRealFunctionsMap.get(k); } } } } }); } private static List<Test> orderTests(List<Test> tests, int[] testExecutionOrder) { List<Test> sortedTests = new ArrayList<>(); for (int idx : testExecutionOrder) { sortedTests.add(tests.get(idx)); } return sortedTests; } /** * Resolve function names to {@link TesterinaFunction}s. * * @param suite {@link TestSuite} whose functions to be resolved. 
*/ private static int[] checkCyclicDependencies(List<Test> tests) { int numberOfNodes = tests.size(); int[] indegrees = new int[numberOfNodes]; int[] sortedElts = new int[numberOfNodes]; List<Integer> dependencyMatrix[] = new ArrayList[numberOfNodes]; for (int i = 0; i < numberOfNodes; i++) { dependencyMatrix[i] = new ArrayList<>(); } List<String> testNames = tests.stream().map(k -> k.getTestName()).collect(Collectors.toList()); int i = 0; for (Test test : tests) { if (!test.getDependsOnTestFunctions().isEmpty()) { for (String dependsOnFn : test.getDependsOnTestFunctions()) { int idx = testNames.indexOf(dependsOnFn); if (idx == -1) { String message = String.format("Test [%s] depends on function [%s], but it couldn't be found" + ".", test.getTestFunction().getName(), dependsOnFn); throw new BallerinaException(message); } dependencyMatrix[i].add(idx); } } i++; } for (int j = 0; j < numberOfNodes; j++) { List<Integer> dependencies = dependencyMatrix[j]; for (int node : dependencies) { indegrees[node]++; } } Queue<Integer> q = new LinkedList<Integer>(); for (i = 0; i < numberOfNodes; i++) { if (indegrees[i] == 0) { q.add(i); } } int cnt = 0; Vector<Integer> topOrder = new Vector<Integer>(); while (!q.isEmpty()) { int u = q.poll(); topOrder.add(u); for (int node : dependencyMatrix[u]) { if (--indegrees[node] == 0) { q.add(node); } } cnt++; } if (cnt != numberOfNodes) { String message = "Cyclic test dependency detected"; throw new BallerinaException(message); } i = numberOfNodes - 1; for (int elt : topOrder) { sortedElts[i] = elt; i--; } return sortedElts; } /** * Check whether there is a common element in two Lists. 
* * @param inputGroups String @{@link List} to match * @param functionGroups String @{@link List} to match agains * @return true if a match is found */ private boolean isGroupAvailable(List<String> inputGroups, List<String> functionGroups) { for (String group : inputGroups) { for (String funcGroup : functionGroups) { if (group.equals(funcGroup)) { return true; } } } return false; } }
class TestAnnotationProcessor extends AbstractCompilerPlugin { private static final String TEST_ANNOTATION_NAME = "Config"; private static final String BEFORE_SUITE_ANNOTATION_NAME = "BeforeSuite"; private static final String AFTER_SUITE_ANNOTATION_NAME = "AfterSuite"; private static final String BEFORE_EACH_ANNOTATION_NAME = "BeforeEach"; private static final String AFTER_EACH_ANNOTATION_NAME = "AfterEach"; private static final String MOCK_ANNOTATION_NAME = "Mock"; private static final String BEFORE_FUNCTION = "before"; private static final String AFTER_FUNCTION = "after"; private static final String DEPENDS_ON_FUNCTIONS = "dependsOn"; private static final String PACKAGE = "packageName"; private static final String FUNCTION = "functionName"; private static final String GROUP_ANNOTATION_NAME = "groups"; private static final String VALUE_SET_ANNOTATION_NAME = "dataProvider"; private static final String TEST_ENABLE_ANNOTATION_NAME = "enable"; private static final String MOCK_ANNOTATION_DELIMITER = " private TesterinaRegistry registry = TesterinaRegistry.getInstance(); private TestSuite suite; private boolean enabled = true; /** * this property is used as a work-around to initialize test suites only once for a package as Compiler * Annotation currently emits package import events too to the process method. */ private boolean packageInit; @Override public void init(DiagnosticLog diagnosticLog) { if (registry.getInstance().isTestSuitesCompiled()) { enabled = false; } } @Override public void process(PackageNode packageNode) { if (!enabled) { return; } if (!packageInit) { String packageName = ((BLangPackage) packageNode).packageID == null ? "." 
: ((BLangPackage) packageNode) .packageID.getName().getValue(); suite = registry.getTestSuites().computeIfAbsent(packageName, func -> new TestSuite(packageName)); packageInit = true; } } @Override public void process(FunctionNode functionNode, List<AnnotationAttachmentNode> annotations) { if (!enabled) { return; } if (!suite.getSuiteName().equals(functionNode.getPosition().getSource().getPackageName())) { return; } for (AnnotationAttachmentNode attachmentNode : annotations) { String annotationName = attachmentNode.getAnnotationName().getValue(); String functionName = functionNode.getName().getValue(); if (BEFORE_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeSuiteFunction(functionName); } else if (AFTER_SUITE_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterSuiteFunction(functionName); } else if (BEFORE_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addBeforeEachFunction(functionName); } else if (AFTER_EACH_ANNOTATION_NAME.equals(annotationName)) { suite.addAfterEachFunction(functionName); } else if (MOCK_ANNOTATION_NAME.equals(annotationName)) { String[] vals = new String[2]; vals[0] = "."; if (attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<BLangRecordLiteral.BLangRecordKeyValue> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getKeyValuePairs(); attributes.forEach(attributeNode -> { String name = attributeNode.getKey().toString(); String value = attributeNode.getValue().toString(); if (PACKAGE.equals(name)) { vals[0] = value; } else if (FUNCTION.equals(name)) { vals[1] = value; } }); suite.addMockFunction(vals[0] + MOCK_ANNOTATION_DELIMITER + vals[1], functionName); } } else if (TEST_ANNOTATION_NAME.equals(annotationName)) { Test test = new Test(); test.setTestName(functionName); AtomicBoolean shouldSkip = new AtomicBoolean(); AtomicBoolean groupsFound = new AtomicBoolean(); List<String> groups = registry.getGroups(); boolean shouldIncludeGroups = registry.shouldIncludeGroups(); if 
(attachmentNode.getExpression() instanceof BLangRecordLiteral) { List<BLangRecordLiteral.BLangRecordKeyValue> attributes = ((BLangRecordLiteral) attachmentNode .getExpression()).getKeyValuePairs(); attributes.forEach(attributeNode -> { String name = attributeNode.getKey().toString(); if (TEST_ENABLE_ANNOTATION_NAME.equals(name) && "false".equals(attributeNode.getValue() .toString())) { shouldSkip.set(true); return; } if (GROUP_ANNOTATION_NAME.equals(name)) { if (attributeNode.getValue() instanceof BLangArrayLiteral) { BLangArrayLiteral values = (BLangArrayLiteral) attributeNode.getValue(); test.setGroups(values.exprs.stream().map(node -> node.toString()) .collect(Collectors.toList())); if (groups != null && !groups.isEmpty()) { boolean isGroupPresent = isGroupAvailable(groups, test.getGroups()); if (shouldIncludeGroups) { if (!isGroupPresent) { shouldSkip.set(true); return; } } else { if (isGroupPresent) { shouldSkip.set(true); return; } } groupsFound.set(true); } } } if (VALUE_SET_ANNOTATION_NAME.equals(name)) { test.setDataProvider(attributeNode.getValue().toString()); } if (BEFORE_FUNCTION.equals(name)) { test.setBeforeTestFunction(attributeNode.getValue().toString()); } if (AFTER_FUNCTION.equals(name)) { test.setAfterTestFunction(attributeNode.getValue().toString()); } if (DEPENDS_ON_FUNCTIONS.equals(name)) { if (attributeNode.getValue() instanceof BLangArrayLiteral) { BLangArrayLiteral values = (BLangArrayLiteral) attributeNode.getValue(); values.exprs.stream().map(node -> node.toString()).forEach (test::addDependsOnTestFunction); } } }); } if (groups != null && !groups.isEmpty() && !groupsFound.get() && shouldIncludeGroups) { shouldSkip.set(true); } if (!shouldSkip.get()) { suite.addTests(test); } } else { } } } /** * TODO this is a temporary solution, till we get a proper API from Ballerina Core. * This method will get executed at the completion of the processing of a ballerina package. 
* * @param programFile {@link ProgramFile} corresponds to the current ballerina package */ public void packageProcessed(ProgramFile programFile) { if (!enabled) { return; } packageInit = false; suite = TesterinaRegistry.getInstance().getTestSuites().get(programFile.getEntryPkgName()); if (suite == null) { throw new BallerinaException("No test suite found for [package]: " + programFile.getEntryPkgName()); } suite.setInitFunction(new TesterinaFunction(programFile, programFile.getEntryPackage().getInitFunctionInfo(), TesterinaFunction.Type.INIT)); Arrays.stream(programFile.getEntryPackage().getFunctionInfoEntries()).forEach(functionInfo -> { suite.addTestUtilityFunction(new TesterinaFunction(programFile, functionInfo, TesterinaFunction.Type.UTIL)); }); resolveFunctions(suite); int[] testExecutionOrder = checkCyclicDependencies(suite.getTests()); List<Test> sortedTests = orderTests(suite.getTests(), testExecutionOrder); suite.setTests(sortedTests); suite.setProgramFile(programFile); } /** * Process a given {@link TestSuite} and inject the user defined mock functions. * * @param suite a @{@link TestSuite} */ public static void injectMocks(TestSuite suite) { ProgramFile programFile = suite.getProgramFile(); Map<String, TesterinaFunction> mockFunctions = suite.getMockFunctionsMap(); mockFunctions.forEach((k, v) -> { String[] info = k.split(MOCK_ANNOTATION_DELIMITER); if (info.length != 2) { return; } for (PackageInfo packageInfo : programFile.getPackageInfoEntries()) { for (Instruction ins : packageInfo.getInstructions()) { if (ins instanceof Instruction.InstructionCALL) { Instruction.InstructionCALL call = (Instruction.InstructionCALL) ins; if (call.functionInfo.getPkgPath().equals(info[0]) && call.functionInfo.getName().equals (info[1])) { suite.addMockedRealFunction(k, call.functionInfo); call.functionInfo = v.getbFunction(); } } } } }); } /** * Process a given {@link TestSuite} and reset the mock functions with their original pointers. 
* * @param suite a @{@link TestSuite} */ public static void resetMocks(TestSuite suite) { ProgramFile programFile = suite.getProgramFile(); Map<String, TesterinaFunction> mockFunctions = suite.getMockFunctionsMap(); Map<String, FunctionInfo> mockedRealFunctionsMap = suite.getMockedRealFunctionsMap(); mockFunctions.forEach((k, v) -> { String[] info = k.split(MOCK_ANNOTATION_DELIMITER); if (info.length != 2) { return; } for (PackageInfo packageInfo : programFile.getPackageInfoEntries()) { for (Instruction ins : packageInfo.getInstructions()) { if (ins instanceof Instruction.InstructionCALL) { Instruction.InstructionCALL call = (Instruction.InstructionCALL) ins; if (call.functionInfo.getPkgPath().equals(info[0]) && call.functionInfo.getName().equals (info[1])) { call.functionInfo = mockedRealFunctionsMap.get(k); } } } } }); } private static List<Test> orderTests(List<Test> tests, int[] testExecutionOrder) { List<Test> sortedTests = new ArrayList<>(); for (int idx : testExecutionOrder) { sortedTests.add(tests.get(idx)); } return sortedTests; } /** * Resolve function names to {@link TesterinaFunction}s. * * @param suite {@link TestSuite} whose functions to be resolved. 
*/ private static int[] checkCyclicDependencies(List<Test> tests) { int numberOfNodes = tests.size(); int[] indegrees = new int[numberOfNodes]; int[] sortedElts = new int[numberOfNodes]; List<Integer> dependencyMatrix[] = new ArrayList[numberOfNodes]; for (int i = 0; i < numberOfNodes; i++) { dependencyMatrix[i] = new ArrayList<>(); } List<String> testNames = tests.stream().map(k -> k.getTestName()).collect(Collectors.toList()); int i = 0; for (Test test : tests) { if (!test.getDependsOnTestFunctions().isEmpty()) { for (String dependsOnFn : test.getDependsOnTestFunctions()) { int idx = testNames.indexOf(dependsOnFn); if (idx == -1) { String message = String.format("Test [%s] depends on function [%s], but it couldn't be found" + ".", test.getTestFunction().getName(), dependsOnFn); throw new BallerinaException(message); } dependencyMatrix[i].add(idx); } } i++; } for (int j = 0; j < numberOfNodes; j++) { List<Integer> dependencies = dependencyMatrix[j]; for (int node : dependencies) { indegrees[node]++; } } Queue<Integer> q = new LinkedList<Integer>(); for (i = 0; i < numberOfNodes; i++) { if (indegrees[i] == 0) { q.add(i); } } int cnt = 0; Vector<Integer> topOrder = new Vector<Integer>(); while (!q.isEmpty()) { int u = q.poll(); topOrder.add(u); for (int node : dependencyMatrix[u]) { if (--indegrees[node] == 0) { q.add(node); } } cnt++; } if (cnt != numberOfNodes) { String message = "Cyclic test dependency detected"; throw new BallerinaException(message); } i = numberOfNodes - 1; for (int elt : topOrder) { sortedElts[i] = elt; i--; } return sortedElts; } /** * Check whether there is a common element in two Lists. 
* * @param inputGroups String @{@link List} to match * @param functionGroups String @{@link List} to match agains * @return true if a match is found */ private boolean isGroupAvailable(List<String> inputGroups, List<String> functionGroups) { for (String group : inputGroups) { for (String funcGroup : functionGroups) { if (group.equals(funcGroup)) { return true; } } } return false; } }
To be clear, all versions in a detail object will be of the same type: the Vault API does not allow mixing asymmetric and symmetric key versions. So, with your suggestion, you'd have two separate maps of versions, one of which would always be empty, and you'd have to check which map is non-empty to know which one to use. That's more work — and more confusing — than checking the detail object's type and casting it to get the correctly typed versions.
/**
 * Exercises the full lifecycle of a symmetric AES-256-GCM transit key:
 * create, list, read back and verify its attributes and version map,
 * allow deletion, then delete and confirm removal.
 */
public void symmetricReadAESKey() {
    assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME));
    transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setType("aes256-gcm96"));
    assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME));

    // FIX: use the wildcard-typed detail instead of the raw type to avoid an
    // unchecked-conversion warning; the concrete subtype is asserted just below.
    VaultTransitKeyDetail<?> mykey = transitSecretEngine.readKey(KEY_NAME);
    assertTrue(mykey instanceof VaultTransitSymmetricKeyDetail);
    assertEquals(KEY_NAME, mykey.getName());
    assertFalse(mykey.isExportable());
    assertFalse(mykey.isDeletionAllowed());
    assertTrue(mykey.isSupportsDecryption());
    assertTrue(mykey.isSupportsEncryption());
    assertTrue(mykey.isSupportsDerivation());
    assertFalse(mykey.isSupportsSigning());
    // FIX: assertEquals takes (expected, actual); the arguments were reversed,
    // which would produce a misleading failure message.
    assertEquals("aes256-gcm96", mykey.getType());
    assertEquals(1, mykey.getKeys().size());
    assertTrue(mykey.getKeys().containsKey("1"));
    assertEquals(1, mykey.getVersions().size());
    assertTrue(mykey.getVersions().containsKey("1"));
    assertNotNull(mykey.getVersions().get("1").getCreationTime());
    assertTrue(mykey.getVersions().get("1") instanceof VaultTransitSymmetricKeyVersion);
    assertEquals(1, mykey.getLatestVersion());
    assertEquals(0, mykey.getMinAvailableVersion());
    assertEquals(1, mykey.getMinDecryptionVersion());
    assertEquals(0, mykey.getMinEncryptionVersion());

    // Deletion is disallowed by default; enable it before deleting.
    transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true));
    mykey = transitSecretEngine.readKey(KEY_NAME);
    assertTrue(mykey.isDeletionAllowed());

    transitSecretEngine.deleteKey(KEY_NAME);
    assertNull(transitSecretEngine.readKey(KEY_NAME));
}
assertEquals(mykey.getType(), "aes256-gcm96");
/**
 * Exercises the full lifecycle of a symmetric AES-256-GCM transit key:
 * create, list, read back and verify its attributes and version map,
 * allow deletion, then delete and confirm removal.
 */
public void symmetricReadAESKey() {
    assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME));
    transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setType("aes256-gcm96"));
    assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME));

    // Wildcard-typed detail; the concrete subtype is asserted just below.
    VaultTransitKeyDetail<?> mykey = transitSecretEngine.readKey(KEY_NAME);
    assertTrue(mykey instanceof VaultTransitSymmetricKeyDetail);
    assertEquals(KEY_NAME, mykey.getName());
    assertFalse(mykey.isExportable());
    assertFalse(mykey.isDeletionAllowed());
    assertTrue(mykey.isSupportsDecryption());
    assertTrue(mykey.isSupportsEncryption());
    assertTrue(mykey.isSupportsDerivation());
    assertFalse(mykey.isSupportsSigning());
    // FIX: assertEquals takes (expected, actual); the arguments were reversed,
    // which would produce a misleading failure message.
    assertEquals("aes256-gcm96", mykey.getType());
    assertEquals(1, mykey.getKeys().size());
    assertTrue(mykey.getKeys().containsKey("1"));
    assertEquals(1, mykey.getVersions().size());
    assertTrue(mykey.getVersions().containsKey("1"));
    assertNotNull(mykey.getVersions().get("1").getCreationTime());
    assertTrue(mykey.getVersions().get("1") instanceof VaultTransitSymmetricKeyVersion);
    assertEquals(1, mykey.getLatestVersion());
    assertEquals(0, mykey.getMinAvailableVersion());
    assertEquals(1, mykey.getMinDecryptionVersion());
    assertEquals(0, mykey.getMinEncryptionVersion());

    // Deletion is disallowed by default; enable it before deleting.
    transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true));
    mykey = transitSecretEngine.readKey(KEY_NAME);
    assertTrue(mykey.isDeletionAllowed());

    transitSecretEngine.deleteKey(KEY_NAME);
    assertNull(transitSecretEngine.readKey(KEY_NAME));
}
class VaultTransitITCase { private static final Logger log = Logger.getLogger(VaultTransitITCase.class); public static final String COUCOU = "coucou"; public static final String NEW_KEY = "new-key"; @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addAsResource("application-vault.properties", "application.properties")); public static final String KEY_NAME = "mykey"; private TransitContext context = TransitContext.fromContext("my context"); private ClearData data = new ClearData(COUCOU); private SigningInput input = new SigningInput(COUCOU); @Inject VaultTransitSecretEngine transitSecretEngine; @Inject VaultAuthManager vaultAuthManager; @Test public void encryptionString() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, COUCOU); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext); assertEquals(COUCOU, decrypted.asString()); } @Test public void encryptionBytes() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, data, null); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext, null); assertEquals(COUCOU, decrypted.asString()); } @Test public void encryptionContext() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_DERIVED_KEY_NAME, data, context); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_DERIVED_KEY_NAME, ciphertext, context); assertEquals(COUCOU, decrypted.asString()); } @Test public void encryptionBatch() { List<EncryptionRequest> encryptBatch = singletonList(new EncryptionRequest(data)); Map<EncryptionRequest, String> encryptList = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, encryptBatch); String ciphertext = getSingleValue(encryptList); List<DecryptionRequest> decryptBatch = singletonList(new DecryptionRequest(ciphertext)); Map<DecryptionRequest, ClearData> decryptList = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, 
decryptBatch); assertEquals(1, decryptList.size()); assertEquals(COUCOU, getSingleValue(decryptList).asString()); } @Test public void rewrapBatch() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, COUCOU); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext); assertEquals(COUCOU, decrypted.asString()); List<RewrappingRequest> rewrapBatch = singletonList(new RewrappingRequest(ciphertext)); Map<RewrappingRequest, String> rewrapBatchResult = transitSecretEngine.rewrap(ENCRYPTION_KEY_NAME, rewrapBatch); ciphertext = getSingleValue(rewrapBatchResult); decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext); assertEquals(COUCOU, decrypted.asString()); } @Test public void upsert() { String ciphertext = transitSecretEngine.encrypt(NEW_KEY, data, null); ClearData decrypted = transitSecretEngine.decrypt(NEW_KEY, ciphertext, null); assertEquals(COUCOU, decrypted.asString()); } @Test public void signString() { String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, null); } @Test public void signJws() { String signature = transitSecretEngine.sign("jws", input, null); transitSecretEngine.verifySignature("jws", signature, input, null); } @Test public void signBytes() { String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, null); } @Test public void signContext() { String signature = transitSecretEngine.sign(SIGN_DERIVATION_KEY_NAME, input, context); transitSecretEngine.verifySignature(SIGN_DERIVATION_KEY_NAME, signature, input, context); } @Test public void signBatch() { List<SigningRequest> batch = singletonList(new SigningRequest(input)); Map<SigningRequest, String> signatures = transitSecretEngine.sign(SIGN_KEY_NAME, batch); assertEquals(1, signatures.size()); String signature = getSingleValue(signatures); List<VerificationRequest> 
batchVerify = singletonList(new VerificationRequest(signature, input)); transitSecretEngine.verifySignature(SIGN_KEY_NAME, batchVerify); } @Test public void keyVersionEncryption() { rotate(ENCRYPTION_KEY2_NAME); String encryptV1 = encrypt(1); assertTrue(encryptV1.startsWith("vault:v1")); assertEquals(COUCOU, decrypt(encryptV1)); String rewraped = transitSecretEngine.rewrap(ENCRYPTION_KEY2_NAME, encryptV1, null); assertTrue(rewraped.startsWith("vault:v2")); String encryptV2 = encrypt(2); assertTrue(encryptV2.startsWith("vault:v2")); assertEquals(COUCOU, decrypt(encryptV2)); } private void rotate(String keyName) { String clientToken = vaultAuthManager.getClientToken(); new TestVaultClient().rotate(clientToken, keyName); } private String encrypt(int keyVersion) { EncryptionRequest request = new EncryptionRequest(data, keyVersion); List<EncryptionRequest> encryptBatch = singletonList(request); Map<EncryptionRequest, String> encryptList = transitSecretEngine.encrypt(ENCRYPTION_KEY2_NAME, encryptBatch); String ciphertext = getSingleValue(encryptList); return ciphertext; } private String decrypt(String ciphertext) { DecryptionRequest request = new DecryptionRequest(ciphertext); List<DecryptionRequest> decryptBatch = singletonList(request); Map<DecryptionRequest, ClearData> decryptList = transitSecretEngine.decrypt(ENCRYPTION_KEY2_NAME, decryptBatch); return getSingleValue(decryptList).asString(); } @Test public void keyVersionSign() { rotate(SIGN_KEY2_NAME); String sign1 = sign(1); assertTrue(sign1.startsWith("vault:v1")); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, sign1, input, null); String sign2 = sign(2); assertTrue(sign2.startsWith("vault:v2")); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, sign2, input, null); } @Test public void keyVersionSignBatch() { SigningRequest signingRequest1 = new SigningRequest(input, 1); SigningRequest signingRequest2 = new SigningRequest(input, 2); List<SigningRequest> signingRequests = Arrays.asList(signingRequest1, 
signingRequest2); Map<SigningRequest, String> signatures = transitSecretEngine.sign(SIGN_KEY2_NAME, signingRequests); assertEquals(2, signatures.size()); String sign1 = signatures.get(signingRequest1); String sign2 = signatures.get(signingRequest2); assertTrue(sign1.startsWith("vault:v1")); assertTrue(sign2.startsWith("vault:v2")); VerificationRequest verificationRequest1 = new VerificationRequest(sign1, input); VerificationRequest verificationRequest2 = new VerificationRequest(sign2, input); List<VerificationRequest> verificationRequests = Arrays.asList(verificationRequest1, verificationRequest2); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, verificationRequests); } private String sign(int keyVersion) { SigningRequest request = new SigningRequest(input, keyVersion); Map<SigningRequest, String> signingResults = transitSecretEngine.sign(SIGN_KEY2_NAME, singletonList(request)); String signature = getSingleValue(signingResults); return signature; } @Test public void verifySignatureInvalid() { String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, null); SigningInput otherInput = new SigningInput("some other input"); try { transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, otherInput, null); fail(); } catch (VaultException e) { assertEquals(INVALID_SIGNATURE, e.getMessage()); } VerificationRequest request = new VerificationRequest(signature, otherInput); try { transitSecretEngine.verifySignature(SIGN_KEY_NAME, Arrays.asList(request)); fail(); } catch (VaultVerificationBatchException e) { assertTrue(e.getValid().isEmpty()); assertEquals(1, e.getErrors().size()); assertEquals(INVALID_SIGNATURE, e.getErrors().get(request)); } } @Test public void bigSignBatch() { List<SigningRequest> signingRequests = IntStream.range(0, 1000) .mapToObj(i -> new SigningRequest(new SigningInput("coucou" + i))) .collect(toList()); Map<SigningRequest, String> signatures = transitSecretEngine.sign(SIGN_KEY_NAME, signingRequests); List<VerificationRequest> 
verificationRequests = signatures.entrySet().stream() .map(e -> new VerificationRequest(e.getValue(), e.getKey().getInput())) .collect(toList()); transitSecretEngine.verifySignature(SIGN_KEY_NAME, verificationRequests); } private <K, V> V getSingleValue(Map<K, V> map) { assertEquals(1, map.size()); return map.values().stream().findFirst().get(); } @Test public void adminKey() { assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME)); transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setExportable(true)); assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME)); VaultTransitKeyDetail mykey = transitSecretEngine.readKey(KEY_NAME); assertEquals(KEY_NAME, mykey.getName()); assertTrue(mykey.isExportable()); assertFalse(mykey.isDeletionAllowed()); assertTrue(mykey.isSupportsDecryption()); assertTrue(mykey.isSupportsEncryption()); assertTrue(mykey.isSupportsDerivation()); assertEquals(1, mykey.getKeys().size()); assertTrue(mykey.getKeys().containsKey("1")); assertEquals(1, mykey.getVersions().size()); assertTrue(mykey.getVersions().containsKey("1")); assertEquals(1, mykey.getMinDecryptionVersion()); assertEquals(0, mykey.getMinEncryptionVersion()); VaultTransitKeyExportDetail exportDetail = transitSecretEngine.exportKey(KEY_NAME, encryption, "1"); assertEquals(KEY_NAME, exportDetail.getName()); assertEquals(1, exportDetail.getKeys().size()); assertTrue(exportDetail.getKeys().containsKey("1")); transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true)); mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey.isDeletionAllowed()); transitSecretEngine.deleteKey(KEY_NAME); assertNull(transitSecretEngine.readKey(KEY_NAME)); } @Test public void asymmetricReadECDSAKey() { assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME)); transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setType("ecdsa-p256")); assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME)); 
VaultTransitKeyDetail mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey instanceof VaultTransitAsymmetricKeyDetail); assertEquals(KEY_NAME, mykey.getName()); assertFalse(mykey.isExportable()); assertFalse(mykey.isDeletionAllowed()); assertFalse(mykey.isSupportsDecryption()); assertFalse(mykey.isSupportsEncryption()); assertFalse(mykey.isSupportsDerivation()); assertTrue(mykey.isSupportsSigning()); assertEquals(mykey.getType(), "ecdsa-p256"); assertEquals(1, mykey.getKeys().size()); assertTrue(mykey.getKeys().containsKey("1")); assertEquals(1, mykey.getVersions().size()); assertTrue(mykey.getVersions().containsKey("1")); assertNotNull(mykey.getVersions().get("1").getCreationTime()); assertTrue(mykey.getVersions().get("1") instanceof VaultTransitAsymmetricKeyVersion); assertNotNull(((VaultTransitAsymmetricKeyVersion) mykey.getVersions().get("1")).getPublicKey()); assertEquals(1, mykey.getLatestVersion()); assertEquals(0, mykey.getMinAvailableVersion()); assertEquals(1, mykey.getMinDecryptionVersion()); assertEquals(0, mykey.getMinEncryptionVersion()); transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true)); mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey.isDeletionAllowed()); transitSecretEngine.deleteKey(KEY_NAME); assertNull(transitSecretEngine.readKey(KEY_NAME)); } @Test public void asymmetricReadRSAKey() { assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME)); transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setType("rsa-2048")); assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME)); VaultTransitKeyDetail mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey instanceof VaultTransitAsymmetricKeyDetail); assertEquals(KEY_NAME, mykey.getName()); assertFalse(mykey.isExportable()); assertFalse(mykey.isDeletionAllowed()); assertTrue(mykey.isSupportsDecryption()); assertTrue(mykey.isSupportsEncryption()); 
assertFalse(mykey.isSupportsDerivation()); assertTrue(mykey.isSupportsSigning()); assertEquals("rsa-2048", mykey.getType()); assertEquals(1, mykey.getKeys().size()); assertTrue(mykey.getKeys().containsKey("1")); assertEquals(1, mykey.getVersions().size()); assertTrue(mykey.getVersions().containsKey("1")); assertNotNull(mykey.getVersions().get("1").getCreationTime()); assertTrue(mykey.getVersions().get("1") instanceof VaultTransitAsymmetricKeyVersion); assertNotNull(((VaultTransitAsymmetricKeyVersion) mykey.getVersions().get("1")).getPublicKey()); assertEquals(1, mykey.getLatestVersion()); assertEquals(0, mykey.getMinAvailableVersion()); assertEquals(1, mykey.getMinDecryptionVersion()); assertEquals(0, mykey.getMinEncryptionVersion()); transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true)); mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey.isDeletionAllowed()); transitSecretEngine.deleteKey(KEY_NAME); assertNull(transitSecretEngine.readKey(KEY_NAME)); } @Test }
class VaultTransitITCase { private static final Logger log = Logger.getLogger(VaultTransitITCase.class); public static final String COUCOU = "coucou"; public static final String NEW_KEY = "new-key"; @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class) .addAsResource("application-vault.properties", "application.properties")); public static final String KEY_NAME = "mykey"; private TransitContext context = TransitContext.fromContext("my context"); private ClearData data = new ClearData(COUCOU); private SigningInput input = new SigningInput(COUCOU); @Inject VaultTransitSecretEngine transitSecretEngine; @Inject VaultAuthManager vaultAuthManager; @Test public void encryptionString() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, COUCOU); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext); assertEquals(COUCOU, decrypted.asString()); } @Test public void encryptionBytes() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, data, null); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext, null); assertEquals(COUCOU, decrypted.asString()); } @Test public void encryptionContext() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_DERIVED_KEY_NAME, data, context); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_DERIVED_KEY_NAME, ciphertext, context); assertEquals(COUCOU, decrypted.asString()); } @Test public void encryptionBatch() { List<EncryptionRequest> encryptBatch = singletonList(new EncryptionRequest(data)); Map<EncryptionRequest, String> encryptList = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, encryptBatch); String ciphertext = getSingleValue(encryptList); List<DecryptionRequest> decryptBatch = singletonList(new DecryptionRequest(ciphertext)); Map<DecryptionRequest, ClearData> decryptList = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, 
decryptBatch); assertEquals(1, decryptList.size()); assertEquals(COUCOU, getSingleValue(decryptList).asString()); } @Test public void rewrapBatch() { String ciphertext = transitSecretEngine.encrypt(ENCRYPTION_KEY_NAME, COUCOU); ClearData decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext); assertEquals(COUCOU, decrypted.asString()); List<RewrappingRequest> rewrapBatch = singletonList(new RewrappingRequest(ciphertext)); Map<RewrappingRequest, String> rewrapBatchResult = transitSecretEngine.rewrap(ENCRYPTION_KEY_NAME, rewrapBatch); ciphertext = getSingleValue(rewrapBatchResult); decrypted = transitSecretEngine.decrypt(ENCRYPTION_KEY_NAME, ciphertext); assertEquals(COUCOU, decrypted.asString()); } @Test public void upsert() { String ciphertext = transitSecretEngine.encrypt(NEW_KEY, data, null); ClearData decrypted = transitSecretEngine.decrypt(NEW_KEY, ciphertext, null); assertEquals(COUCOU, decrypted.asString()); } @Test public void signString() { String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, null); } @Test public void signStringExplicitHashAlgorithmSha256() { SignVerifyOptions options = new SignVerifyOptions().setHashAlgorithm("sha2-256"); String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, options, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, options, null); } @Test public void signStringExplicitHashAlgorithmSha512() { SignVerifyOptions options = new SignVerifyOptions().setHashAlgorithm("sha2-512"); String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, options, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, options, null); } @Test public void signStringExplicitHashAlgorithmMismatched() { SignVerifyOptions options = new SignVerifyOptions().setHashAlgorithm("sha2-256"); String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, options, null); 
assertThrows(VaultException.class, () -> transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, options.setHashAlgorithm("sha1"), null)); } @Test public void signStringExplicitMarshalingAlgorithmASN1() { SignVerifyOptions options = new SignVerifyOptions().setMarshalingAlgorithm("asn1"); String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, options, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, options, null); } @Test public void signStringExplicitMarshalingAlgorithmJWS() { SignVerifyOptions options = new SignVerifyOptions().setMarshalingAlgorithm("jws"); String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, options, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, options, null); } @Test public void signStringExplicitMarshalingAlgorithmMismatched() { SignVerifyOptions options = new SignVerifyOptions().setMarshalingAlgorithm("jws"); String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, options, null); assertThrows(VaultException.class, () -> transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, options.setMarshalingAlgorithm("asn1"), null)); } @Test public void signStringExplicitSignatureAlgorithmPKCS1() { SignVerifyOptions options = new SignVerifyOptions().setSignatureAlgorithm("pkcs1v15"); String signature = transitSecretEngine.sign(SIGN_KEY2_NAME, input, options, null); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, signature, input, options, null); } @Test public void signStringExplicitSignatureAlgorithmPSS() { SignVerifyOptions options = new SignVerifyOptions().setSignatureAlgorithm("pss"); String signature = transitSecretEngine.sign(SIGN_KEY2_NAME, input, options, null); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, signature, input, options, null); } @Test public void signJws() { String signature = transitSecretEngine.sign("jws", input, null); transitSecretEngine.verifySignature("jws", signature, input, null); } @Test public 
void signBytes() { String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, null); transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, input, null); } @Test public void signContext() { String signature = transitSecretEngine.sign(SIGN_DERIVATION_KEY_NAME, input, context); transitSecretEngine.verifySignature(SIGN_DERIVATION_KEY_NAME, signature, input, context); } @Test public void signBatch() { List<SigningRequest> batch = singletonList(new SigningRequest(input)); Map<SigningRequest, String> signatures = transitSecretEngine.sign(SIGN_KEY_NAME, batch); assertEquals(1, signatures.size()); String signature = getSingleValue(signatures); List<VerificationRequest> batchVerify = singletonList(new VerificationRequest(signature, input)); transitSecretEngine.verifySignature(SIGN_KEY_NAME, batchVerify); } @Test public void keyVersionEncryption() { rotate(ENCRYPTION_KEY2_NAME); String encryptV1 = encrypt(1); assertTrue(encryptV1.startsWith("vault:v1")); assertEquals(COUCOU, decrypt(encryptV1)); String rewraped = transitSecretEngine.rewrap(ENCRYPTION_KEY2_NAME, encryptV1, null); assertTrue(rewraped.startsWith("vault:v2")); String encryptV2 = encrypt(2); assertTrue(encryptV2.startsWith("vault:v2")); assertEquals(COUCOU, decrypt(encryptV2)); } private void rotate(String keyName) { String clientToken = vaultAuthManager.getClientToken(); new TestVaultClient().rotate(clientToken, keyName); } private String encrypt(int keyVersion) { EncryptionRequest request = new EncryptionRequest(data, keyVersion); List<EncryptionRequest> encryptBatch = singletonList(request); Map<EncryptionRequest, String> encryptList = transitSecretEngine.encrypt(ENCRYPTION_KEY2_NAME, encryptBatch); String ciphertext = getSingleValue(encryptList); return ciphertext; } private String decrypt(String ciphertext) { DecryptionRequest request = new DecryptionRequest(ciphertext); List<DecryptionRequest> decryptBatch = singletonList(request); Map<DecryptionRequest, ClearData> decryptList = 
transitSecretEngine.decrypt(ENCRYPTION_KEY2_NAME, decryptBatch); return getSingleValue(decryptList).asString(); } @Test public void keyVersionSign() { rotate(SIGN_KEY2_NAME); String sign1 = sign(1); assertTrue(sign1.startsWith("vault:v1")); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, sign1, input, null); String sign2 = sign(2); assertTrue(sign2.startsWith("vault:v2")); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, sign2, input, null); } @Test public void keyVersionSignBatch() { SigningRequest signingRequest1 = new SigningRequest(input, 1); SigningRequest signingRequest2 = new SigningRequest(input, 2); List<SigningRequest> signingRequests = Arrays.asList(signingRequest1, signingRequest2); Map<SigningRequest, String> signatures = transitSecretEngine.sign(SIGN_KEY2_NAME, signingRequests); assertEquals(2, signatures.size()); String sign1 = signatures.get(signingRequest1); String sign2 = signatures.get(signingRequest2); assertTrue(sign1.startsWith("vault:v1")); assertTrue(sign2.startsWith("vault:v2")); VerificationRequest verificationRequest1 = new VerificationRequest(sign1, input); VerificationRequest verificationRequest2 = new VerificationRequest(sign2, input); List<VerificationRequest> verificationRequests = Arrays.asList(verificationRequest1, verificationRequest2); transitSecretEngine.verifySignature(SIGN_KEY2_NAME, verificationRequests); } private String sign(int keyVersion) { SigningRequest request = new SigningRequest(input, keyVersion); Map<SigningRequest, String> signingResults = transitSecretEngine.sign(SIGN_KEY2_NAME, singletonList(request)); String signature = getSingleValue(signingResults); return signature; } @Test public void verifySignatureInvalid() { String signature = transitSecretEngine.sign(SIGN_KEY_NAME, input, null); SigningInput otherInput = new SigningInput("some other input"); try { transitSecretEngine.verifySignature(SIGN_KEY_NAME, signature, otherInput, null); fail(); } catch (VaultException e) { assertEquals(INVALID_SIGNATURE, 
e.getMessage()); } VerificationRequest request = new VerificationRequest(signature, otherInput); try { transitSecretEngine.verifySignature(SIGN_KEY_NAME, Arrays.asList(request)); fail(); } catch (VaultVerificationBatchException e) { assertTrue(e.getValid().isEmpty()); assertEquals(1, e.getErrors().size()); assertEquals(INVALID_SIGNATURE, e.getErrors().get(request)); } } @Test public void bigSignBatch() { List<SigningRequest> signingRequests = IntStream.range(0, 1000) .mapToObj(i -> new SigningRequest(new SigningInput("coucou" + i))) .collect(toList()); Map<SigningRequest, String> signatures = transitSecretEngine.sign(SIGN_KEY_NAME, signingRequests); List<VerificationRequest> verificationRequests = signatures.entrySet().stream() .map(e -> new VerificationRequest(e.getValue(), e.getKey().getInput())) .collect(toList()); transitSecretEngine.verifySignature(SIGN_KEY_NAME, verificationRequests); } private <K, V> V getSingleValue(Map<K, V> map) { assertEquals(1, map.size()); return map.values().stream().findFirst().get(); } @Test public void adminKey() { assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME)); transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setExportable(true)); assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME)); VaultTransitKeyDetail mykey = transitSecretEngine.readKey(KEY_NAME); assertEquals(KEY_NAME, mykey.getName()); assertTrue(mykey.isExportable()); assertFalse(mykey.isDeletionAllowed()); assertTrue(mykey.isSupportsDecryption()); assertTrue(mykey.isSupportsEncryption()); assertTrue(mykey.isSupportsDerivation()); assertEquals(1, mykey.getKeys().size()); assertTrue(mykey.getKeys().containsKey("1")); assertEquals(1, mykey.getVersions().size()); assertTrue(mykey.getVersions().containsKey("1")); assertEquals(1, mykey.getMinDecryptionVersion()); assertEquals(0, mykey.getMinEncryptionVersion()); VaultTransitKeyExportDetail exportDetail = transitSecretEngine.exportKey(KEY_NAME, encryption, "1"); 
assertEquals(KEY_NAME, exportDetail.getName()); assertEquals(1, exportDetail.getKeys().size()); assertTrue(exportDetail.getKeys().containsKey("1")); transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true)); mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey.isDeletionAllowed()); transitSecretEngine.deleteKey(KEY_NAME); assertNull(transitSecretEngine.readKey(KEY_NAME)); } @Test public void asymmetricReadECDSAKey() { assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME)); transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setType("ecdsa-p256")); assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME)); VaultTransitKeyDetail<?> mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey instanceof VaultTransitAsymmetricKeyDetail); assertEquals(KEY_NAME, mykey.getName()); assertFalse(mykey.isExportable()); assertFalse(mykey.isDeletionAllowed()); assertFalse(mykey.isSupportsDecryption()); assertFalse(mykey.isSupportsEncryption()); assertFalse(mykey.isSupportsDerivation()); assertTrue(mykey.isSupportsSigning()); assertEquals(mykey.getType(), "ecdsa-p256"); assertEquals(1, mykey.getKeys().size()); assertTrue(mykey.getKeys().containsKey("1")); assertEquals(1, mykey.getVersions().size()); assertTrue(mykey.getVersions().containsKey("1")); assertNotNull(mykey.getVersions().get("1").getCreationTime()); assertTrue(mykey.getVersions().get("1") instanceof VaultTransitAsymmetricKeyVersion); assertNotNull(((VaultTransitAsymmetricKeyVersion) mykey.getVersions().get("1")).getPublicKey()); assertEquals(1, mykey.getLatestVersion()); assertEquals(0, mykey.getMinAvailableVersion()); assertEquals(1, mykey.getMinDecryptionVersion()); assertEquals(0, mykey.getMinEncryptionVersion()); transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true)); mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey.isDeletionAllowed()); 
transitSecretEngine.deleteKey(KEY_NAME); assertNull(transitSecretEngine.readKey(KEY_NAME)); } @Test public void asymmetricReadRSAKey() { assertFalse(transitSecretEngine.listKeys().contains(KEY_NAME)); transitSecretEngine.createKey(KEY_NAME, new KeyCreationRequestDetail().setType("rsa-2048")); assertTrue(transitSecretEngine.listKeys().contains(KEY_NAME)); VaultTransitKeyDetail<?> mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey instanceof VaultTransitAsymmetricKeyDetail); assertEquals(KEY_NAME, mykey.getName()); assertFalse(mykey.isExportable()); assertFalse(mykey.isDeletionAllowed()); assertTrue(mykey.isSupportsDecryption()); assertTrue(mykey.isSupportsEncryption()); assertFalse(mykey.isSupportsDerivation()); assertTrue(mykey.isSupportsSigning()); assertEquals("rsa-2048", mykey.getType()); assertEquals(1, mykey.getKeys().size()); assertTrue(mykey.getKeys().containsKey("1")); assertEquals(1, mykey.getVersions().size()); assertTrue(mykey.getVersions().containsKey("1")); assertNotNull(mykey.getVersions().get("1").getCreationTime()); assertTrue(mykey.getVersions().get("1") instanceof VaultTransitAsymmetricKeyVersion); assertNotNull(((VaultTransitAsymmetricKeyVersion) mykey.getVersions().get("1")).getPublicKey()); assertEquals(1, mykey.getLatestVersion()); assertEquals(0, mykey.getMinAvailableVersion()); assertEquals(1, mykey.getMinDecryptionVersion()); assertEquals(0, mykey.getMinEncryptionVersion()); transitSecretEngine.updateKeyConfiguration(KEY_NAME, new KeyConfigRequestDetail().setDeletionAllowed(true)); mykey = transitSecretEngine.readKey(KEY_NAME); assertTrue(mykey.isDeletionAllowed()); transitSecretEngine.deleteKey(KEY_NAME); assertNull(transitSecretEngine.readKey(KEY_NAME)); } @Test }
based on our discussion converting all the exception to SBRE .
private Throwable mapError(Throwable throwable, ServiceBusErrorSource errorSource) { if (!(throwable instanceof AmqpException) && !(throwable instanceof ServiceBusReceiverException)) { return new ServiceBusReceiverException(throwable, errorSource); } return throwable; }
if (!(throwable instanceof AmqpException) && !(throwable instanceof ServiceBusReceiverException)) {
private Throwable mapError(Throwable throwable, ServiceBusErrorSource errorSource) { if (!(throwable instanceof ServiceBusReceiverException)) { return new ServiceBusReceiverException(throwable, errorSource); } return throwable; }
class ServiceBusReceiverAsyncClient implements AutoCloseable {
    // Options applied when deadLetter(message) is called without explicit options.
    private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions();
    // AMQP session/link name used for the transaction coordinator.
    private static final String TRANSACTION_LINK_NAME = "coordinator";

    // Holds in-flight lock-renewal operations so expired ones can be closed on eviction.
    private final LockContainer<LockRenewalOperation> renewalContainer;
    // Flipped once on disposal; public operations check it and fail with IllegalStateException.
    private final AtomicBoolean isDisposed = new AtomicBoolean();
    // Lock tokens (with their expiry) for messages settled/renewed via the management node.
    private final LockContainer<OffsetDateTime> managementNodeLocks;
    private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class);
    private final String fullyQualifiedNamespace;
    private final String entityPath;
    private final MessagingEntityType entityType;
    private final ReceiverOptions receiverOptions;
    private final ServiceBusConnectionProcessor connectionProcessor;
    private final TracerProvider tracerProvider;
    private final MessageSerializer messageSerializer;
    // Invoked when this client is closed so the builder can release shared resources.
    private final Runnable onClientClose;
    // Non-null only for session-enabled receivers (set by the session constructor overload).
    private final ServiceBusSessionManager sessionManager;
    // Handed to FluxAutoComplete to coordinate auto-complete settlement of messages.
    private final Semaphore completionLock = new Semaphore(1);

    // Starts at -1 so the first peek begins at sequence number 0 (peek uses get() + 1).
    private final AtomicLong lastPeekedSequenceNumber = new AtomicLong(-1);
    // Lazily created consumer backing the non-session receive pipeline.
    private final AtomicReference<ServiceBusAsyncConsumer> consumer = new AtomicReference<>();

    /**
     * Creates a receiver that listens to a Service Bus resource.
     *
     * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource.
     * @param entityPath The name of the topic or queue.
     * @param entityType The type of the Service Bus resource.
     * @param receiverOptions Options when receiving messages.
     * @param connectionProcessor The AMQP connection to the Service Bus resource.
     * @param tracerProvider Tracer for telemetry.
     * @param messageSerializer Serializes and deserializes Service Bus messages.
     * @param onClientClose Operation to run when the client completes.
*/ ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType, ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval, TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null."); this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null."); this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveOptions cannot be null.'"); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null."); this.managementNodeLocks = new LockContainer<>(cleanupInterval); this.renewalContainer = new LockContainer<>(Duration.ofMinutes(2), renewal -> { logger.verbose("Closing expired renewal operation. lockToken[{}]. status[{}]. 
throwable[{}].", renewal.getLockToken(), renewal.getStatus(), renewal.getThrowable()); renewal.close(); }); this.sessionManager = null; } ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType, ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval, TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose, ServiceBusSessionManager sessionManager) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null."); this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null."); this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveOptions cannot be null.'"); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null."); this.sessionManager = Objects.requireNonNull(sessionManager, "'sessionManager' cannot be null."); this.managementNodeLocks = new LockContainer<>(cleanupInterval); this.renewalContainer = new LockContainer<>(Duration.ofMinutes(2), renewal -> { logger.info("Closing expired renewal operation. sessionId[{}]. status[{}]. throwable[{}]", renewal.getSessionId(), renewal.getStatus(), renewal.getThrowable()); renewal.close(); }); } /** * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Service Bus namespace that the connection is associated with. 
*/ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Service Bus resource this client interacts with. * * @return The Service Bus resource this client interacts with. */ public String getEntityPath() { return entityPath; } /** * Abandon a {@link ServiceBusReceivedMessage message}. This will make the message available * again for processing. Abandoning a message will increase the delivery count on the message. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that completes when the Service Bus abandon operation completes. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. */ public Mono<Void> abandon(ServiceBusReceivedMessage message) { return updateDisposition(message, DispositionStatus.ABANDONED, null, null, null, null); } /** * Abandon a {@link ServiceBusReceivedMessage message} updates the message's properties. * This will make the message available again for processing. Abandoning a message will increase the delivery count * on the message. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to abandon the message. You can specify * {@link AbandonOptions * {@code transactionContext} can be set using * {@link AbandonOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * * @return A {@link Mono} that completes when the Service Bus operation finishes. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. 
*/ public Mono<Void> abandon(ServiceBusReceivedMessage message, AbandonOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'settlementOptions' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.ABANDONED, null, null, options.getPropertiesToModify(), options.getTransactionContext()); } /** * Completes a {@link ServiceBusReceivedMessage message}. This will delete the message from the service. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that finishes when the message is completed on Service Bus. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. */ public Mono<Void> complete(ServiceBusReceivedMessage message) { return updateDisposition(message, DispositionStatus.COMPLETED, null, null, null, null); } /** * Completes a {@link ServiceBusReceivedMessage message}. This will delete the message from the * service. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to complete the message. The {@code transactionContext} can be set using * {@link CompleteOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * * @return A {@link Mono} that finishes when the message is completed on Service Bus. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. 
*/ public Mono<Void> complete(ServiceBusReceivedMessage message, CompleteOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.COMPLETED, null, null, null, options.getTransactionContext()); } /** * Defers a {@link ServiceBusReceivedMessage message}. This will move message into the deferred subqueue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that completes when the Service Bus defer operation finishes. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * @see <a href="https: */ public Mono<Void> defer(ServiceBusReceivedMessage message) { return updateDisposition(message, DispositionStatus.DEFERRED, null, null, null, null); } /** * Defers a {@link ServiceBusReceivedMessage message} with modified message property. This will move message into * the deferred subqueue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to defer the message. You can specify {@link DeferOptions * to modify on the Message. The {@code transactionContext} can be set using * {@link DeferOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * * @return A {@link Mono} that completes when the defer operation finishes. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. 
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @see <a href="https: */ public Mono<Void> defer(ServiceBusReceivedMessage message, DeferOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.DEFERRED, null, null, options.getPropertiesToModify(), options.getTransactionContext()); } /** * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that completes when the dead letter operation finishes. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @see <a href="https: * queues</a> */ public Mono<Void> deadLetter(ServiceBusReceivedMessage message) { return deadLetter(message, DEFAULT_DEAD_LETTER_OPTIONS); } /** * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to deadLetter the message. You can specify * {@link DeadLetterOptions * {@code transactionContext} can be set using * {@link DeadLetterOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * @return A {@link Mono} that completes when the dead letter operation finishes. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. 
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @see <a href="https: * queues</a> */ public Mono<Void> deadLetter(ServiceBusReceivedMessage message, DeadLetterOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.SUSPENDED, options.getDeadLetterReason(), options.getDeadLetterErrorDescription(), options.getPropertiesToModify(), options.getTransactionContext()); } /** * Gets the state of the session if this receiver is a session receiver. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. */ public Mono<byte[]> getSessionState() { return getSessionState(receiverOptions.getSessionId()); } /** * Reads the next active message without changing the state of the receiver or the message source. The first call to * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent * message in the entity. * * @return A peeked {@link ServiceBusReceivedMessage}. * @see <a href="https: */ public Mono<ServiceBusReceivedMessage> peekMessage() { return peekMessage(receiverOptions.getSessionId()); } /** * Reads the next active message without changing the state of the receiver or the message source. The first call to * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent * message in the entity. * * @param sessionId Session id of the message to peek from. {@code null} if there is no session. * * @return A peeked {@link ServiceBusReceivedMessage}. 
* @throws IllegalStateException if the receiver is disposed. * @see <a href="https: */ Mono<ServiceBusReceivedMessage> peekMessage(String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> { final long sequence = lastPeekedSequenceNumber.get() + 1; logger.verbose("Peek message from sequence number: {}", sequence); return channel.peek(sequence, sessionId, getLinkName(sessionId)); }) .handle((message, sink) -> { final long current = lastPeekedSequenceNumber .updateAndGet(value -> Math.max(value, message.getSequenceNumber())); logger.verbose("Updating last peeked sequence number: {}", current); sink.next(message); }); } /** * Starting from the given sequence number, reads next the active message without changing the state of the receiver * or the message source. * * @param sequenceNumber The sequence number from where to read the message. * * @return A peeked {@link ServiceBusReceivedMessage}. * @see <a href="https: */ public Mono<ServiceBusReceivedMessage> peekMessageAt(long sequenceNumber) { return peekMessageAt(sequenceNumber, receiverOptions.getSessionId()); } /** * Starting from the given sequence number, reads next the active message without changing the state of the receiver * or the message source. * * @param sequenceNumber The sequence number from where to read the message. * @param sessionId Session id of the message to peek from. {@code null} if there is no session. * * @return A peeked {@link ServiceBusReceivedMessage}. 
* @see <a href="https: */ Mono<ServiceBusReceivedMessage> peekMessageAt(long sequenceNumber, String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId))); } /** * Reads the next batch of active messages without changing the state of the receiver or the message source. * * @param maxMessages The number of messages. * * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. * @see <a href="https: */ public Flux<ServiceBusReceivedMessage> peekMessages(int maxMessages) { return peekMessages(maxMessages, receiverOptions.getSessionId()); } /** * Reads the next batch of active messages without changing the state of the receiver or the message source. * * @param maxMessages The number of messages. * @param sessionId Session id of the messages to peek from. {@code null} if there is no session. * * @return An {@link IterableStream} of {@link ServiceBusReceivedMessage messages} that are peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. 
* @see <a href="https: */ Flux<ServiceBusReceivedMessage> peekMessages(int maxMessages, String sessionId) { if (isDisposed.get()) { return fluxError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMapMany(node -> { final long nextSequenceNumber = lastPeekedSequenceNumber.get() + 1; logger.verbose("Peek batch from sequence number: {}", nextSequenceNumber); final Flux<ServiceBusReceivedMessage> messages = node.peek(nextSequenceNumber, sessionId, getLinkName(sessionId), maxMessages); final Mono<ServiceBusReceivedMessage> handle = messages .switchIfEmpty(Mono.fromCallable(() -> { ServiceBusReceivedMessage emptyMessage = new ServiceBusReceivedMessage(BinaryData .fromBytes(new byte[0])); emptyMessage.setSequenceNumber(lastPeekedSequenceNumber.get()); return emptyMessage; })) .last() .handle((last, sink) -> { final long current = lastPeekedSequenceNumber .updateAndGet(value -> Math.max(value, last.getSequenceNumber())); logger.verbose("Last peeked sequence number in batch: {}", current); sink.complete(); }); return Flux.merge(messages, handle); }); } /** * Starting from the given sequence number, reads the next batch of active messages without changing the state of * the receiver or the message source. * * @param maxMessages The number of messages. * @param sequenceNumber The sequence number from where to start reading messages. * * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. 
* @see <a href="https: */ public Flux<ServiceBusReceivedMessage> peekMessagesAt(int maxMessages, long sequenceNumber) { return peekMessagesAt(maxMessages, sequenceNumber, receiverOptions.getSessionId()); } /** * Starting from the given sequence number, reads the next batch of active messages without changing the state of * the receiver or the message source. * * @param maxMessages The number of messages. * @param sequenceNumber The sequence number from where to start reading messages. * @param sessionId Session id of the messages to peek from. {@code null} if there is no session. * * @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. * @see <a href="https: */ Flux<ServiceBusReceivedMessage> peekMessagesAt(int maxMessages, long sequenceNumber, String sessionId) { if (isDisposed.get()) { return fluxError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMapMany(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId), maxMessages)); } /** * Receives an <b>infinite</b> stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity. * This Flux continuously receives messages from a Service Bus entity until either: * * <ul> * <li>The receiver is closed.</li> * <li>The subscription to the Flux is disposed.</li> * <li>A terminal signal from a downstream subscriber is propagated upstream (ie. {@link Flux * {@link Flux * <li>An {@link AmqpException} occurs that causes the receive link to stop.</li> * </ul> * * @return An <b>infinite</b> stream of messages from the Service Bus entity. 
 */
public Flux<ServiceBusReceivedMessage> receiveMessages() {
    // Unwrap the context stream: propagate per-message errors through the Flux and emit
    // only the payload messages to the subscriber.
    return receiveMessagesWithContext()
        .handle((serviceBusMessageContext, sink) -> {
            if (serviceBusMessageContext.hasError()) {
                sink.error(serviceBusMessageContext.getThrowable());
                return;
            }
            sink.next(serviceBusMessageContext.getMessage());
        });
}

/**
 * Receives an <b>infinite</b> stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity.
 * This Flux continuously receives messages from a Service Bus entity until either:
 *
 * <ul>
 * <li>The receiver is closed.</li>
 * <li>The subscription to the Flux is disposed.</li>
 * <li>A terminal signal from a downstream subscriber is propagated upstream (ie. {@link Flux#take(long)} or
 * {@link Flux#take(Duration)}).</li>
 * <li>An {@link AmqpException} occurs that causes the receive link to stop.</li>
 * </ul>
 *
 * @return An <b>infinite</b> stream of messages from the Service Bus entity.
 */
Flux<ServiceBusMessageContext> receiveMessagesWithContext() {
    // Session receivers are fed by the session manager; otherwise a consumer link is
    // (lazily) created and its raw messages are wrapped into contexts.
    final Flux<ServiceBusMessageContext> messageFlux = sessionManager != null
        ? sessionManager.receive()
        : getOrCreateConsumer().receive().map(ServiceBusMessageContext::new);

    // Optionally renew each message's lock, up to the configured max renewal duration.
    final Flux<ServiceBusMessageContext> withAutoLockRenewal;
    if (receiverOptions.isAutoLockRenewEnabled()) {
        withAutoLockRenewal = new FluxAutoLockRenew(messageFlux,
            receiverOptions.getMaxLockRenewDuration(), renewalContainer, this::renewMessageLock);
    } else {
        withAutoLockRenewal = messageFlux;
    }

    // Optionally wrap with auto-settlement; FluxAutoComplete is given a complete() callback
    // and an abandon() callback per message (null-message contexts settle as no-ops).
    final Flux<ServiceBusMessageContext> withAutoComplete;
    if (receiverOptions.isEnableAutoComplete()) {
        withAutoComplete = new FluxAutoComplete(withAutoLockRenewal, completionLock,
            context -> context.getMessage() != null ? complete(context.getMessage()) : Mono.empty(),
            context -> context.getMessage() != null ? abandon(context.getMessage()) : Mono.empty());
    } else {
        withAutoComplete = withAutoLockRenewal;
    }

    // Any pipeline failure is surfaced as a RECEIVE-sourced exception.
    return withAutoComplete
        .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RECEIVE));
}

/**
 * Receives a deferred {@link ServiceBusReceivedMessage message}.
Deferred messages can only be received by using * sequence number. * * @param sequenceNumber The {@link ServiceBusReceivedMessage * message. * * @return A deferred message with the matching {@code sequenceNumber}. */ public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) { return receiveDeferredMessage(sequenceNumber, receiverOptions.getSessionId()); } /** * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using * sequence number. * * @param sequenceNumber The {@link ServiceBusReceivedMessage * message. * @param sessionId Session id of the deferred message. {@code null} if there is no session. * * @return A deferred message with the matching {@code sequenceNumber}. */ Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber, String sessionId) { return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(node -> node.receiveDeferredMessages(receiverOptions.getReceiveMode(), sessionId, getLinkName(sessionId), Collections.singleton(sequenceNumber)).last()) .map(receivedMessage -> { if (CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) { return receivedMessage; } if (receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) { receivedMessage.setLockedUntil(managementNodeLocks.addOrUpdate(receivedMessage.getLockToken(), receivedMessage.getLockedUntil(), receivedMessage.getLockedUntil())); } return receivedMessage; }); } /** * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received * by using sequence number. * * @param sequenceNumbers The sequence numbers of the deferred messages. * * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}. 
 */
public Flux<ServiceBusReceivedMessage> receiveDeferredMessages(Iterable<Long> sequenceNumbers) {
    return receiveDeferredMessages(sequenceNumbers, receiverOptions.getSessionId());
}

/**
 * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received
 * by using sequence number.
 *
 * @param sequenceNumbers The sequence numbers of the deferred messages.
 * @param sessionId Session id of the deferred messages. {@code null} if there is no session.
 *
 * @return An {@link IterableStream} of deferred {@link ServiceBusReceivedMessage messages}.
 * @throws IllegalStateException if the receiver is disposed.
 */
Flux<ServiceBusReceivedMessage> receiveDeferredMessages(Iterable<Long> sequenceNumbers, String sessionId) {
    // Reject use after the receiver has been closed.
    if (isDisposed.get()) {
        return fluxError(logger, new IllegalStateException(
            String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch")));
    }
    return connectionProcessor
        .flatMap(connection -> connection.getManagementNode(entityPath, entityType))
        .flatMapMany(node -> node.receiveDeferredMessages(receiverOptions.getReceiveMode(), sessionId,
            getLinkName(sessionId), sequenceNumbers))
        .map(receivedMessage -> {
            // No lock token on the message: nothing to track for renewal.
            if (CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) {
                return receivedMessage;
            }
            // PEEK_LOCK: record the lock expiry so management-node renewals stay in sync.
            if (receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) {
                receivedMessage.setLockedUntil(managementNodeLocks.addOrUpdate(receivedMessage.getLockToken(),
                    receivedMessage.getLockedUntil(),
                    receivedMessage.getLockedUntil()));
            }
            return receivedMessage;
        });
}

/**
 * Asynchronously renews the lock on the message. The lock will be renewed based on the setting specified on the
 * entity. When a message is received in {@link ReceiveMode#PEEK_LOCK PEEK_LOCK} mode, the message is locked on
 * this receiver instance for a duration as specified during the entity creation (LockDuration). If processing of
 * the message requires longer than this duration, the lock needs to be renewed. For each renewal, the lock is reset
 * to the entity's LockDuration value.
* * @param message The {@link ServiceBusReceivedMessage} to perform auto-lock renewal. * * @return The new expiration time for the message. * @throws NullPointerException if {@code message} or {@code message.getLockToken()} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @throws IllegalStateException if the receiver is a session receiver. * @throws IllegalArgumentException if {@code message.getLockToken()} is an empty value. */ public Mono<OffsetDateTime> renewMessageLock(ServiceBusReceivedMessage message) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock"))); } else if (Objects.isNull(message)) { return monoError(logger, new NullPointerException("'message' cannot be null.")); } else if (Objects.isNull(message.getLockToken())) { return monoError(logger, new NullPointerException("'message.getLockToken()' cannot be null.")); } else if (message.getLockToken().isEmpty()) { return monoError(logger, new IllegalArgumentException("'message.getLockToken()' cannot be empty.")); } else if (receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException( String.format("Cannot renew message lock [%s] for a session receiver.", message.getLockToken()))); } return renewMessageLock(message.getLockToken()) .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } /** * Asynchronously renews the lock on the message. The lock will be renewed based on the setting specified on the * entity. * * @param lockToken to be renewed. * * @return The new expiration time for the message. 
*/ Mono<OffsetDateTime> renewMessageLock(String lockToken) { return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(serviceBusManagementNode -> serviceBusManagementNode.renewMessageLock(lockToken, getLinkName(null))) .map(offsetDateTime -> managementNodeLocks.addOrUpdate(lockToken, offsetDateTime, offsetDateTime)); } /** * Starts the auto lock renewal for a {@link ServiceBusReceivedMessage message}. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param maxLockRenewalDuration Maximum duration to keep renewing the lock token. * * @return A lock renewal operation for the message. * @throws NullPointerException if {@code message}, {@code message.getLockToken()} or {@code * maxLockRenewalDuration} is null. * @throws IllegalStateException if the receiver is a session receiver or the receiver is disposed. * @throws IllegalArgumentException if {@code message.getLockToken()} is an empty value. */ public Mono<Void> renewMessageLock(ServiceBusReceivedMessage message, Duration maxLockRenewalDuration) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getAutoRenewMessageLock"))); } else if (Objects.isNull(message)) { return monoError(logger, new NullPointerException("'message' cannot be null.")); } else if (Objects.isNull(message.getLockToken())) { return monoError(logger, new NullPointerException("'message.getLockToken()' cannot be null.")); } else if (message.getLockToken().isEmpty()) { return monoError(logger, new IllegalArgumentException("'message.getLockToken()' cannot be empty.")); } else if (receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException( String.format("Cannot renew message lock [%s] for a session receiver.", message.getLockToken()))); } else if (maxLockRenewalDuration == null) { return monoError(logger, new NullPointerException("'maxLockRenewalDuration' 
cannot be null.")); } else if (maxLockRenewalDuration.isNegative()) { return monoError(logger, new IllegalArgumentException("'maxLockRenewalDuration' cannot be negative.")); } final LockRenewalOperation operation = new LockRenewalOperation(message.getLockToken(), maxLockRenewalDuration, false, ignored -> renewMessageLock(message)); renewalContainer.addOrUpdate(message.getLockToken(), OffsetDateTime.now().plus(maxLockRenewalDuration), operation); return operation.getCompletionOperation() .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } /** * Renews the session lock if this receiver is a session receiver. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. */ public Mono<OffsetDateTime> renewSessionLock() { return renewSessionLock(receiverOptions.getSessionId()); } /** * Starts the auto lock renewal for the session this receiver works for. * * @param maxLockRenewalDuration Maximum duration to keep renewing the session lock. * * @return A lock renewal operation for the message. * @throws NullPointerException if {@code sessionId} or {@code maxLockRenewalDuration} is null. * @throws IllegalArgumentException if {@code sessionId} is an empty string. * @throws IllegalStateException if the receiver is a non-session receiver or the receiver is disposed. */ public Mono<Void> renewSessionLock(Duration maxLockRenewalDuration) { return this.renewSessionLock(receiverOptions.getSessionId(), maxLockRenewalDuration); } /** * Sets the state of the session this receiver works for. * * @param sessionState State to set on the session. * * @return A Mono that completes when the session is set * @throws IllegalStateException if the receiver is a non-session receiver. */ public Mono<Void> setSessionState(byte[] sessionState) { return this.setSessionState(receiverOptions.getSessionId(), sessionState); } /** * Starts a new service side transaction. 
The {@link ServiceBusTransactionContext} should be passed to all * operations that needs to be in this transaction. * * <p><strong>Create a transaction</strong></p> * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.createTransaction} * * @return The {@link Mono} that finishes this operation on service bus resource. */ public Mono<ServiceBusTransactionContext> createTransaction() { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "createTransaction"))); } return connectionProcessor .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME)) .flatMap(transactionSession -> transactionSession.createTransaction()) .map(transaction -> new ServiceBusTransactionContext(transaction.getTransactionId())); } /** * Commits the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus. * <p><strong>Commit a transaction</strong></p> * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.commitTransaction} * * @param transactionContext to be committed. * * @return The {@link Mono} that finishes this operation on service bus resource. * @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is * null. 
*/ public Mono<Void> commitTransaction(ServiceBusTransactionContext transactionContext) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "commitTransaction"))); } if (Objects.isNull(transactionContext)) { return monoError(logger, new NullPointerException("'transactionContext' cannot be null.")); } else if (Objects.isNull(transactionContext.getTransactionId())) { return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null.")); } return connectionProcessor .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME)) .flatMap(transactionSession -> transactionSession.commitTransaction(new AmqpTransaction( transactionContext.getTransactionId()))); } /** * Rollbacks the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus. * <p><strong>Rollback a transaction</strong></p> * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.rollbackTransaction} * * @param transactionContext to be rollbacked. * * @return The {@link Mono} that finishes this operation on service bus resource. * @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is * null. 
*/ public Mono<Void> rollbackTransaction(ServiceBusTransactionContext transactionContext) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "rollbackTransaction"))); } if (Objects.isNull(transactionContext)) { return monoError(logger, new NullPointerException("'transactionContext' cannot be null.")); } else if (Objects.isNull(transactionContext.getTransactionId())) { return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null.")); } return connectionProcessor .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME)) .flatMap(transactionSession -> transactionSession.rollbackTransaction(new AmqpTransaction( transactionContext.getTransactionId()))); } /** * Disposes of the consumer by closing the underlying connection to the service. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } try { completionLock.acquire(); } catch (InterruptedException e) { logger.info("Unable to obtain completion lock.", e); } logger.info("Removing receiver links."); final ServiceBusAsyncConsumer disposed = consumer.getAndSet(null); if (disposed != null) { disposed.close(); } if (sessionManager != null) { sessionManager.close(); } onClientClose.run(); } /** * @return receiver options set by user; */ ReceiverOptions getReceiverOptions() { return receiverOptions; } /** * Gets whether or not the management node contains the message lock token and it has not expired. Lock tokens are * held by the management node when they are received from the management node or management operations are * performed using that {@code lockToken}. * * @param lockToken Lock token to check for. * * @return {@code true} if the management node contains the lock token and false otherwise. 
*/ private boolean isManagementToken(String lockToken) { return managementNodeLocks.containsUnexpired(lockToken); } private Mono<Void> updateDisposition(ServiceBusReceivedMessage message, DispositionStatus dispositionStatus, String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify, ServiceBusTransactionContext transactionContext) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue()))); } else if (Objects.isNull(message)) { return monoError(logger, new NullPointerException("'message' cannot be null.")); } final String lockToken = message.getLockToken(); final String sessionId = message.getSessionId(); if (receiverOptions.getReceiveMode() != ReceiveMode.PEEK_LOCK) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus)))); } final String sessionIdToUse; if (sessionId == null && !CoreUtils.isNullOrEmpty(receiverOptions.getSessionId())) { sessionIdToUse = receiverOptions.getSessionId(); } else { sessionIdToUse = sessionId; } logger.verbose("{}: Update started. Disposition: {}. Lock: {}. SessionId: {}.", entityPath, dispositionStatus, lockToken, sessionIdToUse); final Mono<Void> performOnManagement = connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(node -> node.updateDisposition(lockToken, dispositionStatus, deadLetterReason, deadLetterErrorDescription, propertiesToModify, sessionId, getLinkName(sessionId), transactionContext)) .then(Mono.fromRunnable(() -> { logger.info("{}: Management node Update completed. Disposition: {}. 
Lock: {}.", entityPath, dispositionStatus, lockToken); managementNodeLocks.remove(lockToken); renewalContainer.remove(lockToken); })); Mono<Void> updateDispositionOperation; if (sessionManager != null) { updateDispositionOperation = sessionManager.updateDisposition(lockToken, sessionId, dispositionStatus, propertiesToModify, deadLetterReason, deadLetterErrorDescription, transactionContext) .flatMap(isSuccess -> { if (isSuccess) { renewalContainer.remove(lockToken); return Mono.empty(); } logger.info("Could not perform on session manger. Performing on management node."); return performOnManagement; }); } else { final ServiceBusAsyncConsumer existingConsumer = consumer.get(); if (isManagementToken(lockToken) || existingConsumer == null) { updateDispositionOperation = performOnManagement; } else { updateDispositionOperation = existingConsumer.updateDisposition(lockToken, dispositionStatus, deadLetterReason, deadLetterErrorDescription, propertiesToModify, transactionContext) .then(Mono.fromRunnable(() -> { logger.verbose("{}: Update completed. Disposition: {}. 
Lock: {}.", entityPath, dispositionStatus, lockToken); renewalContainer.remove(lockToken); })); } } return updateDispositionOperation .onErrorMap(throwable -> { if (receiverOptions.isEnableAutoComplete() && throwable instanceof AmqpException) { switch (dispositionStatus) { case COMPLETED: return new ServiceBusReceiverException((AmqpException) throwable, ServiceBusErrorSource.COMPLETE); case ABANDONED: return new ServiceBusReceiverException((AmqpException) throwable, ServiceBusErrorSource.ABANDONED); default: } } return throwable; }); } private ServiceBusAsyncConsumer getOrCreateConsumer() { final ServiceBusAsyncConsumer existing = consumer.get(); if (existing != null) { return existing; } final String linkName = StringUtil.getRandomString(entityPath); logger.info("{}: Creating consumer for link '{}'", entityPath, linkName); final Flux<ServiceBusReceiveLink> receiveLink = connectionProcessor.flatMap(connection -> { if (receiverOptions.isSessionReceiver()) { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, receiverOptions.getSessionId()); } else { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType); } }) .doOnNext(next -> { final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]" + " sessionEnabled? 
{} transferEntityPath: [{}], entityType: [{}]"; logger.verbose(format, next.getEntityPath(), receiverOptions.getReceiveMode(), CoreUtils.isNullOrEmpty(receiverOptions.getSessionId()), "N/A", entityType); }) .repeat(); final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions()); final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith( new ServiceBusReceiveLinkProcessor(receiverOptions.getPrefetchCount(), retryPolicy, receiverOptions.getReceiveMode())); final ServiceBusAsyncConsumer newConsumer = new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer, receiverOptions); if (consumer.compareAndSet(null, newConsumer)) { return newConsumer; } else { newConsumer.close(); return consumer.get(); } } /** * If the receiver has not connected via {@link * through the management node. * * @return The name of the receive link, or null of it has not connected via a receive link. */ private String getLinkName(String sessionId) { if (sessionManager != null && !CoreUtils.isNullOrEmpty(sessionId)) { return sessionManager.getLinkName(sessionId); } else if (!CoreUtils.isNullOrEmpty(sessionId) && !receiverOptions.isSessionReceiver()) { return null; } else { final ServiceBusAsyncConsumer existing = consumer.get(); return existing != null ? existing.getLinkName() : null; } } Mono<OffsetDateTime> renewSessionLock(String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewSessionLock"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException("Cannot renew session lock on a non-session receiver.")); } final String linkName = sessionManager != null ? 
sessionManager.getLinkName(sessionId) : null; return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> channel.renewSessionLock(sessionId, linkName)) .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } Mono<Void> renewSessionLock(String sessionId, Duration maxLockRenewalDuration) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getAutoRenewSessionLock"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException( "Cannot renew session lock on a non-session receiver.")); } else if (maxLockRenewalDuration == null) { return monoError(logger, new NullPointerException("'maxLockRenewalDuration' cannot be null.")); } else if (maxLockRenewalDuration.isNegative()) { return monoError(logger, new IllegalArgumentException( "'maxLockRenewalDuration' cannot be negative.")); } else if (Objects.isNull(sessionId)) { return monoError(logger, new NullPointerException("'sessionId' cannot be null.")); } else if (sessionId.isEmpty()) { return monoError(logger, new IllegalArgumentException("'sessionId' cannot be empty.")); } final LockRenewalOperation operation = new LockRenewalOperation(sessionId, maxLockRenewalDuration, true, this::renewSessionLock); renewalContainer.addOrUpdate(sessionId, OffsetDateTime.now().plus(maxLockRenewalDuration), operation); return operation.getCompletionOperation() .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } Mono<Void> setSessionState(String sessionId, byte[] sessionState) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "setSessionState"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException("Cannot set session state on a non-session receiver.")); } final String linkName 
= sessionManager != null ? sessionManager.getLinkName(sessionId) : null; return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> channel.setSessionState(sessionId, sessionState, linkName)); } Mono<byte[]> getSessionState(String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getSessionState"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException("Cannot get session state on a non-session receiver.")); } if (sessionManager != null) { return sessionManager.getSessionState(sessionId); } else { return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> channel.getSessionState(sessionId, getLinkName(sessionId))); } } /** * Map the error to {@link ServiceBusReceiverException} */ boolean isConnectionClosed() { return this.connectionProcessor.isChannelClosed(); } }
class ServiceBusReceiverAsyncClient implements AutoCloseable { private static final DeadLetterOptions DEFAULT_DEAD_LETTER_OPTIONS = new DeadLetterOptions(); private static final String TRANSACTION_LINK_NAME = "coordinator"; private final LockContainer<LockRenewalOperation> renewalContainer; private final AtomicBoolean isDisposed = new AtomicBoolean(); private final LockContainer<OffsetDateTime> managementNodeLocks; private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClient.class); private final String fullyQualifiedNamespace; private final String entityPath; private final MessagingEntityType entityType; private final ReceiverOptions receiverOptions; private final ServiceBusConnectionProcessor connectionProcessor; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final Runnable onClientClose; private final ServiceBusSessionManager sessionManager; private final Semaphore completionLock = new Semaphore(1); private final AtomicLong lastPeekedSequenceNumber = new AtomicLong(-1); private final AtomicReference<ServiceBusAsyncConsumer> consumer = new AtomicReference<>(); /** * Creates a receiver that listens to a Service Bus resource. * * @param fullyQualifiedNamespace The fully qualified domain name for the Service Bus resource. * @param entityPath The name of the topic or queue. * @param entityType The type of the Service Bus resource. * @param receiverOptions Options when receiving messages. * @param connectionProcessor The AMQP connection to the Service Bus resource. * @param tracerProvider Tracer for telemetry. * @param messageSerializer Serializes and deserializes Service Bus messages. * @param onClientClose Operation to run when the client completes. 
*/ ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType, ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval, TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null."); this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null."); this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveOptions cannot be null.'"); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null."); this.managementNodeLocks = new LockContainer<>(cleanupInterval); this.renewalContainer = new LockContainer<>(Duration.ofMinutes(2), renewal -> { logger.verbose("Closing expired renewal operation. lockToken[{}]. status[{}]. 
throwable[{}].", renewal.getLockToken(), renewal.getStatus(), renewal.getThrowable()); renewal.close(); }); this.sessionManager = null; } ServiceBusReceiverAsyncClient(String fullyQualifiedNamespace, String entityPath, MessagingEntityType entityType, ReceiverOptions receiverOptions, ServiceBusConnectionProcessor connectionProcessor, Duration cleanupInterval, TracerProvider tracerProvider, MessageSerializer messageSerializer, Runnable onClientClose, ServiceBusSessionManager sessionManager) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null."); this.entityType = Objects.requireNonNull(entityType, "'entityType' cannot be null."); this.receiverOptions = Objects.requireNonNull(receiverOptions, "'receiveOptions cannot be null.'"); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.onClientClose = Objects.requireNonNull(onClientClose, "'onClientClose' cannot be null."); this.sessionManager = Objects.requireNonNull(sessionManager, "'sessionManager' cannot be null."); this.managementNodeLocks = new LockContainer<>(cleanupInterval); this.renewalContainer = new LockContainer<>(Duration.ofMinutes(2), renewal -> { logger.info("Closing expired renewal operation. sessionId[{}]. status[{}]. throwable[{}]", renewal.getSessionId(), renewal.getStatus(), renewal.getThrowable()); renewal.close(); }); } /** * Gets the fully qualified Service Bus namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Service Bus namespace that the connection is associated with. 
*/ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Service Bus resource this client interacts with. * * @return The Service Bus resource this client interacts with. */ public String getEntityPath() { return entityPath; } /** * Abandon a {@link ServiceBusReceivedMessage message}. This will make the message available * again for processing. Abandoning a message will increase the delivery count on the message. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that completes when the Service Bus abandon operation completes. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. */ public Mono<Void> abandon(ServiceBusReceivedMessage message) { return updateDisposition(message, DispositionStatus.ABANDONED, null, null, null, null); } /** * Abandon a {@link ServiceBusReceivedMessage message} updates the message's properties. * This will make the message available again for processing. Abandoning a message will increase the delivery count * on the message. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to abandon the message. You can specify * {@link AbandonOptions * {@code transactionContext} can be set using * {@link AbandonOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * * @return A {@link Mono} that completes when the Service Bus operation finishes. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. 
*/ public Mono<Void> abandon(ServiceBusReceivedMessage message, AbandonOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'settlementOptions' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.ABANDONED, null, null, options.getPropertiesToModify(), options.getTransactionContext()); } /** * Completes a {@link ServiceBusReceivedMessage message}. This will delete the message from the service. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that finishes when the message is completed on Service Bus. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. */ public Mono<Void> complete(ServiceBusReceivedMessage message) { return updateDisposition(message, DispositionStatus.COMPLETED, null, null, null, null); } /** * Completes a {@link ServiceBusReceivedMessage message}. This will delete the message from the * service. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to complete the message. The {@code transactionContext} can be set using * {@link CompleteOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * * @return A {@link Mono} that finishes when the message is completed on Service Bus. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. 
*/ public Mono<Void> complete(ServiceBusReceivedMessage message, CompleteOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.COMPLETED, null, null, null, options.getTransactionContext()); } /** * Defers a {@link ServiceBusReceivedMessage message}. This will move message into the deferred subqueue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that completes when the Service Bus defer operation finishes. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * @see <a href="https: */ public Mono<Void> defer(ServiceBusReceivedMessage message) { return updateDisposition(message, DispositionStatus.DEFERRED, null, null, null, null); } /** * Defers a {@link ServiceBusReceivedMessage message} with modified message property. This will move message into * the deferred subqueue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to defer the message. You can specify {@link DeferOptions * to modify on the Message. The {@code transactionContext} can be set using * {@link DeferOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * * @return A {@link Mono} that completes when the defer operation finishes. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. 
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @see <a href="https: */ public Mono<Void> defer(ServiceBusReceivedMessage message, DeferOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.DEFERRED, null, null, options.getPropertiesToModify(), options.getTransactionContext()); } /** * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * * @return A {@link Mono} that completes when the dead letter operation finishes. * @throws NullPointerException if {@code message} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @see <a href="https: * queues</a> */ public Mono<Void> deadLetter(ServiceBusReceivedMessage message) { return deadLetter(message, DEFAULT_DEAD_LETTER_OPTIONS); } /** * Moves a {@link ServiceBusReceivedMessage message} to the deadletter sub-queue. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param options to deadLetter the message. You can specify * {@link DeadLetterOptions * {@code transactionContext} can be set using * {@link DeadLetterOptions * created first by {@link ServiceBusReceiverAsyncClient * {@link ServiceBusSenderAsyncClient * @return A {@link Mono} that completes when the dead letter operation finishes. * @throws NullPointerException if {@code message} or {@code options} is null. Also if * {@code transactionContext.transactionId} is null when {@code options.transactionContext} is specified. 
* @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @see <a href="https: * queues</a> */ public Mono<Void> deadLetter(ServiceBusReceivedMessage message, DeadLetterOptions options) { if (Objects.isNull(options)) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } else if (!Objects.isNull(options.getTransactionContext()) && Objects.isNull(options.getTransactionContext().getTransactionId())) { return monoError(logger, new NullPointerException( "'options.transactionContext.transactionId' cannot be null.")); } return updateDisposition(message, DispositionStatus.SUSPENDED, options.getDeadLetterReason(), options.getDeadLetterErrorDescription(), options.getPropertiesToModify(), options.getTransactionContext()); } /** * Gets the state of the session if this receiver is a session receiver. * * @return The session state or an empty Mono if there is no state set for the session. * @throws IllegalStateException if the receiver is a non-session receiver. */ public Mono<byte[]> getSessionState() { return getSessionState(receiverOptions.getSessionId()); } /** * Reads the next active message without changing the state of the receiver or the message source. The first call to * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent * message in the entity. * * @return A peeked {@link ServiceBusReceivedMessage}. * @see <a href="https: */ public Mono<ServiceBusReceivedMessage> peekMessage() { return peekMessage(receiverOptions.getSessionId()); } /** * Reads the next active message without changing the state of the receiver or the message source. The first call to * {@code peek()} fetches the first active message for this receiver. Each subsequent call fetches the subsequent * message in the entity. * * @param sessionId Session id of the message to peek from. {@code null} if there is no session. * * @return A peeked {@link ServiceBusReceivedMessage}. 
* @throws IllegalStateException if the receiver is disposed. * @see <a href="https: */ Mono<ServiceBusReceivedMessage> peekMessage(String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peek"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> { final long sequence = lastPeekedSequenceNumber.get() + 1; logger.verbose("Peek message from sequence number: {}", sequence); return channel.peek(sequence, sessionId, getLinkName(sessionId)); }) .handle((message, sink) -> { final long current = lastPeekedSequenceNumber .updateAndGet(value -> Math.max(value, message.getSequenceNumber())); logger.verbose("Updating last peeked sequence number: {}", current); sink.next(message); }); } /** * Starting from the given sequence number, reads next the active message without changing the state of the receiver * or the message source. * * @param sequenceNumber The sequence number from where to read the message. * * @return A peeked {@link ServiceBusReceivedMessage}. * @see <a href="https: */ public Mono<ServiceBusReceivedMessage> peekMessageAt(long sequenceNumber) { return peekMessageAt(sequenceNumber, receiverOptions.getSessionId()); } /** * Starting from the given sequence number, reads next the active message without changing the state of the receiver * or the message source. * * @param sequenceNumber The sequence number from where to read the message. * @param sessionId Session id of the message to peek from. {@code null} if there is no session. * * @return A peeked {@link ServiceBusReceivedMessage}. 
* @see <a href="https: */ Mono<ServiceBusReceivedMessage> peekMessageAt(long sequenceNumber, String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekAt"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId))); } /** * Reads the next batch of active messages without changing the state of the receiver or the message source. * * @param maxMessages The number of messages. * * @return A {@link Flux} of {@link ServiceBusReceivedMessage messages} that are peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. * @see <a href="https: */ public Flux<ServiceBusReceivedMessage> peekMessages(int maxMessages) { return peekMessages(maxMessages, receiverOptions.getSessionId()); } /** * Reads the next batch of active messages without changing the state of the receiver or the message source. * * @param maxMessages The number of messages. * @param sessionId Session id of the messages to peek from. {@code null} if there is no session. * * @return An {@link IterableStream} of {@link ServiceBusReceivedMessage messages} that are peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. 
* @see <a href="https: */ Flux<ServiceBusReceivedMessage> peekMessages(int maxMessages, String sessionId) { if (isDisposed.get()) { return fluxError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatch"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMapMany(node -> { final long nextSequenceNumber = lastPeekedSequenceNumber.get() + 1; logger.verbose("Peek batch from sequence number: {}", nextSequenceNumber); final Flux<ServiceBusReceivedMessage> messages = node.peek(nextSequenceNumber, sessionId, getLinkName(sessionId), maxMessages); final Mono<ServiceBusReceivedMessage> handle = messages .switchIfEmpty(Mono.fromCallable(() -> { ServiceBusReceivedMessage emptyMessage = new ServiceBusReceivedMessage(BinaryData .fromBytes(new byte[0])); emptyMessage.setSequenceNumber(lastPeekedSequenceNumber.get()); return emptyMessage; })) .last() .handle((last, sink) -> { final long current = lastPeekedSequenceNumber .updateAndGet(value -> Math.max(value, last.getSequenceNumber())); logger.verbose("Last peeked sequence number in batch: {}", current); sink.complete(); }); return Flux.merge(messages, handle); }); } /** * Starting from the given sequence number, reads the next batch of active messages without changing the state of * the receiver or the message source. * * @param maxMessages The number of messages. * @param sequenceNumber The sequence number from where to start reading messages. * * @return A {@link Flux} of {@link ServiceBusReceivedMessage} peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. 
* @see <a href="https: */ public Flux<ServiceBusReceivedMessage> peekMessagesAt(int maxMessages, long sequenceNumber) { return peekMessagesAt(maxMessages, sequenceNumber, receiverOptions.getSessionId()); } /** * Starting from the given sequence number, reads the next batch of active messages without changing the state of * the receiver or the message source. * * @param maxMessages The number of messages. * @param sequenceNumber The sequence number from where to start reading messages. * @param sessionId Session id of the messages to peek from. {@code null} if there is no session. * * @return An {@link IterableStream} of {@link ServiceBusReceivedMessage} peeked. * @throws IllegalArgumentException if {@code maxMessages} is not a positive integer. * @see <a href="https: */ Flux<ServiceBusReceivedMessage> peekMessagesAt(int maxMessages, long sequenceNumber, String sessionId) { if (isDisposed.get()) { return fluxError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "peekBatchAt"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMapMany(node -> node.peek(sequenceNumber, sessionId, getLinkName(sessionId), maxMessages)); } /** * Receives an <b>infinite</b> stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity. * This Flux continuously receives messages from a Service Bus entity until either: * * <ul> * <li>The receiver is closed.</li> * <li>The subscription to the Flux is disposed.</li> * <li>A terminal signal from a downstream subscriber is propagated upstream (ie. {@link Flux * {@link Flux * <li>An {@link AmqpException} occurs that causes the receive link to stop.</li> * </ul> * * @return An <b>infinite</b> stream of messages from the Service Bus entity. 
*/ public Flux<ServiceBusReceivedMessage> receiveMessages() { return receiveMessagesWithContext() .handle((serviceBusMessageContext, sink) -> { if (serviceBusMessageContext.hasError()) { sink.error(serviceBusMessageContext.getThrowable()); return; } sink.next(serviceBusMessageContext.getMessage()); }); } /** * Receives an <b>infinite</b> stream of {@link ServiceBusReceivedMessage messages} from the Service Bus entity. * This Flux continuously receives messages from a Service Bus entity until either: * * <ul> * <li>The receiver is closed.</li> * <li>The subscription to the Flux is disposed.</li> * <li>A terminal signal from a downstream subscriber is propagated upstream (ie. {@link Flux * {@link Flux * <li>An {@link AmqpException} occurs that causes the receive link to stop.</li> * </ul> * * @return An <b>infinite</b> stream of messages from the Service Bus entity. */ Flux<ServiceBusMessageContext> receiveMessagesWithContext() { final Flux<ServiceBusMessageContext> messageFlux = sessionManager != null ? sessionManager.receive() : getOrCreateConsumer().receive().map(ServiceBusMessageContext::new); final Flux<ServiceBusMessageContext> withAutoLockRenewal; if (receiverOptions.isAutoLockRenewEnabled()) { withAutoLockRenewal = new FluxAutoLockRenew(messageFlux, receiverOptions.getMaxLockRenewDuration(), renewalContainer, this::renewMessageLock); } else { withAutoLockRenewal = messageFlux; } final Flux<ServiceBusMessageContext> withAutoComplete; if (receiverOptions.isEnableAutoComplete()) { withAutoComplete = new FluxAutoComplete(withAutoLockRenewal, completionLock, context -> context.getMessage() != null ? complete(context.getMessage()) : Mono.empty(), context -> context.getMessage() != null ? abandon(context.getMessage()) : Mono.empty()); } else { withAutoComplete = withAutoLockRenewal; } return withAutoComplete .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RECEIVE)); } /** * Receives a deferred {@link ServiceBusReceivedMessage message}. 
Deferred messages can only be received by using * sequence number. * * @param sequenceNumber The {@link ServiceBusReceivedMessage * message. * * @return A deferred message with the matching {@code sequenceNumber}. */ public Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber) { return receiveDeferredMessage(sequenceNumber, receiverOptions.getSessionId()); } /** * Receives a deferred {@link ServiceBusReceivedMessage message}. Deferred messages can only be received by using * sequence number. * * @param sequenceNumber The {@link ServiceBusReceivedMessage * message. * @param sessionId Session id of the deferred message. {@code null} if there is no session. * * @return A deferred message with the matching {@code sequenceNumber}. */ Mono<ServiceBusReceivedMessage> receiveDeferredMessage(long sequenceNumber, String sessionId) { return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(node -> node.receiveDeferredMessages(receiverOptions.getReceiveMode(), sessionId, getLinkName(sessionId), Collections.singleton(sequenceNumber)).last()) .map(receivedMessage -> { if (CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) { return receivedMessage; } if (receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) { receivedMessage.setLockedUntil(managementNodeLocks.addOrUpdate(receivedMessage.getLockToken(), receivedMessage.getLockedUntil(), receivedMessage.getLockedUntil())); } return receivedMessage; }); } /** * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received * by using sequence number. * * @param sequenceNumbers The sequence numbers of the deferred messages. * * @return A {@link Flux} of deferred {@link ServiceBusReceivedMessage messages}. 
*/ public Flux<ServiceBusReceivedMessage> receiveDeferredMessages(Iterable<Long> sequenceNumbers) { return receiveDeferredMessages(sequenceNumbers, receiverOptions.getSessionId()); } /** * Receives a batch of deferred {@link ServiceBusReceivedMessage messages}. Deferred messages can only be received * by using sequence number. * * @param sequenceNumbers The sequence numbers of the deferred messages. * @param sessionId Session id of the deferred messages. {@code null} if there is no session. * * @return An {@link IterableStream} of deferred {@link ServiceBusReceivedMessage messages}. */ Flux<ServiceBusReceivedMessage> receiveDeferredMessages(Iterable<Long> sequenceNumbers, String sessionId) { if (isDisposed.get()) { return fluxError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "receiveDeferredMessageBatch"))); } return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMapMany(node -> node.receiveDeferredMessages(receiverOptions.getReceiveMode(), sessionId, getLinkName(sessionId), sequenceNumbers)) .map(receivedMessage -> { if (CoreUtils.isNullOrEmpty(receivedMessage.getLockToken())) { return receivedMessage; } if (receiverOptions.getReceiveMode() == ReceiveMode.PEEK_LOCK) { receivedMessage.setLockedUntil(managementNodeLocks.addOrUpdate(receivedMessage.getLockToken(), receivedMessage.getLockedUntil(), receivedMessage.getLockedUntil())); } return receivedMessage; }); } /** * Asynchronously renews the lock on the message. The lock will be renewed based on the setting specified on the * entity. When a message is received in {@link ReceiveMode * this receiver instance for a duration as specified during the entity creation (LockDuration). If processing of * the message requires longer than this duration, the lock needs to be renewed. For each renewal, the lock is reset * to the entity's LockDuration value. 
* * @param message The {@link ServiceBusReceivedMessage} to perform auto-lock renewal. * * @return The new expiration time for the message. * @throws NullPointerException if {@code message} or {@code message.getLockToken()} is null. * @throws UnsupportedOperationException if the receiver was opened in {@link ReceiveMode * mode. * @throws IllegalStateException if the receiver is a session receiver. * @throws IllegalArgumentException if {@code message.getLockToken()} is an empty value. */ public Mono<OffsetDateTime> renewMessageLock(ServiceBusReceivedMessage message) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewMessageLock"))); } else if (Objects.isNull(message)) { return monoError(logger, new NullPointerException("'message' cannot be null.")); } else if (Objects.isNull(message.getLockToken())) { return monoError(logger, new NullPointerException("'message.getLockToken()' cannot be null.")); } else if (message.getLockToken().isEmpty()) { return monoError(logger, new IllegalArgumentException("'message.getLockToken()' cannot be empty.")); } else if (receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException( String.format("Cannot renew message lock [%s] for a session receiver.", message.getLockToken()))); } return renewMessageLock(message.getLockToken()) .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } /** * Asynchronously renews the lock on the message. The lock will be renewed based on the setting specified on the * entity. * * @param lockToken to be renewed. * * @return The new expiration time for the message. 
*/ Mono<OffsetDateTime> renewMessageLock(String lockToken) { return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(serviceBusManagementNode -> serviceBusManagementNode.renewMessageLock(lockToken, getLinkName(null))) .map(offsetDateTime -> managementNodeLocks.addOrUpdate(lockToken, offsetDateTime, offsetDateTime)); } /** * Starts the auto lock renewal for a {@link ServiceBusReceivedMessage message}. * * @param message The {@link ServiceBusReceivedMessage} to perform this operation. * @param maxLockRenewalDuration Maximum duration to keep renewing the lock token. * * @return A lock renewal operation for the message. * @throws NullPointerException if {@code message}, {@code message.getLockToken()} or {@code * maxLockRenewalDuration} is null. * @throws IllegalStateException if the receiver is a session receiver or the receiver is disposed. * @throws IllegalArgumentException if {@code message.getLockToken()} is an empty value. */ public Mono<Void> renewMessageLock(ServiceBusReceivedMessage message, Duration maxLockRenewalDuration) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getAutoRenewMessageLock"))); } else if (Objects.isNull(message)) { return monoError(logger, new NullPointerException("'message' cannot be null.")); } else if (Objects.isNull(message.getLockToken())) { return monoError(logger, new NullPointerException("'message.getLockToken()' cannot be null.")); } else if (message.getLockToken().isEmpty()) { return monoError(logger, new IllegalArgumentException("'message.getLockToken()' cannot be empty.")); } else if (receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException( String.format("Cannot renew message lock [%s] for a session receiver.", message.getLockToken()))); } else if (maxLockRenewalDuration == null) { return monoError(logger, new NullPointerException("'maxLockRenewalDuration' 
cannot be null.")); } else if (maxLockRenewalDuration.isNegative()) { return monoError(logger, new IllegalArgumentException("'maxLockRenewalDuration' cannot be negative.")); } final LockRenewalOperation operation = new LockRenewalOperation(message.getLockToken(), maxLockRenewalDuration, false, ignored -> renewMessageLock(message)); renewalContainer.addOrUpdate(message.getLockToken(), OffsetDateTime.now().plus(maxLockRenewalDuration), operation); return operation.getCompletionOperation() .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } /** * Renews the session lock if this receiver is a session receiver. * * @return The next expiration time for the session lock. * @throws IllegalStateException if the receiver is a non-session receiver. */ public Mono<OffsetDateTime> renewSessionLock() { return renewSessionLock(receiverOptions.getSessionId()); } /** * Starts the auto lock renewal for the session this receiver works for. * * @param maxLockRenewalDuration Maximum duration to keep renewing the session lock. * * @return A lock renewal operation for the message. * @throws NullPointerException if {@code sessionId} or {@code maxLockRenewalDuration} is null. * @throws IllegalArgumentException if {@code sessionId} is an empty string. * @throws IllegalStateException if the receiver is a non-session receiver or the receiver is disposed. */ public Mono<Void> renewSessionLock(Duration maxLockRenewalDuration) { return this.renewSessionLock(receiverOptions.getSessionId(), maxLockRenewalDuration); } /** * Sets the state of the session this receiver works for. * * @param sessionState State to set on the session. * * @return A Mono that completes when the session is set * @throws IllegalStateException if the receiver is a non-session receiver. */ public Mono<Void> setSessionState(byte[] sessionState) { return this.setSessionState(receiverOptions.getSessionId(), sessionState); } /** * Starts a new service side transaction. 
The {@link ServiceBusTransactionContext} should be passed to all * operations that needs to be in this transaction. * * <p><strong>Create a transaction</strong></p> * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.createTransaction} * * @return The {@link Mono} that finishes this operation on service bus resource. */ public Mono<ServiceBusTransactionContext> createTransaction() { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "createTransaction"))); } return connectionProcessor .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME)) .flatMap(transactionSession -> transactionSession.createTransaction()) .map(transaction -> new ServiceBusTransactionContext(transaction.getTransactionId())); } /** * Commits the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus. * <p><strong>Commit a transaction</strong></p> * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.commitTransaction} * * @param transactionContext to be committed. * * @return The {@link Mono} that finishes this operation on service bus resource. * @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is * null. 
*/ public Mono<Void> commitTransaction(ServiceBusTransactionContext transactionContext) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "commitTransaction"))); } if (Objects.isNull(transactionContext)) { return monoError(logger, new NullPointerException("'transactionContext' cannot be null.")); } else if (Objects.isNull(transactionContext.getTransactionId())) { return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null.")); } return connectionProcessor .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME)) .flatMap(transactionSession -> transactionSession.commitTransaction(new AmqpTransaction( transactionContext.getTransactionId()))); } /** * Rollbacks the transaction given {@link ServiceBusTransactionContext}. This will make a call to Service Bus. * <p><strong>Rollback a transaction</strong></p> * {@codesnippet com.azure.messaging.servicebus.servicebusasyncreceiverclient.rollbackTransaction} * * @param transactionContext to be rollbacked. * * @return The {@link Mono} that finishes this operation on service bus resource. * @throws NullPointerException if {@code transactionContext} or {@code transactionContext.transactionId} is * null. 
*/ public Mono<Void> rollbackTransaction(ServiceBusTransactionContext transactionContext) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "rollbackTransaction"))); } if (Objects.isNull(transactionContext)) { return monoError(logger, new NullPointerException("'transactionContext' cannot be null.")); } else if (Objects.isNull(transactionContext.getTransactionId())) { return monoError(logger, new NullPointerException("'transactionContext.transactionId' cannot be null.")); } return connectionProcessor .flatMap(connection -> connection.createSession(TRANSACTION_LINK_NAME)) .flatMap(transactionSession -> transactionSession.rollbackTransaction(new AmqpTransaction( transactionContext.getTransactionId()))); } /** * Disposes of the consumer by closing the underlying connection to the service. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } try { completionLock.acquire(); } catch (InterruptedException e) { logger.info("Unable to obtain completion lock.", e); } logger.info("Removing receiver links."); final ServiceBusAsyncConsumer disposed = consumer.getAndSet(null); if (disposed != null) { disposed.close(); } if (sessionManager != null) { sessionManager.close(); } onClientClose.run(); } /** * @return receiver options set by user; */ ReceiverOptions getReceiverOptions() { return receiverOptions; } /** * Gets whether or not the management node contains the message lock token and it has not expired. Lock tokens are * held by the management node when they are received from the management node or management operations are * performed using that {@code lockToken}. * * @param lockToken Lock token to check for. * * @return {@code true} if the management node contains the lock token and false otherwise. 
*/ private boolean isManagementToken(String lockToken) { return managementNodeLocks.containsUnexpired(lockToken); } private Mono<Void> updateDisposition(ServiceBusReceivedMessage message, DispositionStatus dispositionStatus, String deadLetterReason, String deadLetterErrorDescription, Map<String, Object> propertiesToModify, ServiceBusTransactionContext transactionContext) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, dispositionStatus.getValue()))); } else if (Objects.isNull(message)) { return monoError(logger, new NullPointerException("'message' cannot be null.")); } final String lockToken = message.getLockToken(); final String sessionId = message.getSessionId(); if (receiverOptions.getReceiveMode() != ReceiveMode.PEEK_LOCK) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "'%s' is not supported on a receiver opened in ReceiveMode.RECEIVE_AND_DELETE.", dispositionStatus)))); } final String sessionIdToUse; if (sessionId == null && !CoreUtils.isNullOrEmpty(receiverOptions.getSessionId())) { sessionIdToUse = receiverOptions.getSessionId(); } else { sessionIdToUse = sessionId; } logger.verbose("{}: Update started. Disposition: {}. Lock: {}. SessionId: {}.", entityPath, dispositionStatus, lockToken, sessionIdToUse); final Mono<Void> performOnManagement = connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(node -> node.updateDisposition(lockToken, dispositionStatus, deadLetterReason, deadLetterErrorDescription, propertiesToModify, sessionId, getLinkName(sessionId), transactionContext)) .then(Mono.fromRunnable(() -> { logger.info("{}: Management node Update completed. Disposition: {}. 
Lock: {}.", entityPath, dispositionStatus, lockToken); managementNodeLocks.remove(lockToken); renewalContainer.remove(lockToken); })); Mono<Void> updateDispositionOperation; if (sessionManager != null) { updateDispositionOperation = sessionManager.updateDisposition(lockToken, sessionId, dispositionStatus, propertiesToModify, deadLetterReason, deadLetterErrorDescription, transactionContext) .flatMap(isSuccess -> { if (isSuccess) { renewalContainer.remove(lockToken); return Mono.empty(); } logger.info("Could not perform on session manger. Performing on management node."); return performOnManagement; }); } else { final ServiceBusAsyncConsumer existingConsumer = consumer.get(); if (isManagementToken(lockToken) || existingConsumer == null) { updateDispositionOperation = performOnManagement; } else { updateDispositionOperation = existingConsumer.updateDisposition(lockToken, dispositionStatus, deadLetterReason, deadLetterErrorDescription, propertiesToModify, transactionContext) .then(Mono.fromRunnable(() -> { logger.verbose("{}: Update completed. Disposition: {}. 
Lock: {}.", entityPath, dispositionStatus, lockToken); renewalContainer.remove(lockToken); })); } } return updateDispositionOperation .onErrorMap(throwable -> { if (throwable instanceof ServiceBusReceiverException) { return throwable; } switch (dispositionStatus) { case COMPLETED: return new ServiceBusReceiverException(throwable, ServiceBusErrorSource.COMPLETE); case ABANDONED: return new ServiceBusReceiverException(throwable, ServiceBusErrorSource.ABANDONED); default: return new ServiceBusReceiverException(throwable, ServiceBusErrorSource.UNKNOWN); } }); } private ServiceBusAsyncConsumer getOrCreateConsumer() { final ServiceBusAsyncConsumer existing = consumer.get(); if (existing != null) { return existing; } final String linkName = StringUtil.getRandomString(entityPath); logger.info("{}: Creating consumer for link '{}'", entityPath, linkName); final Flux<ServiceBusReceiveLink> receiveLink = connectionProcessor.flatMap(connection -> { if (receiverOptions.isSessionReceiver()) { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType, receiverOptions.getSessionId()); } else { return connection.createReceiveLink(linkName, entityPath, receiverOptions.getReceiveMode(), null, entityType); } }) .doOnNext(next -> { final String format = "Created consumer for Service Bus resource: [{}] mode: [{}]" + " sessionEnabled? 
{} transferEntityPath: [{}], entityType: [{}]"; logger.verbose(format, next.getEntityPath(), receiverOptions.getReceiveMode(), CoreUtils.isNullOrEmpty(receiverOptions.getSessionId()), "N/A", entityType); }) .repeat(); final AmqpRetryPolicy retryPolicy = RetryUtil.getRetryPolicy(connectionProcessor.getRetryOptions()); final ServiceBusReceiveLinkProcessor linkMessageProcessor = receiveLink.subscribeWith( new ServiceBusReceiveLinkProcessor(receiverOptions.getPrefetchCount(), retryPolicy, receiverOptions.getReceiveMode())); final ServiceBusAsyncConsumer newConsumer = new ServiceBusAsyncConsumer(linkName, linkMessageProcessor, messageSerializer, receiverOptions); if (consumer.compareAndSet(null, newConsumer)) { return newConsumer; } else { newConsumer.close(); return consumer.get(); } } /** * If the receiver has not connected via {@link * through the management node. * * @return The name of the receive link, or null of it has not connected via a receive link. */ private String getLinkName(String sessionId) { if (sessionManager != null && !CoreUtils.isNullOrEmpty(sessionId)) { return sessionManager.getLinkName(sessionId); } else if (!CoreUtils.isNullOrEmpty(sessionId) && !receiverOptions.isSessionReceiver()) { return null; } else { final ServiceBusAsyncConsumer existing = consumer.get(); return existing != null ? existing.getLinkName() : null; } } Mono<OffsetDateTime> renewSessionLock(String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "renewSessionLock"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException("Cannot renew session lock on a non-session receiver.")); } final String linkName = sessionManager != null ? 
sessionManager.getLinkName(sessionId) : null; return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> channel.renewSessionLock(sessionId, linkName)) .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } Mono<Void> renewSessionLock(String sessionId, Duration maxLockRenewalDuration) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getAutoRenewSessionLock"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException( "Cannot renew session lock on a non-session receiver.")); } else if (maxLockRenewalDuration == null) { return monoError(logger, new NullPointerException("'maxLockRenewalDuration' cannot be null.")); } else if (maxLockRenewalDuration.isNegative()) { return monoError(logger, new IllegalArgumentException( "'maxLockRenewalDuration' cannot be negative.")); } else if (Objects.isNull(sessionId)) { return monoError(logger, new NullPointerException("'sessionId' cannot be null.")); } else if (sessionId.isEmpty()) { return monoError(logger, new IllegalArgumentException("'sessionId' cannot be empty.")); } final LockRenewalOperation operation = new LockRenewalOperation(sessionId, maxLockRenewalDuration, true, this::renewSessionLock); renewalContainer.addOrUpdate(sessionId, OffsetDateTime.now().plus(maxLockRenewalDuration), operation); return operation.getCompletionOperation() .onErrorMap(throwable -> mapError(throwable, ServiceBusErrorSource.RENEW_LOCK)); } Mono<Void> setSessionState(String sessionId, byte[] sessionState) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "setSessionState"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException("Cannot set session state on a non-session receiver.")); } final String linkName 
= sessionManager != null ? sessionManager.getLinkName(sessionId) : null; return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> channel.setSessionState(sessionId, sessionState, linkName)); } Mono<byte[]> getSessionState(String sessionId) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException( String.format(INVALID_OPERATION_DISPOSED_RECEIVER, "getSessionState"))); } else if (!receiverOptions.isSessionReceiver()) { return monoError(logger, new IllegalStateException("Cannot get session state on a non-session receiver.")); } if (sessionManager != null) { return sessionManager.getSessionState(sessionId); } else { return connectionProcessor .flatMap(connection -> connection.getManagementNode(entityPath, entityType)) .flatMap(channel -> channel.getSessionState(sessionId, getLinkName(sessionId))); } } /** * Map the error to {@link ServiceBusReceiverException} */ boolean isConnectionClosed() { return this.connectionProcessor.isChannelClosed(); } }
Instead of having `flux` and `iterable` fields in this class, if an instance of IterableStream is created using an iterable, can this just be converted to `this.flux = Flux.fromIterable(Objects.requireNonNull(iterable, "'iterable' cannot be null."));`. Simplifies code in other methods too where you don't have to check if flux is null or iterable is null.
public IterableStream(Iterable<T> iterable) { this.iterable = Objects.requireNonNull(iterable, "'iterable' cannot be null."); this.flux = null; }
this.iterable = Objects.requireNonNull(iterable, "'iterable' cannot be null.");
public IterableStream(Iterable<T> iterable) { this.iterable = Objects.requireNonNull(iterable, "'iterable' cannot be null."); this.flux = null; }
class IterableStream<T> implements Iterable<T> { private final ClientLogger logger = new ClientLogger(IterableStream.class); private final Flux<T> flux; private final Iterable<T> iterable; /** * Creates an instance with the given {@link Flux}. * * @param flux Flux of items to iterate over. * @throws NullPointerException if {@code flux} is {@code null}. */ public IterableStream(Flux<T> flux) { this.flux = Objects.requireNonNull(flux, "'flux' cannot be null."); this.iterable = null; } /** * Creates an instance with the given {@link Iterable}. * * @param iterable Collection of items to iterate over. * @throws NullPointerException if {@code iterable} is {@code null}. */ /** * Utility function to provide {@link Stream} of value {@code T}. * It will provide the same stream of {@code T} values if called multiple times. * * @return {@link Stream} of value {@code T}. */ public Stream<T> stream() { if (flux != null) { return flux.toStream(); } else if (iterable != null) { return StreamSupport.stream(iterable.spliterator(), false); } else { logger.warning("IterableStream was not initialized with Iterable or Flux, returning empty stream."); return Stream.empty(); } } /** * Utility function to provide {@link Iterator} of value {@code T}. * It will provide same collection of {@code T} values if called multiple times. * * @return {@link Iterator} of value {@code T}. */ @Override public Iterator<T> iterator() { if (flux != null) { return flux.toIterable().iterator(); } else if (iterable != null) { return iterable.iterator(); } else { logger.warning("IterableStream was not initialized with Iterable or Flux, returning empty iterator."); return Collections.emptyIterator(); } } }
class IterableStream<T> implements Iterable<T> { private final ClientLogger logger = new ClientLogger(IterableStream.class); private final Flux<T> flux; private final Iterable<T> iterable; /** * Creates an instance with the given {@link Flux}. * * @param flux Flux of items to iterate over. * @throws NullPointerException if {@code flux} is {@code null}. */ public IterableStream(Flux<T> flux) { this.flux = Objects.requireNonNull(flux, "'flux' cannot be null."); this.iterable = null; } /** * Creates an instance with the given {@link Iterable}. * * @param iterable Collection of items to iterate over. * @throws NullPointerException if {@code iterable} is {@code null}. */ /** * Utility function to provide {@link Stream} of value {@code T}. * It will provide the same stream of {@code T} values if called multiple times. * * @return {@link Stream} of value {@code T}. */ public Stream<T> stream() { if (flux != null) { return flux.toStream(); } else if (iterable != null) { return StreamSupport.stream(iterable.spliterator(), false); } else { logger.warning("IterableStream was not initialized with Iterable or Flux, returning empty stream."); return Stream.empty(); } } /** * Utility function to provide {@link Iterator} of value {@code T}. * It will provide same collection of {@code T} values if called multiple times. * * @return {@link Iterator} of value {@code T}. */ @Override public Iterator<T> iterator() { if (flux != null) { return flux.toIterable().iterator(); } else if (iterable != null) { return iterable.iterator(); } else { logger.warning("IterableStream was not initialized with Iterable or Flux, returning empty iterator."); return Collections.emptyIterator(); } } }
Thanks. Method `isShadow()` in the algorithm is used to determine whether the sql contains shadow coloring. Shadow tables and shadow algorithms are separately configured and referenced by ID. If the user customizes the algorithm, he needs to get the configured shadow tables to customize his own judgment algorithm. So the beginning of `isContainTable` is not used.
public boolean isShadow(final Collection<String> shadowTableNames, final PreciseColumnShadowValue<Comparable<?>> shadowValue) { boolean containTable = shadowTableNames.contains(shadowValue.getLogicTableName()); boolean isSameOperation = shadowOperationType == shadowValue.getShadowOperationType(); boolean isSameColumnName = Objects.equals(props.get(COLUMN), shadowValue.getColumnName()); boolean isRegexMatch = String.valueOf(shadowValue.getValue()).matches(props.get(REGEX).toString()); return containTable && isSameOperation && isSameColumnName && isRegexMatch; }
boolean containTable = shadowTableNames.contains(shadowValue.getLogicTableName());
public boolean isShadow(final Collection<String> shadowTableNames, final PreciseColumnShadowValue<Comparable<?>> shadowValue) { boolean containTable = shadowTableNames.contains(shadowValue.getLogicTableName()); boolean isSameOperation = shadowOperationType == shadowValue.getShadowOperationType(); boolean isSameColumnName = Objects.equals(props.get(COLUMN), shadowValue.getColumnName()); boolean isRegexMatch = String.valueOf(shadowValue.getValue()).matches(props.get(REGEX).toString()); return containTable && isSameOperation && isSameColumnName && isRegexMatch; }
class ColumnRegexMatchShadowAlgorithm implements ColumnShadowAlgorithm<Comparable<?>> { private static final String COLUMN = "column"; private static final String OPERATION = "operation"; private static final String REGEX = "regex"; private Properties props = new Properties(); private ShadowOperationType shadowOperationType; @Override public String getType() { return "COLUMN_REGEX_MATCH"; } @Override public void init() { checkProps(); } private void checkProps() { checkOperation(); checkColumn(); checkRegex(); } private void checkRegex() { String expression = props.getProperty(REGEX); Preconditions.checkNotNull(expression, "Column regex match shadow algorithm regex cannot be null."); } private void checkColumn() { String expression = props.getProperty(COLUMN); Preconditions.checkNotNull(expression, "Column regex match shadow algorithm column cannot be null."); } private void checkOperation() { String operationType = props.getProperty(OPERATION); Preconditions.checkNotNull(operationType, "Column regex match shadow algorithm operation cannot be null."); Optional<ShadowOperationType> shadowOperationType = ShadowOperationType.contains(operationType); Preconditions.checkState(shadowOperationType.isPresent(), "Column regex match shadow algorithm operation must be one of select insert update delete."); shadowOperationType.ifPresent(type -> this.shadowOperationType = type); } @Override }
class ColumnRegexMatchShadowAlgorithm implements ColumnShadowAlgorithm<Comparable<?>> { private static final String COLUMN = "column"; private static final String OPERATION = "operation"; private static final String REGEX = "regex"; private Properties props = new Properties(); private ShadowOperationType shadowOperationType; @Override public String getType() { return "COLUMN_REGEX_MATCH"; } @Override public void init() { checkProps(); } private void checkProps() { checkOperation(); checkColumn(); checkRegex(); } private void checkRegex() { String expression = props.getProperty(REGEX); Preconditions.checkNotNull(expression, "Column regex match shadow algorithm regex cannot be null."); } private void checkColumn() { String expression = props.getProperty(COLUMN); Preconditions.checkNotNull(expression, "Column regex match shadow algorithm column cannot be null."); } private void checkOperation() { String operationType = props.getProperty(OPERATION); Preconditions.checkNotNull(operationType, "Column regex match shadow algorithm operation cannot be null."); Optional<ShadowOperationType> shadowOperationType = ShadowOperationType.contains(operationType); Preconditions.checkState(shadowOperationType.isPresent(), "Column regex match shadow algorithm operation must be one of select insert update delete."); shadowOperationType.ifPresent(type -> this.shadowOperationType = type); } @Override }
The `count != 0` is not under the lock
public void awaitZero() throws InterruptedException { if (count != 0) { latch.await(); } }
if (count != 0) {
public void awaitZero() throws InterruptedException { sync.acquireSharedInterruptibly(1); }
class CountingLatch { private int count; private final Lock lock = new ReentrantLock(); private final CountDownLatch latch; public CountingLatch(int initialValue) { this.count = initialValue; this.latch = new CountDownLatch(1); } public void increment() { lock.lock(); try { count++; if (count == 0) { latch.countDown(); } } finally { lock.unlock(); } } public void decrement() { lock.lock(); try { Preconditions.checkArgument(count > 0); count--; if (count == 0) { latch.countDown(); } } finally { lock.unlock(); } } public boolean awaitZero(long timeout, TimeUnit unit) throws InterruptedException { if (count != 0) { return latch.await(timeout, unit); } return true; } public int getCount() { lock.lock(); try { return count; } finally { lock.unlock(); } } }
class Sync extends AbstractQueuedSynchronizer { private Sync() { } private Sync(final int initialState) { setState(initialState); } int getCount() { return getState(); } protected int tryAcquireShared(final int acquires) { return getState() == 0 ? 1 : -1; } protected boolean tryReleaseShared(final int delta) { while (true) { final int st = getState(); final int nextSt = st + delta; Preconditions.checkState(nextSt >= 0); if (compareAndSetState(st, nextSt)) { return nextSt == 0; } } } }
I agree with the change but I think the comment should be: ```suggestion //We must not ignore the returned CompletionStage! ```
public void disposeStageSession(@Disposes Stage.Session reactiveSession) { if (reactiveSession != null) { reactiveSession.close().toCompletableFuture().join(); } }
public void disposeStageSession(@Disposes Stage.Session reactiveSession) { if (reactiveSession != null) { reactiveSession.close().toCompletableFuture().join(); } }
class ReactiveSessionProducer { @Inject Stage.SessionFactory reactiveSessionFactory; @Inject Mutiny.SessionFactory mutinySessionFactory; @Produces @RequestScoped @DefaultBean public Stage.Session createStageSession() { return reactiveSessionFactory.openSession(); } @Produces @RequestScoped @DefaultBean public Mutiny.Session createMutinySession() { return mutinySessionFactory.openSession(); } public void disposeMutinySession(@Disposes Mutiny.Session reactiveSession) { if (reactiveSession != null) { reactiveSession.close().subscribe().asCompletionStage().join(); } } }
class ReactiveSessionProducer { @Inject Stage.SessionFactory reactiveSessionFactory; @Inject Mutiny.SessionFactory mutinySessionFactory; @Produces @RequestScoped @DefaultBean public Stage.Session createStageSession() { return reactiveSessionFactory.openSession(); } @Produces @RequestScoped @DefaultBean public Mutiny.Session createMutinySession() { return mutinySessionFactory.openSession(); } public void disposeMutinySession(@Disposes Mutiny.Session reactiveSession) { if (reactiveSession != null) { reactiveSession.close().subscribe().asCompletionStage().join(); } } }
Could you also add some comments after the `SET`? Because it fails to parse the statement when comment is around SQL Client commands.
public void testInitFile() throws Exception { List<String> statements = Arrays.asList( "-- Define Table \n" + "CREATE TABLE source (" + "id INT," + "val STRING" + ") WITH (" + " 'connector' = 'values'" + "); " + "-- Define Table \n", "SET key = value;\n"); String initFile = createSqlFile(statements, "init-sql.sql"); String[] args = new String[] {"-i", initFile}; String output = runSqlClient(args, "SET;\nQUIT;\n"); assertThat(output, containsString("key=value")); }
"SET key = value;\n");
public void testInitFile() throws Exception { List<String> statements = Arrays.asList( "-- define table \n" + "CREATE TABLE source (" + "id INT," + "val STRING" + ") WITH (" + " 'connector' = 'values'" + "); \n", " -- define config \nSET key = value;\n"); String initFile = createSqlFile(statements, "init-sql.sql"); String[] args = new String[] {"-i", initFile}; String output = runSqlClient(args, "SET;\nQUIT;\n"); assertThat(output, containsString("key=value")); }
class SqlClientTest { @Rule public ExpectedException thrown = ExpectedException.none(); @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); private Map<String, String> originalEnv; private String historyPath; @Rule public Timeout timeout = new Timeout(1000, TimeUnit.SECONDS); @Before public void before() throws IOException { originalEnv = System.getenv(); File confFolder = tempFolder.newFolder("conf"); File confYaml = new File(confFolder, "flink-conf.yaml"); if (!confYaml.createNewFile()) { throw new IOException("Can't create testing flink-conf.yaml file."); } Map<String, String> map = new HashMap<>(System.getenv()); map.put(ENV_FLINK_CONF_DIR, confFolder.getAbsolutePath()); CommonTestUtils.setEnv(map); historyPath = tempFolder.newFile("history").toString(); } @After public void after() { CommonTestUtils.setEnv(originalEnv); } @Test public void testEmbeddedWithOptions() throws Exception { String[] args = new String[] {"embedded", "-hist", historyPath}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path: " + historyPath)); } @Test public void testEmbeddedWithLongOptions() throws Exception { String[] args = new String[] {"embedded", "--history", historyPath}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path: " + historyPath)); } @Test public void testEmbeddedWithoutOptions() throws Exception { String[] args = new String[] {"embedded"}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path: ")); } @Test public void testEmptyOptions() throws Exception { String[] args = new String[] {}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path")); } @Test public void testUnsupportedGatewayMode() { String[] args = new String[] {"gateway"}; thrown.expect(SqlClientException.class); thrown.expectMessage("Gateway mode is not supported yet."); SqlClient.main(args); } @Test public void 
testErrorMessage() throws Exception { String stmts = "CREATE TABLE T (a int) WITH ('connector' = 'invalid');\n" + "SELECT * FROM T;\n" + "QUIT;\n"; String[] args = new String[] {}; String output = runSqlClient(args, stmts); assertThat( output, containsString( "org.apache.flink.table.api.ValidationException: Could not find any factory for identifier 'invalid'")); String[] errorStack = new String[] { "at org.apache.flink.table.factories.FactoryUtil.discoverFactory", "at org.apache.flink.table.factories.FactoryUtil.createTableSource" }; for (String stack : errorStack) { assertThat(output, not(containsString(stack))); } } @Test public void testVerboseErrorMessage() throws Exception { String stmts = "CREATE TABLE T (a int) WITH ('connector' = 'invalid');\n" + "SET sql-client.verbose=true;\n" + "SELECT * FROM T;\n" + "QUIT;\n"; String[] args = new String[] {}; String output = runSqlClient(args, stmts); String[] errors = new String[] { "org.apache.flink.table.api.ValidationException: Could not find any factory for identifier 'invalid'", "at org.apache.flink.table.factories.FactoryUtil.discoverFactory", "at org.apache.flink.table.factories.FactoryUtil.createTableSource" }; for (String error : errors) { assertThat(output, containsString(error)); } } @Test @Test public void testExecuteSqlFile() throws Exception { List<String> statements = Collections.singletonList("HELP;\n"); String sqlFilePath = createSqlFile(statements, "test-sql.sql"); String[] args = new String[] {"-f", sqlFilePath}; String output = runSqlClient(args); final URL url = getClass().getClassLoader().getResource("sql-client-help-command.out"); final String help = FileUtils.readFileUtf8(new File(url.getFile())); for (String command : help.split("\n")) { assertThat(output, containsString(command)); } } private String runSqlClient(String[] args) throws Exception { return runSqlClient(args, "QUIT;\n"); } private String runSqlClient(String[] args, String statements) throws Exception { try (OutputStream out = new 
ByteArrayOutputStream(); Terminal terminal = TerminalUtils.createDumbTerminal( new ByteArrayInputStream( statements.getBytes(StandardCharsets.UTF_8)), out)) { SqlClient.startClient(args, () -> terminal); return out.toString(); } } private String createSqlFile(List<String> statements, String name) throws IOException { File sqlFileFolder = tempFolder.newFolder("sql-file"); File sqlFile = new File(sqlFileFolder, name); if (!sqlFile.createNewFile()) { throw new IOException(String.format("Can't create testing %s.", name)); } String sqlFilePath = sqlFile.getPath(); Files.write( Paths.get(sqlFilePath), statements, StandardCharsets.UTF_8, StandardOpenOption.APPEND); return sqlFilePath; } }
class SqlClientTest { @Rule public ExpectedException thrown = ExpectedException.none(); @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); private Map<String, String> originalEnv; private String historyPath; @Rule public Timeout timeout = new Timeout(1000, TimeUnit.SECONDS); @Before public void before() throws IOException { originalEnv = System.getenv(); File confFolder = tempFolder.newFolder("conf"); File confYaml = new File(confFolder, "flink-conf.yaml"); if (!confYaml.createNewFile()) { throw new IOException("Can't create testing flink-conf.yaml file."); } Map<String, String> map = new HashMap<>(System.getenv()); map.put(ENV_FLINK_CONF_DIR, confFolder.getAbsolutePath()); CommonTestUtils.setEnv(map); historyPath = tempFolder.newFile("history").toString(); } @After public void after() { CommonTestUtils.setEnv(originalEnv); } @Test public void testEmbeddedWithOptions() throws Exception { String[] args = new String[] {"embedded", "-hist", historyPath}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path: " + historyPath)); } @Test public void testEmbeddedWithLongOptions() throws Exception { String[] args = new String[] {"embedded", "--history", historyPath}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path: " + historyPath)); } @Test public void testEmbeddedWithoutOptions() throws Exception { String[] args = new String[] {"embedded"}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path: ")); } @Test public void testEmptyOptions() throws Exception { String[] args = new String[] {}; String actual = runSqlClient(args); assertThat(actual, containsString("Command history file path")); } @Test public void testUnsupportedGatewayMode() { String[] args = new String[] {"gateway"}; thrown.expect(SqlClientException.class); thrown.expectMessage("Gateway mode is not supported yet."); SqlClient.main(args); } @Test public void 
testErrorMessage() throws Exception { String stmts = "CREATE TABLE T (a int) WITH ('connector' = 'invalid');\n" + "SELECT * FROM T;\n" + "QUIT;\n"; String[] args = new String[] {}; String output = runSqlClient(args, stmts); assertThat( output, containsString( "org.apache.flink.table.api.ValidationException: Could not find any factory for identifier 'invalid'")); String[] errorStack = new String[] { "at org.apache.flink.table.factories.FactoryUtil.discoverFactory", "at org.apache.flink.table.factories.FactoryUtil.createTableSource" }; for (String stack : errorStack) { assertThat(output, not(containsString(stack))); } } @Test public void testVerboseErrorMessage() throws Exception { String stmts = "CREATE TABLE T (a int) WITH ('connector' = 'invalid');\n" + "SET sql-client.verbose=true;\n" + "SELECT * FROM T;\n" + "QUIT;\n"; String[] args = new String[] {}; String output = runSqlClient(args, stmts); String[] errors = new String[] { "org.apache.flink.table.api.ValidationException: Could not find any factory for identifier 'invalid'", "at org.apache.flink.table.factories.FactoryUtil.discoverFactory", "at org.apache.flink.table.factories.FactoryUtil.createTableSource" }; for (String error : errors) { assertThat(output, containsString(error)); } } @Test @Test public void testExecuteSqlFile() throws Exception { List<String> statements = Collections.singletonList("HELP;\n"); String sqlFilePath = createSqlFile(statements, "test-sql.sql"); String[] args = new String[] {"-f", sqlFilePath}; String output = runSqlClient(args); final URL url = getClass().getClassLoader().getResource("sql-client-help-command.out"); final String help = FileUtils.readFileUtf8(new File(url.getFile())); for (String command : help.split("\n")) { assertThat(output, containsString(command)); } } private String runSqlClient(String[] args) throws Exception { return runSqlClient(args, "QUIT;\n"); } private String runSqlClient(String[] args, String statements) throws Exception { try (OutputStream out = new 
ByteArrayOutputStream(); Terminal terminal = TerminalUtils.createDumbTerminal( new ByteArrayInputStream( statements.getBytes(StandardCharsets.UTF_8)), out)) { SqlClient.startClient(args, () -> terminal); return out.toString(); } } private String createSqlFile(List<String> statements, String name) throws IOException { File sqlFileFolder = tempFolder.newFolder("sql-file"); File sqlFile = new File(sqlFileFolder, name); if (!sqlFile.createNewFile()) { throw new IOException(String.format("Can't create testing %s.", name)); } String sqlFilePath = sqlFile.getPath(); Files.write( Paths.get(sqlFilePath), statements, StandardCharsets.UTF_8, StandardOpenOption.APPEND); return sqlFilePath; } }
These code snippets would need a valid `jsonWebKeyToImport` to work. @g2vinay do we have a good working example for this or should we just keep these in the code files for now?
public void createKey() { KeyClient keyClient = createClient(); Key key = keyClient.createKey("keyName", KeyType.EC); System.out.printf("Key is created with name %s and id %s %n", key.name(), key.id()); KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key optionsKey = keyClient.createKey(keyCreateOptions); System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id()); RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName") .keySize(2048) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key rsaKey = keyClient.createRsaKey(rsaKeyCreateOptions); System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id()); EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName") .curve(KeyCurveName.P_384) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key ecKey = keyClient.createEcKey(ecKeyCreateOptions); System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id()); }
public void createKey() { KeyClient keyClient = createClient(); Key key = keyClient.createKey("keyName", KeyType.EC); System.out.printf("Key is created with name %s and id %s %n", key.name(), key.id()); KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key optionsKey = keyClient.createKey(keyCreateOptions); System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id()); RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName") .keySize(2048) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key rsaKey = keyClient.createRsaKey(rsaKeyCreateOptions); System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id()); EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName") .curve(KeyCurveName.P_384) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key ecKey = keyClient.createEcKey(ecKeyCreateOptions); System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id()); }
class KeyClientJavaDocCodeSnippets { private String key1 = "key1"; private String key2 = "key2"; private String value1 = "val1"; private String value2 = "val2"; /** * Generates code sample for creating a {@link KeyAsyncClient} * @return An instance of {@link KeyAsyncClient} */ public KeyAsyncClient createAsyncClientWithHttpClient() { RecordedData networkData = new RecordedData(); KeyAsyncClient keyClient = new KeyClientBuilder() .endpoint("https: .credential(new DefaultAzureCredential()) .httpLogDetailLevel(HttpLogDetailLevel.BODY_AND_HEADERS) .addPolicy(new RecordNetworkCallPolicy(networkData)) .httpClient(HttpClient.createDefault()) .buildAsyncClient(); return keyClient; } /** * Generates code sample for creating a {@link KeyClient} * @return An instance of {@link KeyClient} */ public KeyClient createClient() { KeyClient keyClient = new KeyClientBuilder() .endpoint("https: .credential(new DefaultAzureCredential()) .buildClient(); return keyClient; } /** * Generates code sample for creating a {@link KeyAsyncClient} * @return An instance of {@link KeyAsyncClient} */ public KeyAsyncClient createAsyncClient() { KeyAsyncClient keyClient = new KeyClientBuilder() .endpoint("https: .credential(new DefaultAzureCredential()) .buildAsyncClient(); return keyClient; } /** * Generates code sample for creating a {@link KeyAsyncClient} * @return An instance of {@link KeyAsyncClient} */ public KeyAsyncClient createAsyncClientWithPipeline() { RecordedData networkData = new RecordedData(); HttpPipeline pipeline = HttpPipeline.builder().policies(new RecordNetworkCallPolicy(networkData)).build(); KeyAsyncClient keyClient = new KeyClientBuilder() .pipeline(pipeline) .endpoint("https: .credential(new DefaultAzureCredential()) .buildAsyncClient(); return keyClient; } /** * Generates a code sample for using {@link KeyClient */ /** * Generates a code sample for using {@link KeyClient */ public void getKeySnippets() { KeyClient keyClient = createClient(); String keyVersion = 
"6A385B124DEF4096AF1361A85B16C204"; Key keyWithVersion = keyClient.getKey("keyName", keyVersion); System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id()); Key key = keyClient.getKey("keyName"); System.out.printf("Key is returned with name %s and id %s \n", key.name(), key.id()); keyClient.listKeys().forEach(keyBase -> { Key keyResponse = keyClient.getKey(keyBase); System.out.printf("Key is returned with name %s and id %s \n", keyResponse.name(), keyResponse.id()); }); } /** * Generates a code sample for using {@link KeyClient */ public void deleteKeySnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); DeletedKey deletedKey = keyClient.deleteKey("keyName"); System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId()); } /** * Generates a code sample for using {@link KeyClient */ public void createKeyWithResponses() { KeyClient keyClient = createClient(); KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key optionsKey = keyClient.createKeyWithResponse(keyCreateOptions, new Context(key1, value1)).value(); System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id()); RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName") .keySize(2048) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key rsaKey = keyClient.createRsaKeyWithResponse(rsaKeyCreateOptions, new Context(key1, value1)).value(); System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id()); EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName") .curve(KeyCurveName.P_384) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key ecKey = keyClient.createEcKeyWithResponse(ecKeyCreateOptions, new Context(key1, 
value1)).value(); System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id()); } /** * Generates a code sample for using {@link KeyClient */ public void getKeyWithResponseSnippets() { KeyClient keyClient = createClient(); String keyVersion = "6A385B124DEF4096AF1361A85B16C204"; Key keyWithVersion = keyClient.getKeyWithResponse("keyName", keyVersion, new Context(key1, value1)).value(); System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id()); } /** * Generates a code sample for using {@link KeyClient */ public void updateKeyWithResponseSnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); key.expires(OffsetDateTime.now().plusDays(60)); KeyBase updatedKeyBase = keyClient.updateKeyWithResponse(key, new Context(key1, value1), KeyOperation.ENCRYPT, KeyOperation.DECRYPT).value(); Key updatedKey = keyClient.getKey(updatedKeyBase.name()); System.out.printf("Key is updated with name %s and id %s \n", updatedKey.name(), updatedKey.id()); Key updateKey = keyClient.getKey("keyName"); key.expires(OffsetDateTime.now().plusDays(60)); KeyBase updatedKeyBaseValue = keyClient.updateKeyWithResponse(updateKey, new Context(key1, value1)).value(); Key updatedKeyValue = keyClient.getKey(updatedKeyBaseValue.name()); System.out.printf("Key is updated with name %s and id %s \n", updatedKeyValue.name(), updatedKeyValue.id()); } /** * Generates a code sample for using {@link KeyClient */ public void updateKeySnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); key.expires(OffsetDateTime.now().plusDays(60)); KeyBase updatedKeyBase = keyClient.updateKey(key, KeyOperation.ENCRYPT, KeyOperation.DECRYPT); Key updatedKey = keyClient.getKey(updatedKeyBase.name()); System.out.printf("Key is updated with name %s and id %s \n", updatedKey.name(), updatedKey.id()); Key updateKey = keyClient.getKey("keyName"); key.expires(OffsetDateTime.now().plusDays(60)); 
KeyBase updatedKeyBaseValue = keyClient.updateKey(updateKey); Key updatedKeyValue = keyClient.getKey(updatedKeyBaseValue.name()); System.out.printf("Key is updated with name %s and id %s \n", updatedKeyValue.name(), updatedKeyValue.id()); } /** * Generates a code sample for using {@link KeyClient */ public void deleteKeyWithResponseSnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); DeletedKey deletedKey = keyClient.deleteKeyWithResponse("keyName", new Context(key1, value1)).value(); System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId()); } /** * Generates a code sample for using {@link KeyClient */ public void getDeleteKeyWithResponseSnippets() { KeyClient keyClient = createClient(); DeletedKey deletedKey = keyClient.getDeletedKeyWithResponse("keyName", new Context(key1, value1)).value(); System.out.printf("Deleted Key with recovery Id %s \n", deletedKey.recoveryId()); } /** * Generates a code sample for using {@link KeyClient */ public void purgeDeletedKeySnippets() { KeyClient keyClient = createClient(); VoidResponse purgeResponse = keyClient.purgeDeletedKey("deletedKeyName"); System.out.printf("Purge Status Code: %rsaPrivateExponent", purgeResponse.statusCode()); } /** * Generates a code sample for using {@link KeyClient */ public void recoverDeletedKeyWithResponseSnippets() { KeyClient keyClient = createClient(); Key recoveredKey = keyClient.recoverDeletedKeyWithResponse("deletedKeyName", new Context(key2, value2)).value(); System.out.printf("Recovered key with name %s", recoveredKey.name()); } /** * Generates a code sample for using {@link KeyClient */ public void recoverDeletedKeySnippets() { KeyClient keyClient = createClient(); Key recoveredKey = keyClient.recoverDeletedKey("deletedKeyName"); System.out.printf("Recovered key with name %s", recoveredKey.name()); } /** * Generates a code sample for using {@link KeyClient */ public void backupKeySnippets() { KeyClient keyClient = createClient(); byte[] 
keyBackup = keyClient.backupKey("keyName"); System.out.printf("Key's Backup Byte array's length %s", keyBackup.length); } /** * Generates a code sample for using {@link KeyClient */ public void backupKeyWithResponseSnippets() { KeyClient keyClient = createClient(); byte[] keyBackup = keyClient.backupKeyWithResponse("keyName", new Context(key2, value2)).value(); System.out.printf("Key's Backup Byte array's length %s", keyBackup.length); } /** * Generates a code sample for using {@link KeyClient */ public void restoreKeySnippets() { KeyClient keyClient = createClient(); byte[] keyBackupByteArray = {}; Key keyResponse = keyClient.restoreKey(keyBackupByteArray); System.out.printf("Restored Key with name %s and id %s \n", keyResponse.name(), keyResponse.id()); } /** * Generates a code sample for using {@link KeyClient */ public void restoreKeyWithResponseSnippets() { KeyClient keyClient = createClient(); byte[] keyBackupByteArray = {}; Response<Key> keyResponse = keyClient.restoreKeyWithResponse(keyBackupByteArray, new Context(key1, value1)); System.out.printf("Restored Key with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()); } /** * Generates a code sample for using {@link KeyClient */ public void listKeySnippets() { KeyClient keyClient = createClient(); for (KeyBase key : keyClient.listKeys()) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty()); } for (KeyBase key : keyClient.listKeys(new Context(key2, value2))) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty()); } } /** * Generates a code sample for using {@link KeyClient */ public void listDeletedKeysSnippets() { KeyClient keyClient = createClient(); for (DeletedKey deletedKey : keyClient.listDeletedKeys()) { System.out.printf("Deleted key's recovery Id %s", 
deletedKey.recoveryId()); } for (DeletedKey deletedKey : keyClient.listDeletedKeys(new Context(key2, value2))) { System.out.printf("Deleted key's recovery Id %s", deletedKey.recoveryId()); } } /** * Generates code sample for using {@link KeyClient */ public void listKeyVersions() { KeyClient keyClient = createClient(); for (KeyBase key : keyClient.listKeyVersions("keyName")) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version()); } for (KeyBase key : keyClient.listKeyVersions("keyName", new Context(key2, value2))) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version()); } } /** * Implementation not provided for this method * @return {@code null} */ private TokenCredential getKeyVaultCredential() { return null; } }
class KeyClientJavaDocCodeSnippets { private String key1 = "key1"; private String key2 = "key2"; private String value1 = "val1"; private String value2 = "val2"; /** * Generates code sample for creating a {@link KeyClient} * @return An instance of {@link KeyClient} */ public KeyClient createClient() { KeyClient keyClient = new KeyClientBuilder() .endpoint("https: .credential(new DefaultAzureCredential()) .buildClient(); return keyClient; } /** * Generates a code sample for using {@link KeyClient */ /** * Generates a code sample for using {@link KeyClient */ public void deleteKeySnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); DeletedKey deletedKey = keyClient.deleteKey("keyName"); System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId()); } /** * Generates a code sample for using {@link KeyClient */ public void getDeletedKeySnippets() { KeyClient keyClient = createClient(); DeletedKey deletedKey = keyClient.getDeletedKey("keyName"); System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId()); } /** * Generates a code sample for using {@link KeyClient */ public void createKeyWithResponses() { KeyClient keyClient = createClient(); KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key optionsKey = keyClient.createKeyWithResponse(keyCreateOptions, new Context(key1, value1)).value(); System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id()); RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName") .keySize(2048) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key rsaKey = keyClient.createRsaKeyWithResponse(rsaKeyCreateOptions, new Context(key1, value1)).value(); System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id()); EcKeyCreateOptions 
ecKeyCreateOptions = new EcKeyCreateOptions("keyName") .curve(KeyCurveName.P_384) .notBefore(OffsetDateTime.now().plusDays(1)) .expires(OffsetDateTime.now().plusYears(1)); Key ecKey = keyClient.createEcKeyWithResponse(ecKeyCreateOptions, new Context(key1, value1)).value(); System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id()); } /** * Generates a code sample for using {@link KeyClient */ public void getKeyWithResponseSnippets() { KeyClient keyClient = createClient(); String keyVersion = "6A385B124DEF4096AF1361A85B16C204"; Key keyWithVersion = keyClient.getKeyWithResponse("keyName", keyVersion, new Context(key1, value1)).value(); System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id()); } /** * Generates a code sample for using {@link KeyClient */ public void getKeySnippets() { KeyClient keyClient = createClient(); String keyVersion = "6A385B124DEF4096AF1361A85B16C204"; Key keyWithVersion = keyClient.getKey("keyName", keyVersion); System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id()); Key keyWithVersionValue = keyClient.getKey("keyName"); System.out.printf("Key is returned with name %s and id %s \n", keyWithVersionValue.name(), keyWithVersionValue.id()); for (KeyBase key : keyClient.listKeys()) { Key keyResponse = keyClient.getKey(key); System.out.printf("Received key with name %s and type %s", keyResponse.name(), keyResponse.keyMaterial().kty()); } } /** * Generates a code sample for using {@link KeyClient */ public void updateKeyWithResponseSnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); key.expires(OffsetDateTime.now().plusDays(60)); KeyBase updatedKeyBase = keyClient.updateKeyWithResponse(key, new Context(key1, value1), KeyOperation.ENCRYPT, KeyOperation.DECRYPT).value(); Key updatedKey = keyClient.getKey(updatedKeyBase.name()); System.out.printf("Key is updated with name %s and id %s 
\n", updatedKey.name(), updatedKey.id()); } /** * Generates a code sample for using {@link KeyClient */ public void updateKeySnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); key.expires(OffsetDateTime.now().plusDays(60)); KeyBase updatedKeyBase = keyClient.updateKey(key, KeyOperation.ENCRYPT, KeyOperation.DECRYPT); Key updatedKey = keyClient.getKey(updatedKeyBase.name()); System.out.printf("Key is updated with name %s and id %s \n", updatedKey.name(), updatedKey.id()); Key updateKey = keyClient.getKey("keyName"); key.expires(OffsetDateTime.now().plusDays(60)); KeyBase updatedKeyBaseValue = keyClient.updateKey(updateKey); Key updatedKeyValue = keyClient.getKey(updatedKeyBaseValue.name()); System.out.printf("Key is updated with name %s and id %s \n", updatedKeyValue.name(), updatedKeyValue.id()); } /** * Generates a code sample for using {@link KeyClient */ public void deleteKeyWithResponseSnippets() { KeyClient keyClient = createClient(); Key key = keyClient.getKey("keyName"); DeletedKey deletedKey = keyClient.deleteKeyWithResponse("keyName", new Context(key1, value1)).value(); System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId()); } /** * Generates a code sample for using {@link KeyClient */ public void getDeleteKeyWithResponseSnippets() { KeyClient keyClient = createClient(); DeletedKey deletedKey = keyClient.getDeletedKeyWithResponse("keyName", new Context(key1, value1)).value(); System.out.printf("Deleted Key with recovery Id %s \n", deletedKey.recoveryId()); } /** * Generates a code sample for using {@link KeyClient */ public void purgeDeletedKeySnippets() { KeyClient keyClient = createClient(); VoidResponse purgeResponse = keyClient.purgeDeletedKey("deletedKeyName"); System.out.printf("Purge Status Code: %rsaPrivateExponent", purgeResponse.statusCode()); VoidResponse purgedResponse = keyClient.purgeDeletedKey("deletedKeyName", new Context(key2, value2)); System.out.printf("Purge Status Code: 
%rsaPrivateExponent", purgedResponse.statusCode()); } /** * Generates a code sample for using {@link KeyClient */ public void recoverDeletedKeyWithResponseSnippets() { KeyClient keyClient = createClient(); Key recoveredKey = keyClient.recoverDeletedKeyWithResponse("deletedKeyName", new Context(key2, value2)).value(); System.out.printf("Recovered key with name %s", recoveredKey.name()); } /** * Generates a code sample for using {@link KeyClient */ public void recoverDeletedKeySnippets() { KeyClient keyClient = createClient(); Key recoveredKey = keyClient.recoverDeletedKey("deletedKeyName"); System.out.printf("Recovered key with name %s", recoveredKey.name()); } /** * Generates a code sample for using {@link KeyClient */ public void backupKeySnippets() { KeyClient keyClient = createClient(); byte[] keyBackup = keyClient.backupKey("keyName"); System.out.printf("Key's Backup Byte array's length %s", keyBackup.length); } /** * Generates a code sample for using {@link KeyClient */ public void backupKeyWithResponseSnippets() { KeyClient keyClient = createClient(); byte[] keyBackup = keyClient.backupKeyWithResponse("keyName", new Context(key2, value2)).value(); System.out.printf("Key's Backup Byte array's length %s", keyBackup.length); } /** * Generates a code sample for using {@link KeyClient */ public void restoreKeySnippets() { KeyClient keyClient = createClient(); byte[] keyBackupByteArray = {}; Key keyResponse = keyClient.restoreKey(keyBackupByteArray); System.out.printf("Restored Key with name %s and id %s \n", keyResponse.name(), keyResponse.id()); } /** * Generates a code sample for using {@link KeyClient */ public void restoreKeyWithResponseSnippets() { KeyClient keyClient = createClient(); byte[] keyBackupByteArray = {}; Response<Key> keyResponse = keyClient.restoreKeyWithResponse(keyBackupByteArray, new Context(key1, value1)); System.out.printf("Restored Key with name %s and id %s \n", keyResponse.value().name(), keyResponse.value().id()); } /** * Generates a 
code sample for using {@link KeyClient */ public void listKeySnippets() { KeyClient keyClient = createClient(); for (KeyBase key : keyClient.listKeys()) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty()); } for (KeyBase key : keyClient.listKeys(new Context(key2, value2))) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty()); } } /** * Generates a code sample for using {@link KeyClient */ public void listDeletedKeysSnippets() { KeyClient keyClient = createClient(); for (DeletedKey deletedKey : keyClient.listDeletedKeys()) { System.out.printf("Deleted key's recovery Id %s", deletedKey.recoveryId()); } for (DeletedKey deletedKey : keyClient.listDeletedKeys(new Context(key2, value2))) { System.out.printf("Deleted key's recovery Id %s", deletedKey.recoveryId()); } } /** * Generates code sample for using {@link KeyClient */ public void listKeyVersions() { KeyClient keyClient = createClient(); for (KeyBase key : keyClient.listKeyVersions("keyName")) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version()); } for (KeyBase key : keyClient.listKeyVersions("keyName", new Context(key2, value2))) { Key keyWithMaterial = keyClient.getKey(key); System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(), keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version()); } } /** * Implementation not provided for this method * @return {@code null} */ private TokenCredential getKeyVaultCredential() { return null; } }
Does base class method call need to be "synchronized"/single threaded as well?
public void populatePropertyBag() { super.populatePropertyBag(); synchronized(this) { setProperty( this, Constants.Properties.CHANGE_FEED_START_FROM_TYPE, ChangeFeedStartFromTypes.NOW); } }
super.populatePropertyBag();
public void populatePropertyBag() { super.populatePropertyBag(); synchronized(this) { setProperty( this, Constants.Properties.CHANGE_FEED_START_FROM_TYPE, ChangeFeedStartFromTypes.NOW); } }
class ChangeFeedStartFromNowImpl extends ChangeFeedStartFromInternal { public ChangeFeedStartFromNowImpl() { super(); } @Override @Override public boolean supportsFullFidelityRetention() { return true; } @Override public void populateRequest(RxDocumentServiceRequest request) { checkNotNull(request, "Argument 'request' must not be null."); request.getHeaders().put( HttpConstants.HttpHeaders.IF_NONE_MATCH, HttpConstants.HeaderValues.IF_NONE_MATCH_ALL); } }
class ChangeFeedStartFromNowImpl extends ChangeFeedStartFromInternal { public ChangeFeedStartFromNowImpl() { super(); } @Override @Override public boolean supportsFullFidelityRetention() { return true; } @Override public void populateRequest(RxDocumentServiceRequest request) { checkNotNull(request, "Argument 'request' must not be null."); request.getHeaders().put( HttpConstants.HttpHeaders.IF_NONE_MATCH, HttpConstants.HeaderValues.IF_NONE_MATCH_ALL); } }
except sort node, all other node should use same logical to do limit merge, right?
public PlanFragment visitPhysicalLimit(PhysicalLimit<Plan> physicalLimit, PlanTranslatorContext context) { PlanFragment inputFragment = physicalLimit.child(0).accept(this, context); PlanNode child = inputFragment.getPlanRoot(); if (child instanceof OlapScanNode) { child.setLimit(physicalLimit.getLimit() + physicalLimit.getOffset()); return inputFragment; } if (child instanceof SortNode) { ((SortNode) child).setOffset(physicalLimit.getOffset()); child.setLimit(physicalLimit.getLimit()); inputFragment.getChildren().forEach(fragment -> { PlanNode root = fragment.getPlanRoot(); if (root instanceof SortNode) { root.setLimit(physicalLimit.getLimit() + physicalLimit.getOffset()); } }); return inputFragment; } if (child instanceof AggregationNode) { child.setLimit(physicalLimit.getLimit() + physicalLimit.getOffset()); return inputFragment; } return inputFragment; }
if (child instanceof AggregationNode) {
public PlanFragment visitPhysicalLimit(PhysicalLimit<Plan> physicalLimit, PlanTranslatorContext context) { PlanFragment inputFragment = physicalLimit.child(0).accept(this, context); PlanNode child = inputFragment.getPlanRoot(); if (child instanceof SortNode) { SortNode sort = (SortNode) child; sort.setLimit(physicalLimit.getLimit()); sort.setOffset(physicalLimit.getOffset()); return inputFragment; } if (child instanceof ExchangeNode) { ExchangeNode exchangeNode = (ExchangeNode) child; exchangeNode.setLimit(physicalLimit.getLimit()); exchangeNode.setOffset(physicalLimit.getOffset()); if (exchangeNode.getChild(0) instanceof SortNode) { SortNode sort = (SortNode) exchangeNode.getChild(0); sort.setLimit(physicalLimit.getLimit() + physicalLimit.getOffset()); sort.setOffset(0); } return inputFragment; } child.setLimit(physicalLimit.getLimit() + physicalLimit.getOffset()); return inputFragment; }
class PhysicalPlanTranslator extends DefaultPlanVisitor<PlanFragment, PlanTranslatorContext> { /** * The left and right child of origin predicates need to be swap sometimes. * Case A: * select * from t1 join t2 on t2.id=t1.id * The left plan node is t1 and the right plan node is t2. * The left child of origin predicate is t2.id and the right child of origin predicate is t1.id. * In this situation, the children of predicate need to be swap => t1.id=t2.id. */ private static Expression swapEqualToForChildrenOrder(EqualTo equalTo, List<Slot> leftOutput) { Set<ExprId> leftSlots = SlotExtractor.extractSlot(equalTo.left()).stream() .map(NamedExpression::getExprId).collect(Collectors.toSet()); if (leftOutput.stream().map(NamedExpression::getExprId).collect(Collectors.toSet()).containsAll(leftSlots)) { return equalTo; } else { return new EqualTo(equalTo.right(), equalTo.left()); } } /** * Translate Nereids Physical Plan tree to Stale Planner PlanFragment tree. * * @param physicalPlan Nereids Physical Plan tree * @param context context to help translate * @return Stale Planner PlanFragment tree */ public PlanFragment translatePlan(PhysicalPlan physicalPlan, PlanTranslatorContext context) { PlanFragment rootFragment = physicalPlan.accept(this, context); if (rootFragment.isPartitioned() && rootFragment.getPlanRoot().getNumInstances() > 1) { rootFragment = exchangeToMergeFragment(rootFragment, context); } if (physicalPlan.getType() == PlanType.PHYSICAL_PROJECT) { PhysicalProject<Plan> physicalProject = (PhysicalProject<Plan>) physicalPlan; List<Expr> outputExprs = physicalProject.getProjects().stream() .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); rootFragment.setOutputExprs(outputExprs); } else { List<Expr> outputExprs = Lists.newArrayList(); physicalPlan.getOutput().stream().map(Slot::getExprId) .forEach(exprId -> outputExprs.add(context.findSlotRef(exprId))); rootFragment.setOutputExprs(outputExprs); } 
rootFragment.getPlanRoot().convertToVectoriezd(); for (PlanFragment fragment : context.getPlanFragmentList()) { fragment.finalize(null); } Collections.reverse(context.getPlanFragmentList()); return rootFragment; } /** * Translate Agg. * todo: support DISTINCT */ @Override public PlanFragment visitPhysicalAggregate(PhysicalAggregate<Plan> aggregate, PlanTranslatorContext context) { PlanFragment inputPlanFragment = aggregate.child(0).accept(this, context); List<Expression> groupByExpressionList = aggregate.getGroupByExpressions(); List<NamedExpression> outputExpressionList = aggregate.getOutputExpressions(); List<SlotReference> groupSlotList = Lists.newArrayList(); for (Expression e : groupByExpressionList) { if (e instanceof SlotReference && outputExpressionList.stream().anyMatch(o -> o.anyMatch(e::equals))) { groupSlotList.add((SlotReference) e); } else { groupSlotList.add(new SlotReference(e.toSql(), e.getDataType(), e.nullable(), Collections.emptyList())); } } ArrayList<Expr> execGroupingExpressions = groupByExpressionList.stream() .map(e -> ExpressionTranslator.translate(e, context)).collect(Collectors.toCollection(ArrayList::new)); List<Slot> aggFunctionOutput = Lists.newArrayList(); List<AggregateFunction> aggregateFunctionList = outputExpressionList.stream() .filter(o -> o.anyMatch(AggregateFunction.class::isInstance)) .peek(o -> aggFunctionOutput.add(o.toSlot())) .map(o -> o.<List<AggregateFunction>>collect(AggregateFunction.class::isInstance)) .flatMap(List::stream) .collect(Collectors.toList()); ArrayList<FunctionCallExpr> execAggregateFunctions = aggregateFunctionList.stream() .map(x -> (FunctionCallExpr) ExpressionTranslator.translate(x, context)) .collect(Collectors.toCollection(ArrayList::new)); List<Slot> slotList = Lists.newArrayList(); TupleDescriptor outputTupleDesc; if (aggregate.getAggPhase() == AggPhase.GLOBAL) { slotList.addAll(groupSlotList); slotList.addAll(aggFunctionOutput); outputTupleDesc = generateTupleDesc(slotList, null, context); } 
else { outputTupleDesc = generateTupleDesc(aggregate.getOutput(), null, context); } List<Expression> partitionExpressionList = aggregate.getPartitionExpressions(); List<Expr> execPartitionExpressions = partitionExpressionList.stream() .map(e -> ExpressionTranslator.translate(e, context)).collect(Collectors.toList()); DataPartition mergePartition = DataPartition.UNPARTITIONED; if (CollectionUtils.isNotEmpty(execPartitionExpressions)) { mergePartition = DataPartition.hashPartitioned(execGroupingExpressions); } if (aggregate.getAggPhase() == AggPhase.GLOBAL) { for (FunctionCallExpr execAggregateFunction : execAggregateFunctions) { execAggregateFunction.setMergeForNereids(true); } } AggregateInfo aggInfo = AggregateInfo.create(execGroupingExpressions, execAggregateFunctions, outputTupleDesc, outputTupleDesc, aggregate.getAggPhase().toExec()); AggregationNode aggregationNode = new AggregationNode(context.nextPlanNodeId(), inputPlanFragment.getPlanRoot(), aggInfo); inputPlanFragment.setPlanRoot(aggregationNode); switch (aggregate.getAggPhase()) { case LOCAL: aggregationNode.unsetNeedsFinalize(); aggregationNode.setUseStreamingPreagg(aggregate.isUsingStream()); aggregationNode.setIntermediateTuple(); return createParentFragment(inputPlanFragment, mergePartition, context); case GLOBAL: inputPlanFragment.updateDataPartition(mergePartition); return inputPlanFragment; default: throw new RuntimeException("Unsupported yet"); } } @Override public PlanFragment visitPhysicalOlapScan(PhysicalOlapScan olapScan, PlanTranslatorContext context) { List<Slot> slotList = olapScan.getOutput(); OlapTable olapTable = olapScan.getTable(); List<Expr> execConjunctsList = olapScan .getExpressions() .stream() .map(e -> ExpressionTranslator.translate(e, context)).collect(Collectors.toList()); TupleDescriptor tupleDescriptor = generateTupleDesc(slotList, olapTable, context); tupleDescriptor.setTable(olapTable); OlapScanNode olapScanNode = new OlapScanNode(context.nextPlanNodeId(), tupleDescriptor, 
olapTable.getName()); TableName tableName = new TableName(null, "", ""); TableRef ref = new TableRef(tableName, null, null); BaseTableRef tableRef = new BaseTableRef(ref, olapTable, tableName); tupleDescriptor.setRef(tableRef); olapScanNode.setSelectedPartitionIds(olapScan.getSelectedPartitionId()); try { olapScanNode.updateScanRangeInfoByNewMVSelector(olapScan.getSelectedIndexId(), false, ""); } catch (Exception e) { throw new AnalysisException(e.getMessage()); } Utils.execWithUncheckedException(olapScanNode::init); olapScanNode.addConjuncts(execConjunctsList); context.addScanNode(olapScanNode); PlanFragment planFragment = new PlanFragment(context.nextFragmentId(), olapScanNode, DataPartition.RANDOM); context.addPlanFragment(planFragment); return planFragment; } /** * Physical sort: * 1. Build sortInfo * There are two types of slotRef: * one is generated by the previous node, collectively called old. * the other is newly generated by the sort node, collectively called new. * Filling of sortInfo related data structures, * a. ordering use newSlotRef. * b. sortTupleSlotExprs use oldSlotRef. * 2. Create sortNode * 3. Create mergeFragment * TODO: When the slotRef of sort is currently generated, * it will be based on the expression in select and orderBy expression in to ensure the uniqueness of slotRef. * But eg: * select a+1 from table order by a+1; * the expressions of the two are inconsistent. * The former will perform an additional Alias. * Currently we cannot test whether this will have any effect. * After a+1 can be parsed , reprocessing. 
*/ @Override public PlanFragment visitPhysicalHeapSort(PhysicalHeapSort<Plan> sort, PlanTranslatorContext context) { PlanFragment childFragment = sort.child(0).accept(this, context); List<Expr> oldOrderingExprList = Lists.newArrayList(); List<Boolean> ascOrderList = Lists.newArrayList(); List<Boolean> nullsFirstParamList = Lists.newArrayList(); List<OrderKey> orderKeyList = sort.getOrderKeys(); orderKeyList.forEach(k -> { oldOrderingExprList.add(ExpressionTranslator.translate(k.getExpr(), context)); ascOrderList.add(k.isAsc()); nullsFirstParamList.add(k.isNullFirst()); }); List<Expr> sortTupleOutputList = new ArrayList<>(); List<Slot> outputList = sort.getOutput(); outputList.forEach(k -> { sortTupleOutputList.add(ExpressionTranslator.translate(k, context)); }); TupleDescriptor tupleDesc = generateTupleDesc(outputList, orderKeyList, context, null); List<Expr> newOrderingExprList = Lists.newArrayList(); orderKeyList.forEach(k -> { newOrderingExprList.add(ExpressionTranslator.translate(k.getExpr(), context)); }); SortInfo sortInfo = new SortInfo(newOrderingExprList, ascOrderList, nullsFirstParamList, tupleDesc); PlanNode childNode = childFragment.getPlanRoot(); SortNode sortNode = new SortNode(context.nextPlanNodeId(), childNode, sortInfo, true); sortNode.finalizeForNereids(tupleDesc, sortTupleOutputList, oldOrderingExprList); childFragment.addPlanRoot(sortNode); if (!childFragment.isPartitioned()) { return childFragment; } PlanFragment mergeFragment = createParentFragment(childFragment, DataPartition.UNPARTITIONED, context); ExchangeNode exchangeNode = (ExchangeNode) mergeFragment.getPlanRoot(); exchangeNode.unsetLimit(); exchangeNode.setMergeInfo(sortNode.getSortInfo(), 0); /* SortNode childSortNode = (SortNode) childFragment.getPlanRoot(); Preconditions.checkState(sortNode == childSortNode); if (sortNode.hasLimit()) { childSortNode.unsetLimit(); childSortNode.setLimit(limit + offset); } childSortNode.setOffset(0); */ return mergeFragment; } @Override public 
PlanFragment visitPhysicalHashJoin(PhysicalHashJoin<Plan, Plan> hashJoin, PlanTranslatorContext context) { PlanFragment rightFragment = hashJoin.child(1).accept(this, context); PlanFragment leftFragment = hashJoin.child(0).accept(this, context); PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot(); PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot(); JoinType joinType = hashJoin.getJoinType(); if (joinType.equals(JoinType.CROSS_JOIN) || (joinType.equals(JoinType.INNER_JOIN) && !hashJoin.getCondition().isPresent())) { throw new RuntimeException("Physical hash join could not execute without equal join condition."); } else { Expression eqJoinExpression = hashJoin.getCondition().get(); List<Expr> execEqConjunctList = ExpressionUtils.extractConjunction(eqJoinExpression).stream() .map(EqualTo.class::cast) .map(e -> swapEqualToForChildrenOrder(e, hashJoin.left().getOutput())) .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); TupleDescriptor outputDescriptor = context.generateTupleDesc(); List<Expr> srcToOutput = hashJoin.getOutput().stream() .map(SlotReference.class::cast) .peek(s -> context.createSlotDesc(outputDescriptor, s)) .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); HashJoinNode hashJoinNode = new HashJoinNode(context.nextPlanNodeId(), leftFragmentPlanRoot, rightFragmentPlanRoot, JoinType.toJoinOperator(joinType), execEqConjunctList, Lists.newArrayList(), srcToOutput, outputDescriptor, outputDescriptor); hashJoinNode.setDistributionMode(DistributionMode.BROADCAST); hashJoinNode.setChild(0, leftFragmentPlanRoot); connectChildFragment(hashJoinNode, 1, leftFragment, rightFragment, context); leftFragment.setPlanRoot(hashJoinNode); return leftFragment; } } @Override public PlanFragment visitPhysicalProject(PhysicalProject<Plan> project, PlanTranslatorContext context) { PlanFragment inputFragment = project.child(0).accept(this, context); List<Expr> execExprList = 
project.getProjects() .stream() .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); PlanNode inputPlanNode = inputFragment.getPlanRoot(); List<Expr> predicateList = inputPlanNode.getConjuncts(); Set<Integer> requiredSlotIdList = new HashSet<>(); for (Expr expr : predicateList) { extractExecSlot(expr, requiredSlotIdList); } for (Expr expr : execExprList) { if (expr instanceof SlotRef) { requiredSlotIdList.add(((SlotRef) expr).getDesc().getId().asInt()); } } return inputFragment; } @Override public PlanFragment visitPhysicalFilter(PhysicalFilter<Plan> filter, PlanTranslatorContext context) { PlanFragment inputFragment = filter.child(0).accept(this, context); PlanNode planNode = inputFragment.getPlanRoot(); Expression expression = filter.getPredicates(); List<Expression> expressionList = ExpressionUtils.extractConjunction(expression); expressionList.stream().map(e -> ExpressionTranslator.translate(e, context)).forEach(planNode::addConjunct); return inputFragment; } @Override private void extractExecSlot(Expr root, Set<Integer> slotRefList) { if (root instanceof SlotRef) { slotRefList.add(((SlotRef) root).getDesc().getId().asInt()); return; } for (Expr child : root.getChildren()) { extractExecSlot(child, slotRefList); } } private TupleDescriptor generateTupleDesc(List<Slot> slotList, Table table, PlanTranslatorContext context) { TupleDescriptor tupleDescriptor = context.generateTupleDesc(); tupleDescriptor.setTable(table); for (Slot slot : slotList) { context.createSlotDesc(tupleDescriptor, (SlotReference) slot); } return tupleDescriptor; } private TupleDescriptor generateTupleDesc(List<Slot> slotList, List<OrderKey> orderKeyList, PlanTranslatorContext context, Table table) { TupleDescriptor tupleDescriptor = context.generateTupleDesc(); tupleDescriptor.setTable(table); Set<ExprId> alreadyExists = Sets.newHashSet(); for (OrderKey orderKey : orderKeyList) { if (orderKey.getExpr() instanceof SlotReference) { SlotReference slotReference = 
(SlotReference) orderKey.getExpr(); if (alreadyExists.contains(slotReference.getExprId())) { continue; } context.createSlotDesc(tupleDescriptor, (SlotReference) orderKey.getExpr()); alreadyExists.add(slotReference.getExprId()); } } for (Slot slot : slotList) { if (alreadyExists.contains(slot.getExprId())) { continue; } context.createSlotDesc(tupleDescriptor, (SlotReference) slot); alreadyExists.add(slot.getExprId()); } return tupleDescriptor; } private PlanFragment createParentFragment(PlanFragment childFragment, DataPartition parentPartition, PlanTranslatorContext context) { ExchangeNode exchangeNode = new ExchangeNode(context.nextPlanNodeId(), childFragment.getPlanRoot(), false); exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances()); PlanFragment parentFragment = new PlanFragment(context.nextFragmentId(), exchangeNode, parentPartition); childFragment.setDestination(exchangeNode); childFragment.setOutputPartition(parentPartition); context.addPlanFragment(parentFragment); return parentFragment; } private void connectChildFragment(PlanNode node, int childIdx, PlanFragment parentFragment, PlanFragment childFragment, PlanTranslatorContext context) { ExchangeNode exchangeNode = new ExchangeNode(context.nextPlanNodeId(), childFragment.getPlanRoot(), false); exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances()); exchangeNode.setFragment(parentFragment); node.setChild(childIdx, exchangeNode); childFragment.setDestination(exchangeNode); } /** * Return unpartitioned fragment that merges the input fragment's output via * an ExchangeNode. * Requires that input fragment be partitioned. 
*/ private PlanFragment exchangeToMergeFragment(PlanFragment inputFragment, PlanTranslatorContext context) { Preconditions.checkState(inputFragment.isPartitioned()); ExchangeNode mergePlan = new ExchangeNode(context.nextPlanNodeId(), inputFragment.getPlanRoot(), false); mergePlan.setNumInstances(inputFragment.getPlanRoot().getNumInstances()); PlanFragment fragment = new PlanFragment(context.nextFragmentId(), mergePlan, DataPartition.UNPARTITIONED); inputFragment.setDestination(mergePlan); context.addPlanFragment(fragment); return fragment; } }
class PhysicalPlanTranslator extends DefaultPlanVisitor<PlanFragment, PlanTranslatorContext> { /** * The left and right child of origin predicates need to be swap sometimes. * Case A: * select * from t1 join t2 on t2.id=t1.id * The left plan node is t1 and the right plan node is t2. * The left child of origin predicate is t2.id and the right child of origin predicate is t1.id. * In this situation, the children of predicate need to be swap => t1.id=t2.id. */ private static Expression swapEqualToForChildrenOrder(EqualTo equalTo, List<Slot> leftOutput) { Set<ExprId> leftSlots = SlotExtractor.extractSlot(equalTo.left()).stream() .map(NamedExpression::getExprId).collect(Collectors.toSet()); if (leftOutput.stream().map(NamedExpression::getExprId).collect(Collectors.toSet()).containsAll(leftSlots)) { return equalTo; } else { return new EqualTo(equalTo.right(), equalTo.left()); } } /** * Translate Nereids Physical Plan tree to Stale Planner PlanFragment tree. * * @param physicalPlan Nereids Physical Plan tree * @param context context to help translate * @return Stale Planner PlanFragment tree */ public PlanFragment translatePlan(PhysicalPlan physicalPlan, PlanTranslatorContext context) { PlanFragment rootFragment = physicalPlan.accept(this, context); if (rootFragment.isPartitioned() && rootFragment.getPlanRoot().getNumInstances() > 1) { rootFragment = exchangeToMergeFragment(rootFragment, context); } if (physicalPlan.getType() == PlanType.PHYSICAL_PROJECT) { PhysicalProject<Plan> physicalProject = (PhysicalProject<Plan>) physicalPlan; List<Expr> outputExprs = physicalProject.getProjects().stream() .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); rootFragment.setOutputExprs(outputExprs); } else { List<Expr> outputExprs = Lists.newArrayList(); physicalPlan.getOutput().stream().map(Slot::getExprId) .forEach(exprId -> outputExprs.add(context.findSlotRef(exprId))); rootFragment.setOutputExprs(outputExprs); } 
rootFragment.getPlanRoot().convertToVectoriezd(); for (PlanFragment fragment : context.getPlanFragmentList()) { fragment.finalize(null); } Collections.reverse(context.getPlanFragmentList()); return rootFragment; } /** * Translate Agg. * todo: support DISTINCT */ @Override public PlanFragment visitPhysicalAggregate(PhysicalAggregate<Plan> aggregate, PlanTranslatorContext context) { PlanFragment inputPlanFragment = aggregate.child(0).accept(this, context); List<Expression> groupByExpressionList = aggregate.getGroupByExpressions(); List<NamedExpression> outputExpressionList = aggregate.getOutputExpressions(); List<SlotReference> groupSlotList = Lists.newArrayList(); for (Expression e : groupByExpressionList) { if (e instanceof SlotReference && outputExpressionList.stream().anyMatch(o -> o.anyMatch(e::equals))) { groupSlotList.add((SlotReference) e); } else { groupSlotList.add(new SlotReference(e.toSql(), e.getDataType(), e.nullable(), Collections.emptyList())); } } ArrayList<Expr> execGroupingExpressions = groupByExpressionList.stream() .map(e -> ExpressionTranslator.translate(e, context)).collect(Collectors.toCollection(ArrayList::new)); List<Slot> aggFunctionOutput = Lists.newArrayList(); List<AggregateFunction> aggregateFunctionList = outputExpressionList.stream() .filter(o -> o.anyMatch(AggregateFunction.class::isInstance)) .peek(o -> aggFunctionOutput.add(o.toSlot())) .map(o -> o.<List<AggregateFunction>>collect(AggregateFunction.class::isInstance)) .flatMap(List::stream) .collect(Collectors.toList()); ArrayList<FunctionCallExpr> execAggregateFunctions = aggregateFunctionList.stream() .map(x -> (FunctionCallExpr) ExpressionTranslator.translate(x, context)) .collect(Collectors.toCollection(ArrayList::new)); List<Slot> slotList = Lists.newArrayList(); TupleDescriptor outputTupleDesc; if (aggregate.getAggPhase() == AggPhase.GLOBAL) { slotList.addAll(groupSlotList); slotList.addAll(aggFunctionOutput); outputTupleDesc = generateTupleDesc(slotList, null, context); } 
else { outputTupleDesc = generateTupleDesc(aggregate.getOutput(), null, context); } List<Expression> partitionExpressionList = aggregate.getPartitionExpressions(); List<Expr> execPartitionExpressions = partitionExpressionList.stream() .map(e -> ExpressionTranslator.translate(e, context)).collect(Collectors.toList()); DataPartition mergePartition = DataPartition.UNPARTITIONED; if (CollectionUtils.isNotEmpty(execPartitionExpressions)) { mergePartition = DataPartition.hashPartitioned(execGroupingExpressions); } if (aggregate.getAggPhase() == AggPhase.GLOBAL) { for (FunctionCallExpr execAggregateFunction : execAggregateFunctions) { execAggregateFunction.setMergeForNereids(true); } } AggregateInfo aggInfo = AggregateInfo.create(execGroupingExpressions, execAggregateFunctions, outputTupleDesc, outputTupleDesc, aggregate.getAggPhase().toExec()); AggregationNode aggregationNode = new AggregationNode(context.nextPlanNodeId(), inputPlanFragment.getPlanRoot(), aggInfo); inputPlanFragment.setPlanRoot(aggregationNode); switch (aggregate.getAggPhase()) { case LOCAL: aggregationNode.unsetNeedsFinalize(); aggregationNode.setUseStreamingPreagg(aggregate.isUsingStream()); aggregationNode.setIntermediateTuple(); return createParentFragment(inputPlanFragment, mergePartition, context); case GLOBAL: inputPlanFragment.updateDataPartition(mergePartition); return inputPlanFragment; default: throw new RuntimeException("Unsupported yet"); } } @Override public PlanFragment visitPhysicalOlapScan(PhysicalOlapScan olapScan, PlanTranslatorContext context) { List<Slot> slotList = olapScan.getOutput(); OlapTable olapTable = olapScan.getTable(); List<Expr> execConjunctsList = olapScan .getExpressions() .stream() .map(e -> ExpressionTranslator.translate(e, context)).collect(Collectors.toList()); TupleDescriptor tupleDescriptor = generateTupleDesc(slotList, olapTable, context); tupleDescriptor.setTable(olapTable); OlapScanNode olapScanNode = new OlapScanNode(context.nextPlanNodeId(), tupleDescriptor, 
olapTable.getName()); TableName tableName = new TableName(null, "", ""); TableRef ref = new TableRef(tableName, null, null); BaseTableRef tableRef = new BaseTableRef(ref, olapTable, tableName); tupleDescriptor.setRef(tableRef); olapScanNode.setSelectedPartitionIds(olapScan.getSelectedPartitionId()); try { olapScanNode.updateScanRangeInfoByNewMVSelector(olapScan.getSelectedIndexId(), false, ""); } catch (Exception e) { throw new AnalysisException(e.getMessage()); } Utils.execWithUncheckedException(olapScanNode::init); olapScanNode.addConjuncts(execConjunctsList); context.addScanNode(olapScanNode); PlanFragment planFragment = new PlanFragment(context.nextFragmentId(), olapScanNode, DataPartition.RANDOM); context.addPlanFragment(planFragment); return planFragment; } /** * Physical sort: * 1. Build sortInfo * There are two types of slotRef: * one is generated by the previous node, collectively called old. * the other is newly generated by the sort node, collectively called new. * Filling of sortInfo related data structures, * a. ordering use newSlotRef. * b. sortTupleSlotExprs use oldSlotRef. * 2. Create sortNode * 3. Create mergeFragment * TODO: When the slotRef of sort is currently generated, * it will be based on the expression in select and orderBy expression in to ensure the uniqueness of slotRef. * But eg: * select a+1 from table order by a+1; * the expressions of the two are inconsistent. * The former will perform an additional Alias. * Currently we cannot test whether this will have any effect. * After a+1 can be parsed , reprocessing. 
*/ @Override public PlanFragment visitLogicalSort(LogicalSort<Plan> sort, PlanTranslatorContext context) { return super.visitLogicalSort(sort, context); } @Override public PlanFragment visitPhysicalHeapSort(PhysicalHeapSort<Plan> sort, PlanTranslatorContext context) { PlanFragment childFragment = sort.child(0).accept(this, context); List<Expr> oldOrderingExprList = Lists.newArrayList(); List<Boolean> ascOrderList = Lists.newArrayList(); List<Boolean> nullsFirstParamList = Lists.newArrayList(); List<OrderKey> orderKeyList = sort.getOrderKeys(); orderKeyList.forEach(k -> { oldOrderingExprList.add(ExpressionTranslator.translate(k.getExpr(), context)); ascOrderList.add(k.isAsc()); nullsFirstParamList.add(k.isNullFirst()); }); List<Expr> sortTupleOutputList = new ArrayList<>(); List<Slot> outputList = sort.getOutput(); outputList.forEach(k -> { sortTupleOutputList.add(ExpressionTranslator.translate(k, context)); }); TupleDescriptor tupleDesc = generateTupleDesc(outputList, orderKeyList, context, null); List<Expr> newOrderingExprList = Lists.newArrayList(); orderKeyList.forEach(k -> { newOrderingExprList.add(ExpressionTranslator.translate(k.getExpr(), context)); }); SortInfo sortInfo = new SortInfo(newOrderingExprList, ascOrderList, nullsFirstParamList, tupleDesc); PlanNode childNode = childFragment.getPlanRoot(); SortNode sortNode = new SortNode(context.nextPlanNodeId(), childNode, sortInfo, true); sortNode.finalizeForNereids(tupleDesc, sortTupleOutputList, oldOrderingExprList); childFragment.addPlanRoot(sortNode); if (!childFragment.isPartitioned()) { return childFragment; } PlanFragment mergeFragment = createParentFragment(childFragment, DataPartition.UNPARTITIONED, context); ExchangeNode exchangeNode = (ExchangeNode) mergeFragment.getPlanRoot(); exchangeNode.setMergeInfo(sortNode.getSortInfo()); return mergeFragment; } @Override public PlanFragment visitPhysicalHashJoin(PhysicalHashJoin<Plan, Plan> hashJoin, PlanTranslatorContext context) { PlanFragment rightFragment 
= hashJoin.child(1).accept(this, context); PlanFragment leftFragment = hashJoin.child(0).accept(this, context); PlanNode leftFragmentPlanRoot = leftFragment.getPlanRoot(); PlanNode rightFragmentPlanRoot = rightFragment.getPlanRoot(); JoinType joinType = hashJoin.getJoinType(); if (joinType.equals(JoinType.CROSS_JOIN) || (joinType.equals(JoinType.INNER_JOIN) && !hashJoin.getCondition().isPresent())) { throw new RuntimeException("Physical hash join could not execute without equal join condition."); } else { Expression eqJoinExpression = hashJoin.getCondition().get(); List<Expr> execEqConjunctList = ExpressionUtils.extractConjunction(eqJoinExpression).stream() .map(EqualTo.class::cast) .map(e -> swapEqualToForChildrenOrder(e, hashJoin.left().getOutput())) .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); TupleDescriptor outputDescriptor = context.generateTupleDesc(); List<Expr> srcToOutput = hashJoin.getOutput().stream() .map(SlotReference.class::cast) .peek(s -> context.createSlotDesc(outputDescriptor, s)) .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); HashJoinNode hashJoinNode = new HashJoinNode(context.nextPlanNodeId(), leftFragmentPlanRoot, rightFragmentPlanRoot, JoinType.toJoinOperator(joinType), execEqConjunctList, Lists.newArrayList(), srcToOutput, outputDescriptor, outputDescriptor); hashJoinNode.setDistributionMode(DistributionMode.BROADCAST); hashJoinNode.setChild(0, leftFragmentPlanRoot); connectChildFragment(hashJoinNode, 1, leftFragment, rightFragment, context); leftFragment.setPlanRoot(hashJoinNode); return leftFragment; } } @Override public PlanFragment visitPhysicalProject(PhysicalProject<Plan> project, PlanTranslatorContext context) { PlanFragment inputFragment = project.child(0).accept(this, context); List<Expr> execExprList = project.getProjects() .stream() .map(e -> ExpressionTranslator.translate(e, context)) .collect(Collectors.toList()); PlanNode inputPlanNode = 
inputFragment.getPlanRoot(); List<Expr> predicateList = inputPlanNode.getConjuncts(); Set<Integer> requiredSlotIdList = new HashSet<>(); for (Expr expr : predicateList) { extractExecSlot(expr, requiredSlotIdList); } for (Expr expr : execExprList) { if (expr instanceof SlotRef) { requiredSlotIdList.add(((SlotRef) expr).getDesc().getId().asInt()); } } return inputFragment; } @Override public PlanFragment visitPhysicalFilter(PhysicalFilter<Plan> filter, PlanTranslatorContext context) { PlanFragment inputFragment = filter.child(0).accept(this, context); PlanNode planNode = inputFragment.getPlanRoot(); Expression expression = filter.getPredicates(); List<Expression> expressionList = ExpressionUtils.extractConjunction(expression); expressionList.stream().map(e -> ExpressionTranslator.translate(e, context)).forEach(planNode::addConjunct); return inputFragment; } @Override private void extractExecSlot(Expr root, Set<Integer> slotRefList) { if (root instanceof SlotRef) { slotRefList.add(((SlotRef) root).getDesc().getId().asInt()); return; } for (Expr child : root.getChildren()) { extractExecSlot(child, slotRefList); } } private TupleDescriptor generateTupleDesc(List<Slot> slotList, Table table, PlanTranslatorContext context) { TupleDescriptor tupleDescriptor = context.generateTupleDesc(); tupleDescriptor.setTable(table); for (Slot slot : slotList) { context.createSlotDesc(tupleDescriptor, (SlotReference) slot); } return tupleDescriptor; } private TupleDescriptor generateTupleDesc(List<Slot> slotList, List<OrderKey> orderKeyList, PlanTranslatorContext context, Table table) { TupleDescriptor tupleDescriptor = context.generateTupleDesc(); tupleDescriptor.setTable(table); Set<ExprId> alreadyExists = Sets.newHashSet(); for (OrderKey orderKey : orderKeyList) { if (orderKey.getExpr() instanceof SlotReference) { SlotReference slotReference = (SlotReference) orderKey.getExpr(); if (alreadyExists.contains(slotReference.getExprId())) { continue; } 
context.createSlotDesc(tupleDescriptor, (SlotReference) orderKey.getExpr()); alreadyExists.add(slotReference.getExprId()); } } for (Slot slot : slotList) { if (alreadyExists.contains(slot.getExprId())) { continue; } context.createSlotDesc(tupleDescriptor, (SlotReference) slot); alreadyExists.add(slot.getExprId()); } return tupleDescriptor; } private PlanFragment createParentFragment(PlanFragment childFragment, DataPartition parentPartition, PlanTranslatorContext context) { ExchangeNode exchangeNode = new ExchangeNode(context.nextPlanNodeId(), childFragment.getPlanRoot(), false); exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances()); PlanFragment parentFragment = new PlanFragment(context.nextFragmentId(), exchangeNode, parentPartition); childFragment.setDestination(exchangeNode); childFragment.setOutputPartition(parentPartition); context.addPlanFragment(parentFragment); return parentFragment; } private void connectChildFragment(PlanNode node, int childIdx, PlanFragment parentFragment, PlanFragment childFragment, PlanTranslatorContext context) { ExchangeNode exchangeNode = new ExchangeNode(context.nextPlanNodeId(), childFragment.getPlanRoot(), false); exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances()); exchangeNode.setFragment(parentFragment); node.setChild(childIdx, exchangeNode); childFragment.setDestination(exchangeNode); } /** * Return unpartitioned fragment that merges the input fragment's output via * an ExchangeNode. * Requires that input fragment be partitioned. 
*/ private PlanFragment exchangeToMergeFragment(PlanFragment inputFragment, PlanTranslatorContext context) { Preconditions.checkState(inputFragment.isPartitioned()); ExchangeNode mergePlan = new ExchangeNode(context.nextPlanNodeId(), inputFragment.getPlanRoot(), false); mergePlan.setNumInstances(inputFragment.getPlanRoot().getNumInstances()); PlanFragment fragment = new PlanFragment(context.nextFragmentId(), mergePlan, DataPartition.UNPARTITIONED); inputFragment.setDestination(mergePlan); context.addPlanFragment(fragment); return fragment; } }
Use () -> new RuntimeException(...). The reason is that your approach captures the stack trace at assembly time, not really useful.
public Iterable<ConfigSource> getConfigSources(ClassLoader cl) { Map<String, ValueType> keys = config.keysAsMap(); if (keys.isEmpty()) { log.debug("No keys were configured for config source lookup"); return Collections.emptyList(); } List<ConfigSource> result = new ArrayList<>(keys.size()); List<Uni<?>> allUnis = new ArrayList<>(); for (Map.Entry<String, ValueType> entry : keys.entrySet()) { String fullKey = config.prefix.isPresent() ? config.prefix.get() + "/" + entry.getKey() : entry.getKey(); allUnis.add(consulConfigGateway.getValue(fullKey).chain(new Function<Response, Uni<?>>() { @Override public Uni<?> apply(Response response) { if (response != null) { result.add( responseConfigSourceUtil.toConfigSource(response, entry.getValue(), config.prefix)); } else { String message = "Key '" + fullKey + "' not found in Consul."; if (config.failOnMissingKey) { return Uni.createFrom().failure(new RuntimeException(message)); } else { log.info(message); return Uni.createFrom().nullItem(); } } return Uni.createFrom().nullItem(); } })); } try { Uni.combine().all().unis(allUnis).discardItems().await() .atMost(config.agent.connectionTimeout.plus(config.agent.readTimeout.multipliedBy(2))); } catch (CompletionException e) { throw new RuntimeException("An error occurred while attempting to fetch configuration from Consul.", e); } finally { consulConfigGateway.close(); } return result; }
return Uni.createFrom().failure(new RuntimeException(message));
public Iterable<ConfigSource> getConfigSources(ClassLoader cl) { Map<String, ValueType> keys = config.keysAsMap(); if (keys.isEmpty()) { log.debug("No keys were configured for config source lookup"); return Collections.emptyList(); } List<ConfigSource> result = new ArrayList<>(keys.size()); List<Uni<?>> allUnis = new ArrayList<>(); for (Map.Entry<String, ValueType> entry : keys.entrySet()) { String fullKey = config.prefix.isPresent() ? config.prefix.get() + "/" + entry.getKey() : entry.getKey(); allUnis.add(consulConfigGateway.getValue(fullKey).invoke(new Consumer<Response>() { @Override public void accept(Response response) { if (response != null) { result.add( responseConfigSourceUtil.toConfigSource(response, entry.getValue(), config.prefix)); } else { String message = "Key '" + fullKey + "' not found in Consul."; if (config.failOnMissingKey) { throw new RuntimeException(message); } else { log.info(message); } } } })); } try { Uni.combine().all().unis(allUnis).discardItems().await() .atMost(config.agent.connectionTimeout.plus(config.agent.readTimeout.multipliedBy(2))); } catch (CompletionException e) { throw new RuntimeException("An error occurred while attempting to fetch configuration from Consul.", e); } finally { consulConfigGateway.close(); } return result; }
class ConsulConfigSourceProvider implements ConfigSourceProvider { private static final Logger log = Logger.getLogger(ConsulConfigSourceProvider.class); private final ConsulConfig config; private final ConsulConfigGateway consulConfigGateway; private final ResponseConfigSourceUtil responseConfigSourceUtil; public ConsulConfigSourceProvider(ConsulConfig config) { this(config, new VertxConsulConfigGateway(config), new ResponseConfigSourceUtil()); } ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway) { this(config, consulConfigGateway, new ResponseConfigSourceUtil()); } private ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway, ResponseConfigSourceUtil responseConfigSourceUtil) { this.config = config; this.consulConfigGateway = consulConfigGateway; this.responseConfigSourceUtil = responseConfigSourceUtil; } @Override }
class ConsulConfigSourceProvider implements ConfigSourceProvider { private static final Logger log = Logger.getLogger(ConsulConfigSourceProvider.class); private final ConsulConfig config; private final ConsulConfigGateway consulConfigGateway; private final ResponseConfigSourceUtil responseConfigSourceUtil; public ConsulConfigSourceProvider(ConsulConfig config) { this(config, new VertxConsulConfigGateway(config), new ResponseConfigSourceUtil()); } ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway) { this(config, consulConfigGateway, new ResponseConfigSourceUtil()); } private ConsulConfigSourceProvider(ConsulConfig config, ConsulConfigGateway consulConfigGateway, ResponseConfigSourceUtil responseConfigSourceUtil) { this.config = config; this.consulConfigGateway = consulConfigGateway; this.responseConfigSourceUtil = responseConfigSourceUtil; } @Override }
but the user can only pass binaryData, right? so it is already serialized.
public CloudEvent(String source, String type, BinaryData data, CloudEventDataFormat format, String dataContentType) { if (Objects.isNull(source)) { throw LOGGER.logExceptionAsError(new NullPointerException("'source' cannot be null.")); } if (Objects.isNull(type)) { throw LOGGER.logExceptionAsError(new NullPointerException("'type' cannot be null.")); } if (Objects.isNull(data)) { throw LOGGER.logExceptionAsError(new NullPointerException("'data' cannot be null.")); } if (Objects.isNull(format)) { throw LOGGER.logExceptionAsError(new NullPointerException("'format' cannot be null.")); } this.source = source; this.type = type; if (CloudEventDataFormat.BYTES == format) { this.dataBase64 = Base64.getEncoder().encodeToString(data.toBytes()); } else { this.data = data.toString(); } this.dataContentType = dataContentType; this.id = UUID.randomUUID().toString(); this.specVersion = CloudEvent.SPEC_VERSION; }
this.data = data.toString();
public CloudEvent(String source, String type, BinaryData data, CloudEventDataFormat format, String dataContentType) { Objects.requireNonNull(source, "'source' cannot be null."); Objects.requireNonNull(type, "'type' cannot be null."); this.source = source; this.type = type; if (data != null) { Objects.requireNonNull(format, "'format' cannot be null when 'data' isn't null."); if (CloudEventDataFormat.BYTES == format) { this.dataBase64 = Base64.getEncoder().encodeToString(data.toBytes()); } else { try { this.data = BINARY_DATA_OBJECT_MAPPER.readTree(data.toBytes()); } catch (IOException e) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'data' isn't in valid Json format", e)); } } } this.dataContentType = dataContentType; this.id = UUID.randomUUID().toString(); this.specVersion = CloudEvent.SPEC_VERSION; this.binaryData = data; }
class CloudEvent { private static final String SPEC_VERSION = "1.0"; private static final JsonSerializer SERIALIZER; static { JsonSerializer tmp; try { tmp = JsonSerializerProviders.createInstance(); } catch (IllegalStateException e) { tmp = new JacksonSerializer(); } SERIALIZER = tmp; } private static final ClientLogger LOGGER = new ClientLogger(CloudEvent.class); private static final Set<String> RESERVED_ATTRIBUTE_NAMES = new HashSet<>(Arrays.asList( "specversion", "id", "source", "type", "datacontenttype", "dataschema", "subject", "time", "data" )); /* * An identifier for the event. The combination of id and source must be * unique for each distinct event. */ @JsonProperty(value = "id", required = true) private String id; /* * Identifies the context in which an event happened. The combination of id * and source must be unique for each distinct event. */ @JsonProperty(value = "source", required = true) private String source; /* * Event data specific to the event type. */ @JsonProperty(value = "data") private Object data; /* * Event data specific to the event type, encoded as a base64 string. */ @JsonProperty(value = "data_base64") private String dataBase64; /* * Type of event related to the originating occurrence. */ @JsonProperty(value = "type", required = true) private String type; /* * The time (in UTC) the event was generated, in RFC3339 format. */ @JsonProperty(value = "time") private OffsetDateTime time; /* * The version of the CloudEvents specification which the event uses. */ @JsonProperty(value = "specversion", required = true) private String specVersion; /* * Identifies the schema that data adheres to. */ @JsonProperty(value = "dataschema") private String dataSchema; /* * Content type of data value. */ @JsonProperty(value = "datacontenttype") private String dataContentType; /* * This describes the subject of the event in the context of the event * producer (identified by source). 
*/ @JsonProperty(value = "subject") private String subject; @JsonIgnore private Map<String, Object> extensionAttributes; /* * Cache serialized data for getData() */ @JsonIgnore private BinaryData binaryData; /** * * @param source Identifies the context in which an event happened. The combination of id and source must be unique * for each distinct event. * @param type Type of event related to the originating occurrence. * @param data A {@link BinaryData} that wraps the original data, which can be a String, byte[], or model class. * @param format Set to {@link CloudEventDataFormat * {@link CloudEventDataFormat * @param dataContentType The content type of the data. It has no impact on how the data is serialized but tells * the event subscriber how to use the data. Typically the value is of MIME types such as * "application/json", "text/plain", "text/xml", "application/+avro", etc. It can be null. * @throws NullPointerException if source, type, data, or format is null. */ private CloudEvent() { } /** * Deserialize a list of {@link CloudEvent CloudEvents} from a JSON string and validate whether any CloudEvents have * null id', 'source', or 'type'. If you want to skip this validation, use {@link * @param cloudEventsJson the JSON payload containing one or more events. * * @return all of the events in the payload deserialized as {@link CloudEvent CloudEvents}. * @throws NullPointerException if cloudEventsJson is null. * @throws IllegalArgumentException if the input parameter isn't a correct JSON string for a cloud event * or an array of it, or any deserialized CloudEvents have null 'id', 'source', or 'type'. */ public static List<CloudEvent> fromString(String cloudEventsJson) { return fromString(cloudEventsJson, false); } /** * Deserialize a list of {@link CloudEvent CloudEvents} from a JSON string. * @param cloudEventsJson the JSON payload containing one or more events. 
* @param skipValidation set to true if you'd like to skip the validation for the deserialized CloudEvents. A valid * CloudEvent should have 'id', 'source' and 'type' not null. * * @return all of the events in the payload deserialized as {@link CloudEvent CloudEvents}. * @throws NullPointerException if cloudEventsJson is null. * @throws IllegalArgumentException if the input parameter isn't a JSON string for a cloud event or an array of it, * or skipValidation is false and any CloudEvents have null id', 'source', or 'type'. */ public static List<CloudEvent> fromString(String cloudEventsJson, boolean skipValidation) { if (cloudEventsJson == null) { throw LOGGER.logExceptionAsError(new NullPointerException("'cloudEventsJson' cannot be null")); } try { List<CloudEvent> events = Arrays.asList(SERIALIZER.deserialize( new ByteArrayInputStream(cloudEventsJson.getBytes(StandardCharsets.UTF_8)), TypeReference.createInstance(CloudEvent[].class))); if (!skipValidation) { for (CloudEvent event : events) { if (event.getId() == null || event.getSource() == null || event.getType() == null) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "'id', 'source' and 'type' are mandatory attributes for a CloudEvent. " + "Check if the input param is a JSON string for a CloudEvent or an array of it.")); } } } return events; } catch (UncheckedIOException uncheckedIOException) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("The input parameter isn't a JSON string.", uncheckedIOException.getCause())); } } /** * Get the id of the cloud event. * @return the id. */ public String getId() { return this.id; } /** * Set a custom id. Note that a random id is already set by default. * @param id the id to set. * * @return the cloud event itself. * @throws NullPointerException if id is null. * @throws IllegalArgumentException if id is empty. 
*/ public CloudEvent setId(String id) { if (Objects.isNull(id)) { throw LOGGER.logExceptionAsError(new NullPointerException("id cannot be null")); } if (id.isEmpty()) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("id cannot be empty")); } this.id = id; return this; } /** * Get the URI source of the event. * @return the source. */ public String getSource() { return this.source; } /** * Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into * a String, an Object, or a byte[]. * @return A {@link BinaryData} that wraps the this event's data payload. */ public BinaryData getData() { if (this.binaryData == null) { if (this.data != null) { if (this.data instanceof String) { this.binaryData = BinaryData.fromString((String) this.data); } else if (this.data instanceof byte[]) { this.binaryData = BinaryData.fromBytes((byte[]) this.data); } else { this.binaryData = BinaryData.fromObject(this.data, SERIALIZER); } } else if (this.dataBase64 != null) { this.binaryData = BinaryData.fromString(this.dataBase64); } } return this.binaryData; } /** * Get the type of event, e.g. "Contoso.Items.ItemReceived". * @return the type of the event. */ public String getType() { return this.type; } /** * Get the time associated with the occurrence of the event. * @return the event time, or null if the time is not set. */ public OffsetDateTime getTime() { return this.time; } /** * Set the time associated with the occurrence of the event. * @param time the time to set. * * @return the cloud event itself. */ public CloudEvent setTime(OffsetDateTime time) { this.time = time; return this; } /** * Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the * "application/json" type. Note that "application/json" is still a possible value for this field. * @return the content type the data is in, or null if the data is nonexistent or in "application/json" format. 
*/ public String getDataContentType() { return this.dataContentType; } /** * Get the schema that the data adheres to. * @return a URI of the data schema, or null if it is not set. */ public String getDataSchema() { return this.dataSchema; } /** * Set the schema that the data adheres to. * @param dataSchema a URI identifying the schema of the data. * * @return the cloud event itself. */ public CloudEvent setDataSchema(String dataSchema) { this.dataSchema = dataSchema; return this; } /** * Get the subject associated with this event. * @return the subject, or null if the subject was not set. */ public String getSubject() { return this.subject; } /** * Set the subject of the event. * @param subject the subject to set. * * @return the cloud event itself. */ public CloudEvent setSubject(String subject) { this.subject = subject; return this; } /** * Get a map of the additional user-defined attributes associated with this event. * @return the extension attributes as an unmodifiable map. */ @JsonAnyGetter public Map<String, Object> getExtensionAttributes() { return this.extensionAttributes; } /** * Add/Overwrite a single extension attribute to the cloud event. * @param name the name of the attribute. It must contains only alphanumeric characters and not be be any * CloudEvent reserved attribute names. * @param value the value to associate with the name. * * @return the cloud event itself. * @throws IllegalArgumentException if name format isn't correct. 
*/ @JsonAnySetter public CloudEvent addExtensionAttribute(String name, Object value) { if (Objects.isNull(name)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'name' cannot be null.")); } if (Objects.isNull(value)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'value' cannot be null.")); } if (!validateAttributeName(name)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "'name' must have only small-case alphanumeric characters and not be one of the CloudEvent reserved " + "attribute names")); } if (this.extensionAttributes == null) { this.extensionAttributes = new HashMap<>(); } this.extensionAttributes.put(name.toLowerCase(Locale.ENGLISH), value); return this; } /** * Get the spec version. Users don't need to access it because it's always 1.0. * Make it package level to test deserialization. * @return The spec version. */ String getSpecVersion() { return this.specVersion; } /** * Set the spec version. Users don't need to access it because it's always 1.0. * Make it package level to test serialization. * @return the cloud event itself. 
*/ CloudEvent setSpecVersion(String specVersion) { this.specVersion = specVersion; return this; } private static boolean validateAttributeName(String name) { if (RESERVED_ATTRIBUTE_NAMES.contains(name)) { return false; } for (int i = 0; i < name.length(); i++) { char c = name.charAt(i); if (!((c >= 'a' && c <= 'z') || (c >= '0' && c <= '9'))) { return false; } } return true; } static class JacksonSerializer implements JsonSerializer { private final JacksonAdapter jacksonAdapter = new JacksonAdapter(); @Override public <T> T deserialize(InputStream stream, TypeReference<T> typeReference) { try { return jacksonAdapter.deserialize(stream, typeReference.getJavaType(), SerializerEncoding.JSON); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } @Override public <T> Mono<T> deserializeAsync(InputStream stream, TypeReference<T> typeReference) { return Mono.defer(() -> Mono.just(deserialize(stream, typeReference))); } @Override public void serialize(OutputStream stream, Object value) { try { jacksonAdapter.serialize(value, SerializerEncoding.JSON, stream); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } @Override public Mono<Void> serializeAsync(OutputStream stream, Object value) { return Mono.fromRunnable(() -> serialize(stream, value)); } JacksonAdapter getJacksonAdapter() { return jacksonAdapter; } } }
class accepts any String for compatibility with legacy systems. * @param type Type of event related to the originating occurrence. * @param data A {@link BinaryData}
Can you align the output with the above lines?
public void execute() { BuildWorkerParams params = getParameters(); Properties props = buildSystemProperties(); ResolvedDependency appArtifact = params.getAppModel().get().getAppArtifact(); String gav = appArtifact.getGroupId() + ":" + appArtifact.getArtifactId() + ":" + appArtifact.getVersion(); LOGGER.info("Building Quarkus application {}", gav); LOGGER.info(" base name: {}", params.getBaseName().get()); LOGGER.info(" target directory: {}", params.getTargetDirectory().getAsFile().get()); LOGGER.info(" configured package type: {}", props.getProperty(QuarkusPlugin.QUARKUS_PACKAGE_TYPE)); LOGGER.info(" configured output directory: {}", props.getProperty("quarkus.package.output-directory")); LOGGER.info(" configured output name: {}", props.getProperty("quarkus.package.output-name")); LOGGER.info(" Gradle version: {}", params.getGradleVersion().get()); AnalyticsService analyticsService = new AnalyticsService( FileLocationsImpl.INSTANCE, new Log4JMessageWriter(LOGGER)); try (CuratedApplication appCreationContext = createAppCreationContext()) { AugmentAction augmentor = appCreationContext .createAugmentor("io.quarkus.deployment.pkg.builditem.ProcessInheritIODisabled$Factory", Collections.emptyMap()); AugmentResult result = augmentor.createProductionApplication(); if (result == null) { System.err.println("createProductionApplication() returned 'null' AugmentResult"); } else { Map<String, Object> buildInfo = new HashMap<>(result.getGraalVMInfo()); buildInfo.put(GRADLE_VERSION, params.getGradleVersion().get()); analyticsService.sendAnalytics( TrackEventType.BUILD, appCreationContext.getApplicationModel(), buildInfo, params.getTargetDirectory().getAsFile().get()); Path nativeResult = result.getNativeResult(); LOGGER.info("AugmentResult.nativeResult = {}", nativeResult); List<ArtifactResult> results = result.getResults(); if (results == null) { LOGGER.warn("AugmentResult.results = null"); } else { LOGGER.info("AugmentResult.results = {}", 
results.stream().map(ArtifactResult::getPath) .map(r -> r == null ? "null" : r.toString()).collect(Collectors.joining("\n ", "\n ", ""))); } JarResult jar = result.getJar(); LOGGER.info("AugmentResult:"); if (jar == null) { LOGGER.info(" .jar = null"); } else { LOGGER.info(" .jar.path = {}", jar.getPath()); LOGGER.info(" .jar.libraryDir = {}", jar.getLibraryDir()); LOGGER.info(" .jar.originalArtifact = {}", jar.getOriginalArtifact()); LOGGER.info(" .jar.uberJar = {}", jar.isUberJar()); } } LOGGER.info("Quarkus application build was successful"); } catch (BootstrapException e) { throw new GradleException("Failed to build Quarkus application for " + gav + " due to " + e, e); } finally { analyticsService.cleanup(); } }
LOGGER.info(" Gradle version: {}", params.getGradleVersion().get());
public void execute() { BuildWorkerParams params = getParameters(); Properties props = buildSystemProperties(); ResolvedDependency appArtifact = params.getAppModel().get().getAppArtifact(); String gav = appArtifact.getGroupId() + ":" + appArtifact.getArtifactId() + ":" + appArtifact.getVersion(); LOGGER.info("Building Quarkus application {}", gav); LOGGER.info(" base name: {}", params.getBaseName().get()); LOGGER.info(" target directory: {}", params.getTargetDirectory().getAsFile().get()); LOGGER.info(" configured package type: {}", props.getProperty(QuarkusPlugin.QUARKUS_PACKAGE_TYPE)); LOGGER.info(" configured output directory: {}", props.getProperty("quarkus.package.output-directory")); LOGGER.info(" configured output name: {}", props.getProperty("quarkus.package.output-name")); LOGGER.info(" Gradle version: {}", params.getGradleVersion().get()); try (CuratedApplication appCreationContext = createAppCreationContext(); AnalyticsService analyticsService = new AnalyticsService( FileLocationsImpl.INSTANCE, new Slf4JMessageWriter(LOGGER))) { AugmentAction augmentor = appCreationContext .createAugmentor("io.quarkus.deployment.pkg.builditem.ProcessInheritIODisabled$Factory", Collections.emptyMap()); AugmentResult result = augmentor.createProductionApplication(); if (result == null) { System.err.println("createProductionApplication() returned 'null' AugmentResult"); } else { Map<String, Object> buildInfo = new HashMap<>(result.getGraalVMInfo()); buildInfo.put(GRADLE_VERSION, params.getGradleVersion().get()); analyticsService.sendAnalytics( TrackEventType.BUILD, appCreationContext.getApplicationModel(), buildInfo, params.getTargetDirectory().getAsFile().get()); Path nativeResult = result.getNativeResult(); LOGGER.info("AugmentResult.nativeResult = {}", nativeResult); List<ArtifactResult> results = result.getResults(); if (results == null) { LOGGER.warn("AugmentResult.results = null"); } else { LOGGER.info("AugmentResult.results = {}", 
results.stream().map(ArtifactResult::getPath) .map(r -> r == null ? "null" : r.toString()).collect(Collectors.joining("\n ", "\n ", ""))); } JarResult jar = result.getJar(); LOGGER.info("AugmentResult:"); if (jar == null) { LOGGER.info(" .jar = null"); } else { LOGGER.info(" .jar.path = {}", jar.getPath()); LOGGER.info(" .jar.libraryDir = {}", jar.getLibraryDir()); LOGGER.info(" .jar.originalArtifact = {}", jar.getOriginalArtifact()); LOGGER.info(" .jar.uberJar = {}", jar.isUberJar()); } } LOGGER.info("Quarkus application build was successful"); } catch (BootstrapException e) { throw new GradleException("Failed to build Quarkus application for " + gav + " due to " + e, e); } }
class BuildWorker extends QuarkusWorker<BuildWorkerParams> { private static final Logger LOGGER = LoggerFactory.getLogger(BuildWorker.class); @Override private static class Log4JMessageWriter implements MessageWriter { private final Logger LOGGER; public Log4JMessageWriter(final Logger logger) { this.LOGGER = logger; } @Override public void info(String msg) { this.LOGGER.info(msg); } @Override public void error(String msg) { this.LOGGER.error(msg); } @Override public boolean isDebugEnabled() { return this.LOGGER.isDebugEnabled(); } @Override public void debug(String msg) { this.LOGGER.debug(msg); } @Override public void warn(String msg) { this.LOGGER.warn(msg); } } }
class BuildWorker extends QuarkusWorker<BuildWorkerParams> { private static final Logger LOGGER = LoggerFactory.getLogger(BuildWorker.class); @Override private static class Slf4JMessageWriter implements MessageWriter { private final Logger LOGGER; public Slf4JMessageWriter(final Logger logger) { this.LOGGER = logger; } @Override public void info(String msg) { this.LOGGER.info(msg); } @Override public void error(String msg) { this.LOGGER.error(msg); } @Override public boolean isDebugEnabled() { return this.LOGGER.isDebugEnabled(); } @Override public void debug(String msg) { this.LOGGER.debug(msg); } @Override public void warn(String msg) { this.LOGGER.warn(msg); } } }
`BuildContext` could only be null in tests. ISE is fine.
public Collection<AnnotationInstance> getAllAnnotations() { if (annotationStore == null) { throw new IllegalStateException( "Attempted to use TransformationContext } return annotationStore.getAnnotations(getTarget()); }
return annotationStore.getAnnotations(getTarget());
public Collection<AnnotationInstance> getAllAnnotations() { if (annotationStore == null) { throw new IllegalStateException( "Attempted to use TransformationContext } return annotationStore.getAnnotations(getTarget()); }
class TransformationContextImpl implements InjectionPointsTransformer.TransformationContext { private AnnotationTarget target; private Set<AnnotationInstance> qualifiers; private AnnotationStore annotationStore; TransformationContextImpl(AnnotationTarget target, Set<AnnotationInstance> qualifiers, AnnotationStore annotationStore) { this.target = target; this.qualifiers = qualifiers; this.annotationStore = annotationStore; } @Override public AnnotationTarget getTarget() { return target; } @Override public Set<AnnotationInstance> getQualifiers() { return qualifiers; } @Override @Override public InjectionPointsTransformer.Transformation transform() { return new InjectionPointsTransformer.Transformation(this); } @Override public <V> V get(BuildExtension.Key<V> key) { return buildContext.get(key); } @Override public <V> V put(BuildExtension.Key<V> key, V value) { return buildContext.put(key, value); } public void setQualifiers(Set<AnnotationInstance> qualifiers) { this.qualifiers = qualifiers; } }
class TransformationContextImpl implements InjectionPointsTransformer.TransformationContext { private AnnotationTarget target; private Set<AnnotationInstance> qualifiers; private AnnotationStore annotationStore; TransformationContextImpl(AnnotationTarget target, Set<AnnotationInstance> qualifiers, AnnotationStore annotationStore) { this.target = target; this.qualifiers = qualifiers; this.annotationStore = annotationStore; } @Override public AnnotationTarget getTarget() { return target; } @Override public Set<AnnotationInstance> getQualifiers() { return qualifiers; } @Override @Override public InjectionPointsTransformer.Transformation transform() { return new InjectionPointsTransformer.Transformation(this); } @Override public <V> V get(BuildExtension.Key<V> key) { return buildContext.get(key); } @Override public <V> V put(BuildExtension.Key<V> key, V value) { return buildContext.put(key, value); } public void setQualifiers(Set<AnnotationInstance> qualifiers) { this.qualifiers = qualifiers; } }
We should use logger.error here, and everywhere else in the tests.
public void staledLeaseAcquiring() { final String ownerFirst = "Owner_First"; final String ownerSecond = "Owner_Second"; final String leasePrefix = "TEST"; ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.Builder() .hostName(ownerFirst) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst); ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leasePrefix(leasePrefix) ) .build(); ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.Builder() .hostName(ownerSecond) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond); for (CosmosItemProperties item : docs) { processItem(item); } ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leaseRenewInterval(Duration.ofSeconds(10)) .leaseAcquireInterval(Duration.ofSeconds(5)) .leaseExpirationInterval(Duration.ofSeconds(20)) .feedPollDelay(Duration.ofSeconds(2)) .leasePrefix(leasePrefix) .maxItemCount(10) .startFromBeginning(true) .maxScaleCount(0) ) .build(); receivedDocuments = new ConcurrentHashMap<>(); try { changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)) .then(Mono.just(changeFeedProcessorFirst) .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)) .flatMap( value -> changeFeedProcessorFirst.stop() .subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)) )) 
.then(Mono.just(changeFeedProcessorFirst) .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)) ) .doOnSuccess(aVoid -> { try { Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2); } catch (InterruptedException e) { e.printStackTrace(); } ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first"); SqlParameter param = new SqlParameter(); param.name("@PartitionLeasePrefix"); param.value(leasePrefix); SqlQuerySpec querySpec = new SqlQuerySpec( "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", new SqlParameterList(param)); FeedOptions feedOptions = new FeedOptions(); feedOptions.enableCrossPartitionQuery(true); createdLeaseCollection.queryItems(querySpec, feedOptions) .delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2)) .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.results())) .flatMap(doc -> { BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER"); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); options.partitionKey(new PartitionKey(doc.id())); return createdLeaseCollection.getItem(doc.id(), "/id") .replace(doc, options) .map(CosmosItemResponse::properties); }) .map(ServiceItemLease::fromDocument) .map(leaseDocument -> { ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner()); return leaseDocument; }) .last() .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2)) .flatMap(leaseDocument -> { ChangeFeedProcessorTest.log.info("Start creating documents"); List<CosmosItemProperties> docDefList = new ArrayList<>(); for(int i = 0; i < FEED_COUNT; i++) { docDefList.add(getDocumentDefinition()); } return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT) .last() .delayElement(Duration.ofMillis(1000)) .flatMap(cosmosItemResponse -> { ChangeFeedProcessorTest.log.info("Start second Change feed 
processor"); return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)); }); }) .subscribe(); }) .subscribe(); } catch (Exception ex) { log.error("First change feed processor did not start in the expected time", ex); } long remainingWork = 40 * CHANGE_FEED_PROCESSOR_TIMEOUT; while (remainingWork > 0 && receivedDocuments.size() < FEED_COUNT) { remainingWork -= 100; try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } } assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue(); changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe(); try { Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT); } catch (InterruptedException e) { e.printStackTrace(); } receivedDocuments.clear(); }
e.printStackTrace();
public void staledLeaseAcquiring() { final String ownerFirst = "Owner_First"; final String ownerSecond = "Owner_Second"; final String leasePrefix = "TEST"; ChangeFeedProcessor changeFeedProcessorFirst = ChangeFeedProcessor.Builder() .hostName(ownerFirst) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst); ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerFirst); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leasePrefix(leasePrefix) ) .build(); ChangeFeedProcessor changeFeedProcessorSecond = ChangeFeedProcessor.Builder() .hostName(ownerSecond) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond); for (CosmosItemProperties item : docs) { processItem(item); } ChangeFeedProcessorTest.log.info("END processing from thread {} using host {}", Thread.currentThread().getId(), ownerSecond); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leaseRenewInterval(Duration.ofSeconds(10)) .leaseAcquireInterval(Duration.ofSeconds(5)) .leaseExpirationInterval(Duration.ofSeconds(20)) .feedPollDelay(Duration.ofSeconds(2)) .leasePrefix(leasePrefix) .maxItemCount(10) .startFromBeginning(true) .maxScaleCount(0) ) .build(); receivedDocuments = new ConcurrentHashMap<>(); try { changeFeedProcessorFirst.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)) .then(Mono.just(changeFeedProcessorFirst) .delayElement(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)) .flatMap( value -> changeFeedProcessorFirst.stop() .subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)) )) 
.then(Mono.just(changeFeedProcessorFirst) .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)) ) .doOnSuccess(aVoid -> { try { Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT / 2); } catch (InterruptedException e) { log.error(e.getMessage()); } ChangeFeedProcessorTest.log.info("Update leases for Change feed processor in thread {} using host {}", Thread.currentThread().getId(), "Owner_first"); SqlParameter param = new SqlParameter(); param.name("@PartitionLeasePrefix"); param.value(leasePrefix); SqlQuerySpec querySpec = new SqlQuerySpec( "SELECT * FROM c WHERE STARTSWITH(c.id, @PartitionLeasePrefix)", new SqlParameterList(param)); FeedOptions feedOptions = new FeedOptions(); feedOptions.enableCrossPartitionQuery(true); createdLeaseCollection.queryItems(querySpec, feedOptions) .delayElements(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2)) .flatMap(documentFeedResponse -> reactor.core.publisher.Flux.fromIterable(documentFeedResponse.results())) .flatMap(doc -> { BridgeInternal.setProperty(doc, "Owner", "TEMP_OWNER"); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); options.partitionKey(new PartitionKey(doc.id())); return createdLeaseCollection.getItem(doc.id(), "/id") .replace(doc, options) .map(CosmosItemResponse::properties); }) .map(ServiceItemLease::fromDocument) .map(leaseDocument -> { ChangeFeedProcessorTest.log.info("QueryItems after Change feed processor processing; found host {}", leaseDocument.getOwner()); return leaseDocument; }) .last() .delayElement(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT / 2)) .flatMap(leaseDocument -> { ChangeFeedProcessorTest.log.info("Start creating documents"); List<CosmosItemProperties> docDefList = new ArrayList<>(); for(int i = 0; i < FEED_COUNT; i++) { docDefList.add(getDocumentDefinition()); } return bulkInsert(createdFeedCollection, docDefList, FEED_COUNT) .last() .delayElement(Duration.ofMillis(1000)) .flatMap(cosmosItemResponse -> { ChangeFeedProcessorTest.log.info("Start second Change 
feed processor"); return changeFeedProcessorSecond.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)); }); }) .subscribe(); }) .subscribe(); } catch (Exception ex) { log.error("First change feed processor did not start in the expected time", ex); } long remainingWork = 40 * CHANGE_FEED_PROCESSOR_TIMEOUT; while (remainingWork > 0 && receivedDocuments.size() < FEED_COUNT) { remainingWork -= 100; try { Thread.sleep(100); } catch (InterruptedException e) { log.error(e.getMessage()); } } assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue(); changeFeedProcessorSecond.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe(); try { Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT); } catch (InterruptedException e) { log.error(e.getMessage()); } receivedDocuments.clear(); }
class ChangeFeedProcessorTest extends TestSuiteBase { private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class); private CosmosDatabase createdDatabase; private CosmosContainer createdFeedCollection; private CosmosContainer createdLeaseCollection; private List<CosmosItemProperties> createdDocuments; private static Map<String, CosmosItemProperties> receivedDocuments; private final String hostName = RandomStringUtils.randomAlphabetic(6); private final int FEED_COUNT = 10; private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000; private CosmosClient client; private ChangeFeedProcessor changeFeedProcessor; @Factory(dataProvider = "clientBuilders") public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void readFeedDocumentsStartFromBeginning() { setupReadFeedDocuments(); changeFeedProcessor = ChangeFeedProcessor.Builder() .hostName(hostName) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId()); for (CosmosItemProperties item : docs) { processItem(item); } ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId()); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leaseRenewInterval(Duration.ofSeconds(20)) .leaseAcquireInterval(Duration.ofSeconds(10)) .leaseExpirationInterval(Duration.ofSeconds(30)) .feedPollDelay(Duration.ofSeconds(2)) .leasePrefix("TEST") .maxItemCount(10) .startFromBeginning(true) .maxScaleCount(0) .discardExistingLeases(true) ) .build(); try { changeFeedProcessor.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)) .subscribe(); } catch (Exception ex) { log.error("Change feed processor did not start in the expected time", ex); } try { Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT); } catch 
(InterruptedException e) { e.printStackTrace(); } changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe(); for (CosmosItemProperties item : createdDocuments) { assertThat(receivedDocuments.containsKey(item.id())).as("Document with id: " + item.id()).isTrue(); } try { Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT); } catch (InterruptedException e) { e.printStackTrace(); } receivedDocuments.clear(); } @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void readFeedDocumentsStartFromCustomDate() { ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.Builder() .hostName(hostName) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId()); for (CosmosItemProperties item : docs) { processItem(item); } ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId()); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leaseRenewInterval(Duration.ofSeconds(20)) .leaseAcquireInterval(Duration.ofSeconds(10)) .leaseExpirationInterval(Duration.ofSeconds(30)) .feedPollDelay(Duration.ofSeconds(1)) .leasePrefix("TEST") .maxItemCount(10) .startTime(OffsetDateTime.now().minusDays(1)) .minScaleCount(1) .maxScaleCount(3) .discardExistingLeases(true) ) .build(); try { changeFeedProcessor.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)) .subscribe(); } catch (Exception ex) { log.error("Change feed processor did not start in the expected time", ex); } setupReadFeedDocuments(); long remainingWork = FEED_TIMEOUT; while (remainingWork > 0 && receivedDocuments.size() < FEED_COUNT) { remainingWork -= 100; try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } } assertThat(remainingWork >= 0).as("Failed to receive all the feed documents").isTrue(); 
changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe(); for (CosmosItemProperties item : createdDocuments) { assertThat(receivedDocuments.containsKey(item.id())).as("Document with id: " + item.id()).isTrue(); } try { Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT); } catch (InterruptedException e) { e.printStackTrace(); } receivedDocuments.clear(); } @Test(groups = { "emulator" }, timeOut = 40 * CHANGE_FEED_PROCESSOR_TIMEOUT) @BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true) public void beforeMethod() { createdFeedCollection = createFeedCollection(); createdLeaseCollection = createLeaseCollection(); } @BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true) public void beforeClass() { client = clientBuilder().build(); createdDatabase = getSharedCosmosDatabase(client); } @AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterMethod() { safeDeleteCollection(createdFeedCollection); safeDeleteCollection(createdLeaseCollection); try { Thread.sleep(500); } catch (Exception e){ } } @AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { safeClose(client); } private void setupReadFeedDocuments() { receivedDocuments = new ConcurrentHashMap<>(); List<CosmosItemProperties> docDefList = new ArrayList<>(); for(int i = 0; i < FEED_COUNT; i++) { docDefList.add(getDocumentDefinition()); } createdDocuments = bulkInsertBlocking(createdFeedCollection, docDefList); waitIfNeededForReplicasToCatchUp(clientBuilder()); } private CosmosItemProperties getDocumentDefinition() { String uuid = UUID.randomUUID().toString(); CosmosItemProperties doc = new CosmosItemProperties(String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}" , uuid, uuid)); return doc; } private 
CosmosContainer createFeedCollection() { CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions(); return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, 10100); } private CosmosContainer createLeaseCollection() { CosmosContainerRequestOptions options = new CosmosContainerRequestOptions(); CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(UUID.randomUUID().toString(), "/id"); return createCollection(createdDatabase, collectionDefinition, options, 400); } private static synchronized void processItem(CosmosItemProperties item) { ChangeFeedProcessorTest.log.info("RECEIVED {}", item.toJson(SerializationFormattingPolicy.INDENTED)); receivedDocuments.put(item.id(), item); } }
class ChangeFeedProcessorTest extends TestSuiteBase { private final static Logger log = LoggerFactory.getLogger(ChangeFeedProcessorTest.class); private CosmosDatabase createdDatabase; private CosmosContainer createdFeedCollection; private CosmosContainer createdLeaseCollection; private List<CosmosItemProperties> createdDocuments; private static Map<String, CosmosItemProperties> receivedDocuments; private final String hostName = RandomStringUtils.randomAlphabetic(6); private final int FEED_COUNT = 10; private final int CHANGE_FEED_PROCESSOR_TIMEOUT = 5000; private CosmosClient client; private ChangeFeedProcessor changeFeedProcessor; @Factory(dataProvider = "clientBuilders") public ChangeFeedProcessorTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void readFeedDocumentsStartFromBeginning() { setupReadFeedDocuments(); changeFeedProcessor = ChangeFeedProcessor.Builder() .hostName(hostName) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId()); for (CosmosItemProperties item : docs) { processItem(item); } ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId()); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leaseRenewInterval(Duration.ofSeconds(20)) .leaseAcquireInterval(Duration.ofSeconds(10)) .leaseExpirationInterval(Duration.ofSeconds(30)) .feedPollDelay(Duration.ofSeconds(2)) .leasePrefix("TEST") .maxItemCount(10) .startFromBeginning(true) .maxScaleCount(0) .discardExistingLeases(true) ) .build(); try { changeFeedProcessor.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)) .subscribe(); } catch (Exception ex) { log.error("Change feed processor did not start in the expected time", ex); } try { Thread.sleep(2 * CHANGE_FEED_PROCESSOR_TIMEOUT); } catch 
(InterruptedException e) { log.error(e.getMessage()); } changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe(); for (CosmosItemProperties item : createdDocuments) { assertThat(receivedDocuments.containsKey(item.id())).as("Document with id: " + item.id()).isTrue(); } try { Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT); } catch (InterruptedException e) { log.error(e.getMessage()); } receivedDocuments.clear(); } @Test(groups = { "emulator" }, timeOut = TIMEOUT) public void readFeedDocumentsStartFromCustomDate() { ChangeFeedProcessor changeFeedProcessor = ChangeFeedProcessor.Builder() .hostName(hostName) .handleChanges(docs -> { ChangeFeedProcessorTest.log.info("START processing from thread {}", Thread.currentThread().getId()); for (CosmosItemProperties item : docs) { processItem(item); } ChangeFeedProcessorTest.log.info("END processing from thread {}", Thread.currentThread().getId()); }) .feedContainer(createdFeedCollection) .leaseContainer(createdLeaseCollection) .options(new ChangeFeedProcessorOptions() .leaseRenewInterval(Duration.ofSeconds(20)) .leaseAcquireInterval(Duration.ofSeconds(10)) .leaseExpirationInterval(Duration.ofSeconds(30)) .feedPollDelay(Duration.ofSeconds(1)) .leasePrefix("TEST") .maxItemCount(10) .startTime(OffsetDateTime.now().minusDays(1)) .minScaleCount(1) .maxScaleCount(3) .discardExistingLeases(true) ) .build(); try { changeFeedProcessor.start().subscribeOn(Schedulers.elastic()) .timeout(Duration.ofMillis(CHANGE_FEED_PROCESSOR_TIMEOUT)) .subscribe(); } catch (Exception ex) { log.error("Change feed processor did not start in the expected time", ex); } setupReadFeedDocuments(); long remainingWork = FEED_TIMEOUT; while (remainingWork > 0 && receivedDocuments.size() < FEED_COUNT) { remainingWork -= 100; try { Thread.sleep(100); } catch (InterruptedException e) { log.error(e.getMessage()); } } assertThat(remainingWork >= 0).as("Failed to receive all the feed 
documents").isTrue(); changeFeedProcessor.stop().subscribeOn(Schedulers.elastic()).timeout(Duration.ofMillis(2 * CHANGE_FEED_PROCESSOR_TIMEOUT)).subscribe(); for (CosmosItemProperties item : createdDocuments) { assertThat(receivedDocuments.containsKey(item.id())).as("Document with id: " + item.id()).isTrue(); } try { Thread.sleep(CHANGE_FEED_PROCESSOR_TIMEOUT); } catch (InterruptedException e) { log.error(e.getMessage()); } receivedDocuments.clear(); } @Test(groups = { "emulator" }, timeOut = 40 * CHANGE_FEED_PROCESSOR_TIMEOUT) @BeforeMethod(groups = { "emulator" }, timeOut = 2 * SETUP_TIMEOUT, alwaysRun = true) public void beforeMethod() { createdFeedCollection = createFeedCollection(); createdLeaseCollection = createLeaseCollection(); } @BeforeClass(groups = { "emulator" }, timeOut = SETUP_TIMEOUT, alwaysRun = true) public void beforeClass() { client = clientBuilder().build(); createdDatabase = getSharedCosmosDatabase(client); } @AfterMethod(groups = { "emulator" }, timeOut = 3 * SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterMethod() { safeDeleteCollection(createdFeedCollection); safeDeleteCollection(createdLeaseCollection); try { Thread.sleep(500); } catch (Exception e){ } } @AfterClass(groups = { "emulator" }, timeOut = 2 * SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { safeClose(client); } private void setupReadFeedDocuments() { receivedDocuments = new ConcurrentHashMap<>(); List<CosmosItemProperties> docDefList = new ArrayList<>(); for(int i = 0; i < FEED_COUNT; i++) { docDefList.add(getDocumentDefinition()); } createdDocuments = bulkInsertBlocking(createdFeedCollection, docDefList); waitIfNeededForReplicasToCatchUp(clientBuilder()); } private CosmosItemProperties getDocumentDefinition() { String uuid = UUID.randomUUID().toString(); CosmosItemProperties doc = new CosmosItemProperties(String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}" , uuid, uuid)); return 
doc; } private CosmosContainer createFeedCollection() { CosmosContainerRequestOptions optionsFeedCollection = new CosmosContainerRequestOptions(); return createCollection(createdDatabase, getCollectionDefinition(), optionsFeedCollection, 10100); } private CosmosContainer createLeaseCollection() { CosmosContainerRequestOptions options = new CosmosContainerRequestOptions(); CosmosContainerProperties collectionDefinition = new CosmosContainerProperties(UUID.randomUUID().toString(), "/id"); return createCollection(createdDatabase, collectionDefinition, options, 400); } private static synchronized void processItem(CosmosItemProperties item) { ChangeFeedProcessorTest.log.info("RECEIVED {}", item.toJson(SerializationFormattingPolicy.INDENTED)); receivedDocuments.put(item.id(), item); } }
It's better put after `getAuthority`
public void assertLoadEmptyConfiguration() throws IOException { YamlProxyConfiguration actual = ProxyConfigurationLoader.load("/conf/empty/"); YamlProxyServerConfiguration serverConfig = actual.getServerConfiguration(); assertNull(serverConfig.getMode()); assertNull(serverConfig.getAuthority()); assertNull(serverConfig.getLabels()); assertTrue(serverConfig.getProps().isEmpty()); assertTrue(serverConfig.getRules().isEmpty()); assertNull(serverConfig.getCdc()); assertTrue(actual.getDatabaseConfigurations().isEmpty()); }
assertNull(serverConfig.getCdc());
public void assertLoadEmptyConfiguration() throws IOException { YamlProxyConfiguration actual = ProxyConfigurationLoader.load("/conf/empty/"); YamlProxyServerConfiguration serverConfig = actual.getServerConfiguration(); assertNull(serverConfig.getMode()); assertNull(serverConfig.getAuthority()); assertNull(serverConfig.getCdc()); assertNull(serverConfig.getLabels()); assertTrue(serverConfig.getProps().isEmpty()); assertTrue(serverConfig.getRules().isEmpty()); assertTrue(actual.getDatabaseConfigurations().isEmpty()); }
class ProxyConfigurationLoaderTest { @Test @Test public void assertLoad() throws IOException { YamlProxyConfiguration actual = ProxyConfigurationLoader.load("/conf/config_loader/"); Iterator<YamlRuleConfiguration> actualGlobalRules = actual.getServerConfiguration().getRules().iterator(); actualGlobalRules.next(); assertThat(actual.getDatabaseConfigurations().size(), is(3)); assertShardingRuleConfiguration(actual.getDatabaseConfigurations().get("sharding_db")); assertReadwriteSplittingRuleConfiguration(actual.getDatabaseConfigurations().get("readwrite_splitting_db")); assertEncryptRuleConfiguration(actual.getDatabaseConfigurations().get("encrypt_db")); } private void assertShardingRuleConfiguration(final YamlProxyDatabaseConfiguration actual) { assertThat(actual.getDatabaseName(), is("sharding_db")); assertThat(actual.getDataSources().size(), is(2)); assertDataSourceConfiguration(actual.getDataSources().get("ds_0"), "jdbc:mysql: assertDataSourceConfiguration(actual.getDataSources().get("ds_1"), "jdbc:mysql: Optional<YamlShardingRuleConfiguration> shardingRuleConfig = actual.getRules().stream() .filter(each -> each instanceof YamlShardingRuleConfiguration).findFirst().map(each -> (YamlShardingRuleConfiguration) each); assertTrue(shardingRuleConfig.isPresent()); assertShardingRuleConfiguration(shardingRuleConfig.get()); assertFalse(actual.getRules().stream().filter(each -> each instanceof YamlEncryptRuleConfiguration).findFirst().map(each -> (YamlEncryptRuleConfiguration) each).isPresent()); } private void assertShardingRuleConfiguration(final YamlShardingRuleConfiguration actual) { assertThat(actual.getTables().size(), is(1)); assertThat(actual.getTables().get("t_order").getActualDataNodes(), is("ds_${0..1}.t_order_${0..1}")); assertThat(actual.getTables().get("t_order").getDatabaseStrategy().getStandard().getShardingColumn(), is("user_id")); assertThat(actual.getTables().get("t_order").getDatabaseStrategy().getStandard().getShardingAlgorithmName(), 
is("database_inline")); assertThat(actual.getTables().get("t_order").getTableStrategy().getStandard().getShardingColumn(), is("order_id")); assertThat(actual.getTables().get("t_order").getTableStrategy().getStandard().getShardingAlgorithmName(), is("table_inline")); assertNotNull(actual.getDefaultDatabaseStrategy().getNone()); } private void assertReadwriteSplittingRuleConfiguration(final YamlProxyDatabaseConfiguration actual) { assertThat(actual.getDatabaseName(), is("readwrite_splitting_db")); assertThat(actual.getDataSources().size(), is(3)); assertDataSourceConfiguration(actual.getDataSources().get("write_ds"), "jdbc:mysql: assertDataSourceConfiguration(actual.getDataSources().get("read_ds_0"), "jdbc:mysql: assertDataSourceConfiguration(actual.getDataSources().get("read_ds_1"), "jdbc:mysql: assertFalse(actual.getRules().stream().filter(each -> each instanceof YamlShardingRuleConfiguration).findFirst().map(each -> (YamlShardingRuleConfiguration) each).isPresent()); assertFalse(actual.getRules().stream().filter(each -> each instanceof YamlEncryptRuleConfiguration).findFirst().map(each -> (YamlEncryptRuleConfiguration) each).isPresent()); Optional<YamlReadwriteSplittingRuleConfiguration> ruleConfig = actual.getRules().stream() .filter(each -> each instanceof YamlReadwriteSplittingRuleConfiguration).findFirst().map(each -> (YamlReadwriteSplittingRuleConfiguration) each); assertTrue(ruleConfig.isPresent()); for (YamlReadwriteSplittingDataSourceRuleConfiguration each : ruleConfig.get().getDataSources().values()) { assertReadwriteSplittingRuleConfiguration(each); } } private void assertReadwriteSplittingRuleConfiguration(final YamlReadwriteSplittingDataSourceRuleConfiguration actual) { assertNotNull(actual.getStaticStrategy()); assertThat(actual.getStaticStrategy().getWriteDataSourceName(), is("write_ds")); assertThat(actual.getStaticStrategy().getReadDataSourceNames(), is(Arrays.asList("read_ds_0", "read_ds_1"))); } private void assertEncryptRuleConfiguration(final 
YamlProxyDatabaseConfiguration actual) { assertThat(actual.getDatabaseName(), is("encrypt_db")); assertThat(actual.getDataSources().size(), is(1)); assertDataSourceConfiguration(actual.getDataSources().get("ds_0"), "jdbc:mysql: assertFalse(actual.getRules().stream() .filter(each -> each instanceof YamlShardingRuleConfiguration).findFirst().map(each -> (YamlShardingRuleConfiguration) each).isPresent()); Optional<YamlEncryptRuleConfiguration> encryptRuleConfig = actual.getRules().stream() .filter(each -> each instanceof YamlEncryptRuleConfiguration).findFirst().map(each -> (YamlEncryptRuleConfiguration) each); assertTrue(encryptRuleConfig.isPresent()); assertEncryptRuleConfiguration(encryptRuleConfig.get()); } private void assertEncryptRuleConfiguration(final YamlEncryptRuleConfiguration actual) { assertThat(actual.getEncryptors().size(), is(2)); assertTrue(actual.getEncryptors().containsKey("aes_encryptor")); assertTrue(actual.getEncryptors().containsKey("md5_encryptor")); YamlAlgorithmConfiguration aesEncryptAlgorithmConfig = actual.getEncryptors().get("aes_encryptor"); assertThat(aesEncryptAlgorithmConfig.getType(), is("AES")); assertThat(aesEncryptAlgorithmConfig.getProps().getProperty("aes-key-value"), is("123456abc")); YamlAlgorithmConfiguration md5EncryptAlgorithmConfig = actual.getEncryptors().get("md5_encryptor"); assertThat(md5EncryptAlgorithmConfig.getType(), is("MD5")); } private void assertDataSourceConfiguration(final YamlProxyDataSourceConfiguration actual, final String expectedURL) { assertThat(actual.getUrl(), is(expectedURL)); assertThat(actual.getUsername(), is("root")); assertNull(actual.getPassword()); assertThat(actual.getConnectionTimeoutMilliseconds(), is(30000L)); assertThat(actual.getIdleTimeoutMilliseconds(), is(60000L)); assertThat(actual.getMaxLifetimeMilliseconds(), is(1800000L)); assertThat(actual.getMaxPoolSize(), is(50)); } }
class ProxyConfigurationLoaderTest { @Test @Test public void assertLoad() throws IOException { YamlProxyConfiguration actual = ProxyConfigurationLoader.load("/conf/config_loader/"); Iterator<YamlRuleConfiguration> actualGlobalRules = actual.getServerConfiguration().getRules().iterator(); actualGlobalRules.next(); assertThat(actual.getDatabaseConfigurations().size(), is(3)); assertShardingRuleConfiguration(actual.getDatabaseConfigurations().get("sharding_db")); assertReadwriteSplittingRuleConfiguration(actual.getDatabaseConfigurations().get("readwrite_splitting_db")); assertEncryptRuleConfiguration(actual.getDatabaseConfigurations().get("encrypt_db")); } private void assertShardingRuleConfiguration(final YamlProxyDatabaseConfiguration actual) { assertThat(actual.getDatabaseName(), is("sharding_db")); assertThat(actual.getDataSources().size(), is(2)); assertDataSourceConfiguration(actual.getDataSources().get("ds_0"), "jdbc:mysql: assertDataSourceConfiguration(actual.getDataSources().get("ds_1"), "jdbc:mysql: Optional<YamlShardingRuleConfiguration> shardingRuleConfig = actual.getRules().stream() .filter(each -> each instanceof YamlShardingRuleConfiguration).findFirst().map(each -> (YamlShardingRuleConfiguration) each); assertTrue(shardingRuleConfig.isPresent()); assertShardingRuleConfiguration(shardingRuleConfig.get()); assertFalse(actual.getRules().stream().filter(each -> each instanceof YamlEncryptRuleConfiguration).findFirst().map(each -> (YamlEncryptRuleConfiguration) each).isPresent()); } private void assertShardingRuleConfiguration(final YamlShardingRuleConfiguration actual) { assertThat(actual.getTables().size(), is(1)); assertThat(actual.getTables().get("t_order").getActualDataNodes(), is("ds_${0..1}.t_order_${0..1}")); assertThat(actual.getTables().get("t_order").getDatabaseStrategy().getStandard().getShardingColumn(), is("user_id")); assertThat(actual.getTables().get("t_order").getDatabaseStrategy().getStandard().getShardingAlgorithmName(), 
is("database_inline")); assertThat(actual.getTables().get("t_order").getTableStrategy().getStandard().getShardingColumn(), is("order_id")); assertThat(actual.getTables().get("t_order").getTableStrategy().getStandard().getShardingAlgorithmName(), is("table_inline")); assertNotNull(actual.getDefaultDatabaseStrategy().getNone()); } private void assertReadwriteSplittingRuleConfiguration(final YamlProxyDatabaseConfiguration actual) { assertThat(actual.getDatabaseName(), is("readwrite_splitting_db")); assertThat(actual.getDataSources().size(), is(3)); assertDataSourceConfiguration(actual.getDataSources().get("write_ds"), "jdbc:mysql: assertDataSourceConfiguration(actual.getDataSources().get("read_ds_0"), "jdbc:mysql: assertDataSourceConfiguration(actual.getDataSources().get("read_ds_1"), "jdbc:mysql: assertFalse(actual.getRules().stream().filter(each -> each instanceof YamlShardingRuleConfiguration).findFirst().map(each -> (YamlShardingRuleConfiguration) each).isPresent()); assertFalse(actual.getRules().stream().filter(each -> each instanceof YamlEncryptRuleConfiguration).findFirst().map(each -> (YamlEncryptRuleConfiguration) each).isPresent()); Optional<YamlReadwriteSplittingRuleConfiguration> ruleConfig = actual.getRules().stream() .filter(each -> each instanceof YamlReadwriteSplittingRuleConfiguration).findFirst().map(each -> (YamlReadwriteSplittingRuleConfiguration) each); assertTrue(ruleConfig.isPresent()); for (YamlReadwriteSplittingDataSourceRuleConfiguration each : ruleConfig.get().getDataSources().values()) { assertReadwriteSplittingRuleConfiguration(each); } } private void assertReadwriteSplittingRuleConfiguration(final YamlReadwriteSplittingDataSourceRuleConfiguration actual) { assertNotNull(actual.getStaticStrategy()); assertThat(actual.getStaticStrategy().getWriteDataSourceName(), is("write_ds")); assertThat(actual.getStaticStrategy().getReadDataSourceNames(), is(Arrays.asList("read_ds_0", "read_ds_1"))); } private void assertEncryptRuleConfiguration(final 
YamlProxyDatabaseConfiguration actual) { assertThat(actual.getDatabaseName(), is("encrypt_db")); assertThat(actual.getDataSources().size(), is(1)); assertDataSourceConfiguration(actual.getDataSources().get("ds_0"), "jdbc:mysql: assertFalse(actual.getRules().stream() .filter(each -> each instanceof YamlShardingRuleConfiguration).findFirst().map(each -> (YamlShardingRuleConfiguration) each).isPresent()); Optional<YamlEncryptRuleConfiguration> encryptRuleConfig = actual.getRules().stream() .filter(each -> each instanceof YamlEncryptRuleConfiguration).findFirst().map(each -> (YamlEncryptRuleConfiguration) each); assertTrue(encryptRuleConfig.isPresent()); assertEncryptRuleConfiguration(encryptRuleConfig.get()); } private void assertEncryptRuleConfiguration(final YamlEncryptRuleConfiguration actual) { assertThat(actual.getEncryptors().size(), is(2)); assertTrue(actual.getEncryptors().containsKey("aes_encryptor")); assertTrue(actual.getEncryptors().containsKey("md5_encryptor")); YamlAlgorithmConfiguration aesEncryptAlgorithmConfig = actual.getEncryptors().get("aes_encryptor"); assertThat(aesEncryptAlgorithmConfig.getType(), is("AES")); assertThat(aesEncryptAlgorithmConfig.getProps().getProperty("aes-key-value"), is("123456abc")); YamlAlgorithmConfiguration md5EncryptAlgorithmConfig = actual.getEncryptors().get("md5_encryptor"); assertThat(md5EncryptAlgorithmConfig.getType(), is("MD5")); } private void assertDataSourceConfiguration(final YamlProxyDataSourceConfiguration actual, final String expectedURL) { assertThat(actual.getUrl(), is(expectedURL)); assertThat(actual.getUsername(), is("root")); assertNull(actual.getPassword()); assertThat(actual.getConnectionTimeoutMilliseconds(), is(30000L)); assertThat(actual.getIdleTimeoutMilliseconds(), is(60000L)); assertThat(actual.getMaxLifetimeMilliseconds(), is(1800000L)); assertThat(actual.getMaxPoolSize(), is(50)); } }
I think there was a typo here maybe this should have been: ``` if (instance.name().equals(MOCKITO_CONFIG) && instance.target().asField().hasAnnotation(DEPRECATED_INJECT_MOCK)) { continue; } ``` As I think that we shouldn't process `@MockConfig` and the deprecated `@InjectMock` twice as it sets the same configs, but we should process the `@MockConfig` if being used in conjunction with the new `@InjectMock` to tweak the mock configuration. Since now using `@MockitoConfig` to convert scopes doesn't work =/ Also saw that there is no test for converting scopes, so this it wasn't caught before. Good thing is that there is still the deprecated `@InjectMock` as we can use it do to scope conversion.
public Consumer<BuildChainBuilder> produce(Index testClassesIndex) { return new Consumer<>() { @Override public void accept(BuildChainBuilder buildChainBuilder) { buildChainBuilder.addBuildStep(new BuildStep() { @Override public void execute(BuildContext context) { Set<DotName> mockTypes = new HashSet<>(); List<AnnotationInstance> instances = new ArrayList<>(); instances.addAll(testClassesIndex.getAnnotations(DEPRECATED_INJECT_MOCK)); instances.addAll(testClassesIndex.getAnnotations(INJECT_SPY)); instances.addAll(testClassesIndex.getAnnotations(MOCKITO_CONFIG)); for (AnnotationInstance instance : instances) { if (instance.target().kind() != AnnotationTarget.Kind.FIELD) { continue; } if (instance.name().equals(MOCKITO_CONFIG) && instance.target().asField().hasAnnotation(INJECT_MOCK)) { continue; } AnnotationValue allowScopeConversionValue = instance.value("convertScopes"); if ((allowScopeConversionValue != null) && allowScopeConversionValue.asBoolean()) { mockTypes.add(instance.target().asField().type().name()); } } if (mockTypes.isEmpty()) { return; } CustomScopeAnnotationsBuildItem scopes = context.consume(CustomScopeAnnotationsBuildItem.class); Set<DotName> singletonBeanDefiningAnnotations = new HashSet<>(); for (BeanDefiningAnnotationBuildItem annotation : context .consumeMulti(BeanDefiningAnnotationBuildItem.class)) { if (DotNames.SINGLETON.equals(annotation.getDefaultScope())) { singletonBeanDefiningAnnotations.add(annotation.getName()); } } context.produce(new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { @Override public boolean appliesTo(AnnotationTarget.Kind kind) { return (kind == AnnotationTarget.Kind.CLASS) || (kind == AnnotationTarget.Kind.METHOD); } @Override public void transform(TransformationContext transformationContext) { AnnotationTarget target = transformationContext.getTarget(); if (target.kind() == AnnotationTarget.Kind.CLASS) { ClassInfo classInfo = target.asClass(); if (isMatchingBean(classInfo)) { if 
(Annotations.contains(transformationContext.getAnnotations(), DotNames.SINGLETON) || hasSingletonBeanDefiningAnnotation(transformationContext)) { replaceSingletonWithApplicationScoped(transformationContext); } } } else if (target.kind() == AnnotationTarget.Kind.METHOD) { MethodInfo methodInfo = target.asMethod(); if ((methodInfo.annotation(DotNames.PRODUCES) != null) && (Annotations.contains(transformationContext.getAnnotations(), DotNames.SINGLETON) || hasSingletonBeanDefiningAnnotation(transformationContext))) { DotName returnType = methodInfo.returnType().name(); if (mockTypes.contains(returnType)) { replaceSingletonWithApplicationScoped(transformationContext); } } } } private void replaceSingletonWithApplicationScoped(TransformationContext transformationContext) { transformationContext.transform().remove(new IsSingletonPredicate()) .add(DotNames.APPLICATION_SCOPED).done(); } private boolean isMatchingBean(ClassInfo classInfo) { if (mockTypes.contains(classInfo.name())) { return true; } if (mockTypes.contains(classInfo.superName())) { return true; } for (DotName iface : classInfo.interfaceNames()) { if (mockTypes.contains(iface)) { return true; } } return false; } private boolean hasSingletonBeanDefiningAnnotation(TransformationContext transformationContext) { if (singletonBeanDefiningAnnotations.isEmpty() || scopes.isScopeIn(transformationContext.getAnnotations())) { return false; } return Annotations.containsAny(transformationContext.getAnnotations(), singletonBeanDefiningAnnotations); } })); } }).produces(AnnotationsTransformerBuildItem.class).consumes(CustomScopeAnnotationsBuildItem.class) .consumes(BeanDefiningAnnotationBuildItem.class).build(); } }; }
if (instance.name().equals(MOCKITO_CONFIG)
public Consumer<BuildChainBuilder> produce(Index testClassesIndex) { return new Consumer<>() { @Override public void accept(BuildChainBuilder buildChainBuilder) { buildChainBuilder.addBuildStep(new BuildStep() { @Override public void execute(BuildContext context) { Set<DotName> mockTypes = new HashSet<>(); List<AnnotationInstance> instances = new ArrayList<>(); instances.addAll(testClassesIndex.getAnnotations(DEPRECATED_INJECT_MOCK)); instances.addAll(testClassesIndex.getAnnotations(INJECT_SPY)); instances.addAll(testClassesIndex.getAnnotations(MOCKITO_CONFIG)); for (AnnotationInstance instance : instances) { if (instance.target().kind() != AnnotationTarget.Kind.FIELD) { continue; } if (instance.name().equals(MOCKITO_CONFIG) && instance.target().asField().hasAnnotation(INJECT_MOCK)) { continue; } AnnotationValue allowScopeConversionValue = instance.value("convertScopes"); if ((allowScopeConversionValue != null) && allowScopeConversionValue.asBoolean()) { mockTypes.add(instance.target().asField().type().name()); } } if (mockTypes.isEmpty()) { return; } CustomScopeAnnotationsBuildItem scopes = context.consume(CustomScopeAnnotationsBuildItem.class); Set<DotName> singletonBeanDefiningAnnotations = new HashSet<>(); for (BeanDefiningAnnotationBuildItem annotation : context .consumeMulti(BeanDefiningAnnotationBuildItem.class)) { if (DotNames.SINGLETON.equals(annotation.getDefaultScope())) { singletonBeanDefiningAnnotations.add(annotation.getName()); } } context.produce(new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { @Override public boolean appliesTo(AnnotationTarget.Kind kind) { return (kind == AnnotationTarget.Kind.CLASS) || (kind == AnnotationTarget.Kind.METHOD); } @Override public void transform(TransformationContext transformationContext) { AnnotationTarget target = transformationContext.getTarget(); if (target.kind() == AnnotationTarget.Kind.CLASS) { ClassInfo classInfo = target.asClass(); if (isMatchingBean(classInfo)) { if 
(Annotations.contains(transformationContext.getAnnotations(), DotNames.SINGLETON) || hasSingletonBeanDefiningAnnotation(transformationContext)) { replaceSingletonWithApplicationScoped(transformationContext); } } } else if (target.kind() == AnnotationTarget.Kind.METHOD) { MethodInfo methodInfo = target.asMethod(); if ((methodInfo.annotation(DotNames.PRODUCES) != null) && (Annotations.contains(transformationContext.getAnnotations(), DotNames.SINGLETON) || hasSingletonBeanDefiningAnnotation(transformationContext))) { DotName returnType = methodInfo.returnType().name(); if (mockTypes.contains(returnType)) { replaceSingletonWithApplicationScoped(transformationContext); } } } } private void replaceSingletonWithApplicationScoped(TransformationContext transformationContext) { transformationContext.transform().remove(new IsSingletonPredicate()) .add(DotNames.APPLICATION_SCOPED).done(); } private boolean isMatchingBean(ClassInfo classInfo) { if (mockTypes.contains(classInfo.name())) { return true; } if (mockTypes.contains(classInfo.superName())) { return true; } for (DotName iface : classInfo.interfaceNames()) { if (mockTypes.contains(iface)) { return true; } } return false; } private boolean hasSingletonBeanDefiningAnnotation(TransformationContext transformationContext) { if (singletonBeanDefiningAnnotations.isEmpty() || scopes.isScopeIn(transformationContext.getAnnotations())) { return false; } return Annotations.containsAny(transformationContext.getAnnotations(), singletonBeanDefiningAnnotations); } })); } }).produces(AnnotationsTransformerBuildItem.class).consumes(CustomScopeAnnotationsBuildItem.class) .consumes(BeanDefiningAnnotationBuildItem.class).build(); } }; }
class SingletonToApplicationScopedTestBuildChainCustomizerProducer implements TestBuildChainCustomizerProducer { static final DotName INJECT_MOCK = DotName.createSimple(io.quarkus.test.InjectMock.class.getName()); static final DotName DEPRECATED_INJECT_MOCK = DotName.createSimple(InjectMock.class.getName()); static final DotName INJECT_SPY = DotName.createSimple(InjectSpy.class.getName()); static final DotName MOCKITO_CONFIG = DotName.createSimple(MockitoConfig.class.getName()); @Override private static class IsSingletonPredicate implements Predicate<AnnotationInstance> { @Override public boolean test(AnnotationInstance annotationInstance) { return annotationInstance.name().equals(DotNames.SINGLETON); } } }
class SingletonToApplicationScopedTestBuildChainCustomizerProducer implements TestBuildChainCustomizerProducer { static final DotName INJECT_MOCK = DotName.createSimple(io.quarkus.test.InjectMock.class.getName()); static final DotName DEPRECATED_INJECT_MOCK = DotName.createSimple(InjectMock.class.getName()); static final DotName INJECT_SPY = DotName.createSimple(InjectSpy.class.getName()); static final DotName MOCKITO_CONFIG = DotName.createSimple(MockitoConfig.class.getName()); @Override private static class IsSingletonPredicate implements Predicate<AnnotationInstance> { @Override public boolean test(AnnotationInstance annotationInstance) { return annotationInstance.name().equals(DotNames.SINGLETON); } } }
Yep, done. (I originally tried to do this the functional way using streams, but Java doesn't really let you throw checked exceptions from lambdas. 😢 )
public ClassLoader createClassLoader(List<String> inputJarPaths) throws IOException { List<File> localJars = new ArrayList<>(); for (String inputJar : inputJarPaths) { localJars.add(getLocalJar(inputJar)); } List<URL> urls = new ArrayList<>(); for (File file : localJars) { urls.add(file.toURI().toURL()); } return new URLClassLoader(urls.toArray(new URL[0])); }
localJars.add(getLocalJar(inputJar));
public ClassLoader createClassLoader(List<String> inputJarPaths) throws IOException { List<URL> urls = new ArrayList<>(); for (String inputJar : inputJarPaths) { urls.add(getLocalJar(inputJar).toURI().toURL()); } return createUrlClassLoader(urls.toArray(new URL[0])); }
class implementing %s and annotate it with @AutoService(%s.class).%n" + " 2. Add function %s to the class's userDefinedScalarFunctions implementation.", functionFullName, jarPath, UdfProvider.class.getSimpleName(), UdfProvider.class.getSimpleName(), functionFullName)); } return functionDefinitions.scalarFunctions().get(functionPath); } catch (IOException e) { throw new RuntimeException( String.format( "Failed to load user-defined scalar function %s from %s", functionFullName, jarPath), e); }
class implementing %s and annotate it with @AutoService(%s.class).%n" + " 2. Add function %s to the class's userDefinedScalarFunctions implementation.", functionFullName, jarPath, UdfProvider.class.getSimpleName(), UdfProvider.class.getSimpleName(), functionFullName)); } return functionDefinitions.scalarFunctions().get(functionPath); } catch (IOException e) { throw new RuntimeException( String.format( "Failed to load user-defined scalar function %s from %s", functionFullName, jarPath), e); }
Flink generates its MetricGroup from the operator name which contains the step id. I think it is fine to omit the step name from the generated name.
static String getFlinkMetricNameString(MetricResult<?> metricResult) { return METRIC_KEY_SEPARATOR + metricResult.getStep() + METRIC_KEY_SEPARATOR + metricResult.getName().getNamespace() + METRIC_KEY_SEPARATOR + metricResult.getName().getName(); }
return METRIC_KEY_SEPARATOR
static String getFlinkMetricNameString(MetricResult<?> metricResult) { MetricName metricName = metricResult.getName(); return metricName.getNamespace() + METRIC_KEY_SEPARATOR + metricName.getName(); }
class FlinkMetricContainer { public static final String ACCUMULATOR_NAME = "__metricscontainers"; private static final Logger LOG = LoggerFactory.getLogger(FlinkMetricContainer.class); private static final String METRIC_KEY_SEPARATOR = GlobalConfiguration.loadConfiguration().getString(MetricOptions.SCOPE_DELIMITER); private final RuntimeContext runtimeContext; private final Map<String, Counter> flinkCounterCache; private final Map<String, FlinkDistributionGauge> flinkDistributionGaugeCache; private final Map<String, FlinkGauge> flinkGaugeCache; private final MetricsAccumulator metricsAccumulator; public FlinkMetricContainer(RuntimeContext runtimeContext) { this.runtimeContext = runtimeContext; this.flinkCounterCache = new HashMap<>(); this.flinkDistributionGaugeCache = new HashMap<>(); this.flinkGaugeCache = new HashMap<>(); Accumulator<MetricsContainerStepMap, MetricsContainerStepMap> metricsAccumulator = runtimeContext.getAccumulator(ACCUMULATOR_NAME); if (metricsAccumulator == null) { metricsAccumulator = new MetricsAccumulator(); try { runtimeContext.addAccumulator(ACCUMULATOR_NAME, metricsAccumulator); } catch (Exception e) { LOG.error("Failed to create metrics accumulator.", e); } } this.metricsAccumulator = (MetricsAccumulator) metricsAccumulator; } MetricsContainer getMetricsContainer(String stepName) { return metricsAccumulator != null ? 
metricsAccumulator.getLocalValue().getContainer(stepName) : null; } void updateMetrics(String stepName) { MetricResults metricResults = asAttemptedOnlyMetricResults(metricsAccumulator.getLocalValue()); MetricQueryResults metricQueryResults = metricResults.queryMetrics(MetricsFilter.builder().addStep(stepName).build()); updateCounters(metricQueryResults.getCounters()); updateDistributions(metricQueryResults.getDistributions()); updateGauge(metricQueryResults.getGauges()); } private void updateCounters(Iterable<MetricResult<Long>> counters) { for (MetricResult<Long> metricResult : counters) { String flinkMetricName = getFlinkMetricNameString(metricResult); Long update = metricResult.getAttempted(); Counter counter = flinkCounterCache.computeIfAbsent( flinkMetricName, n -> runtimeContext.getMetricGroup().counter(n)); counter.dec(counter.getCount()); counter.inc(update); } } private void updateDistributions(Iterable<MetricResult<DistributionResult>> distributions) { for (MetricResult<DistributionResult> metricResult : distributions) { String flinkMetricName = getFlinkMetricNameString(metricResult); DistributionResult update = metricResult.getAttempted(); FlinkDistributionGauge gauge = flinkDistributionGaugeCache.get(flinkMetricName); if (gauge == null) { gauge = runtimeContext .getMetricGroup() .gauge(flinkMetricName, new FlinkDistributionGauge(update)); flinkDistributionGaugeCache.put(flinkMetricName, gauge); } else { gauge.update(update); } } } private void updateGauge(Iterable<MetricResult<GaugeResult>> gauges) { for (MetricResult<GaugeResult> metricResult : gauges) { String flinkMetricName = getFlinkMetricNameString(metricResult); GaugeResult update = metricResult.getAttempted(); FlinkGauge gauge = flinkGaugeCache.get(flinkMetricName); if (gauge == null) { gauge = runtimeContext.getMetricGroup().gauge(flinkMetricName, new FlinkGauge(update)); flinkGaugeCache.put(flinkMetricName, gauge); } else { gauge.update(update); } } } @VisibleForTesting /** Flink {@link Gauge} 
for {@link DistributionResult}. */ public static class FlinkDistributionGauge implements Gauge<DistributionResult> { DistributionResult data; FlinkDistributionGauge(DistributionResult data) { this.data = data; } void update(DistributionResult data) { this.data = data; } @Override public DistributionResult getValue() { return data; } } /** Flink {@link Gauge} for {@link GaugeResult}. */ public static class FlinkGauge implements Gauge<GaugeResult> { GaugeResult data; FlinkGauge(GaugeResult data) { this.data = data; } void update(GaugeResult update) { this.data = update; } @Override public GaugeResult getValue() { return data; } } }
class FlinkMetricContainer { public static final String ACCUMULATOR_NAME = "__metricscontainers"; private static final Logger LOG = LoggerFactory.getLogger(FlinkMetricContainer.class); private static final String METRIC_KEY_SEPARATOR = GlobalConfiguration.loadConfiguration().getString(MetricOptions.SCOPE_DELIMITER); private final RuntimeContext runtimeContext; private final Map<String, Counter> flinkCounterCache; private final Map<String, FlinkDistributionGauge> flinkDistributionGaugeCache; private final Map<String, FlinkGauge> flinkGaugeCache; private final MetricsAccumulator metricsAccumulator; public FlinkMetricContainer(RuntimeContext runtimeContext) { this.runtimeContext = runtimeContext; this.flinkCounterCache = new HashMap<>(); this.flinkDistributionGaugeCache = new HashMap<>(); this.flinkGaugeCache = new HashMap<>(); Accumulator<MetricsContainerStepMap, MetricsContainerStepMap> metricsAccumulator = runtimeContext.getAccumulator(ACCUMULATOR_NAME); if (metricsAccumulator == null) { metricsAccumulator = new MetricsAccumulator(); try { runtimeContext.addAccumulator(ACCUMULATOR_NAME, metricsAccumulator); } catch (Exception e) { LOG.error("Failed to create metrics accumulator.", e); } } this.metricsAccumulator = (MetricsAccumulator) metricsAccumulator; } MetricsContainer getMetricsContainer(String stepName) { return metricsAccumulator != null ? 
metricsAccumulator.getLocalValue().getContainer(stepName) : null; } void updateMetrics(String stepName) { MetricResults metricResults = asAttemptedOnlyMetricResults(metricsAccumulator.getLocalValue()); MetricQueryResults metricQueryResults = metricResults.queryMetrics(MetricsFilter.builder().addStep(stepName).build()); updateCounters(metricQueryResults.getCounters()); updateDistributions(metricQueryResults.getDistributions()); updateGauge(metricQueryResults.getGauges()); } private void updateCounters(Iterable<MetricResult<Long>> counters) { for (MetricResult<Long> metricResult : counters) { String flinkMetricName = getFlinkMetricNameString(metricResult); Long update = metricResult.getAttempted(); Counter counter = flinkCounterCache.computeIfAbsent( flinkMetricName, n -> runtimeContext.getMetricGroup().counter(n)); counter.dec(counter.getCount()); counter.inc(update); } } private void updateDistributions(Iterable<MetricResult<DistributionResult>> distributions) { for (MetricResult<DistributionResult> metricResult : distributions) { String flinkMetricName = getFlinkMetricNameString(metricResult); DistributionResult update = metricResult.getAttempted(); FlinkDistributionGauge gauge = flinkDistributionGaugeCache.get(flinkMetricName); if (gauge == null) { gauge = runtimeContext .getMetricGroup() .gauge(flinkMetricName, new FlinkDistributionGauge(update)); flinkDistributionGaugeCache.put(flinkMetricName, gauge); } else { gauge.update(update); } } } private void updateGauge(Iterable<MetricResult<GaugeResult>> gauges) { for (MetricResult<GaugeResult> metricResult : gauges) { String flinkMetricName = getFlinkMetricNameString(metricResult); GaugeResult update = metricResult.getAttempted(); FlinkGauge gauge = flinkGaugeCache.get(flinkMetricName); if (gauge == null) { gauge = runtimeContext.getMetricGroup().gauge(flinkMetricName, new FlinkGauge(update)); flinkGaugeCache.put(flinkMetricName, gauge); } else { gauge.update(update); } } } @VisibleForTesting /** Flink {@link Gauge} 
for {@link DistributionResult}. */ public static class FlinkDistributionGauge implements Gauge<DistributionResult> { DistributionResult data; FlinkDistributionGauge(DistributionResult data) { this.data = data; } void update(DistributionResult data) { this.data = data; } @Override public DistributionResult getValue() { return data; } } /** Flink {@link Gauge} for {@link GaugeResult}. */ public static class FlinkGauge implements Gauge<GaugeResult> { GaugeResult data; FlinkGauge(GaugeResult data) { this.data = data; } void update(GaugeResult update) { this.data = update; } @Override public GaugeResult getValue() { return data; } } }
onNext should always contain an item... also, you haven't set `.verify();` on this, so it is not running this test at all.
public void testClaimOwnership() { List<PartitionOwnership> partitionOwnershipList = new ArrayList<>(); StepVerifier.create(store.claimOwnership(partitionOwnershipList)) .assertNext(partitionOwnership -> { Assertions.assertNull(partitionOwnership); }); }
Assertions.assertNull(partitionOwnership);
public void testClaimOwnership() { List<PartitionOwnership> partitionOwnershipList = new ArrayList<>(); StepVerifier.create(store.claimOwnership(partitionOwnershipList)) .verifyComplete(); }
class JedisRedisCheckpointStoreTests { private JedisPool jedisPool; private JedisRedisCheckpointStore store; private Jedis jedis; private JsonSerializer jsonSerializer; private static final String FULLY_QUALIFIED_NAMESPACE = "fullyQualifiedNamespace"; private static final String EVENT_HUB_NAME = "eventHubName"; private static final String CONSUMER_GROUP = "consumerGroup"; private static final String PARTITION_ID = "1"; private static final String PREFIX = JedisRedisCheckpointStore.prefixBuilder(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP); private static final String KEY = JedisRedisCheckpointStore.keyBuilder(PREFIX, PARTITION_ID); @BeforeEach public void setup() { jedisPool = mock(JedisPool.class); jedis = mock(Jedis.class); store = new JedisRedisCheckpointStore(jedisPool); jsonSerializer = JsonSerializerProviders.createInstance(true); } @Test public void testListCheckpoints() { Checkpoint checkpoint = new Checkpoint() .setConsumerGroup(CONSUMER_GROUP) .setEventHubName(EVENT_HUB_NAME) .setFullyQualifiedNamespace(FULLY_QUALIFIED_NAMESPACE) .setPartitionId(PARTITION_ID) .setSequenceNumber(1L); Set<String> value = new HashSet<>(); value.add(KEY); byte[] bytes = jsonSerializer.serializeToBytes(checkpoint); List<String> list = Collections.singletonList(new String(bytes, StandardCharsets.UTF_8)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(value); when(jedis.hmget(eq(KEY), eq(JedisRedisCheckpointStore.CHECKPOINT))).thenReturn(list); StepVerifier.create(store.listCheckpoints(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .assertNext(checkpointTest -> { assertEquals(FULLY_QUALIFIED_NAMESPACE, checkpointTest.getFullyQualifiedNamespace()); assertEquals(EVENT_HUB_NAME, checkpointTest.getEventHubName()); assertEquals(CONSUMER_GROUP, checkpointTest.getConsumerGroup()); }) .verifyComplete(); } @Test public void testListCheckpointsEmptyList() { when(jedisPool.getResource()).thenReturn(jedis); 
when(jedis.smembers(PREFIX)).thenReturn(new HashSet<>()); StepVerifier.create(store.listCheckpoints(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .assertNext(checkpointTest -> { Assertions.assertNull(checkpointTest); } ); } @Test public void testCheckpointKeyNotStored() { Set<String> value = new HashSet<>(); value.add(KEY); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(value); when(jedis.hmget(eq(KEY), eq(JedisRedisCheckpointStore.CHECKPOINT))).thenReturn(null); StepVerifier.create(store.listCheckpoints(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .expectError(IllegalStateException.class) .verify(); } @Test public void testListOwnership() { PartitionOwnership partitionOwnership = new PartitionOwnership() .setFullyQualifiedNamespace(FULLY_QUALIFIED_NAMESPACE) .setEventHubName(EVENT_HUB_NAME) .setConsumerGroup(CONSUMER_GROUP) .setPartitionId(PARTITION_ID) .setOwnerId("ownerOne") .setETag("eTag"); Set<String> value = new HashSet<>(); value.add(KEY); byte[] bytes = jsonSerializer.serializeToBytes(partitionOwnership); List<String> list = Collections.singletonList(new String(bytes, StandardCharsets.UTF_8)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(value); when(jedis.hmget(eq(KEY), eq(JedisRedisCheckpointStore.PARTITION_OWNERSHIP))).thenReturn(list); StepVerifier.create(store.listOwnership(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .assertNext(partitionOwnershipTest -> { assertEquals(FULLY_QUALIFIED_NAMESPACE, partitionOwnershipTest.getFullyQualifiedNamespace()); assertEquals(EVENT_HUB_NAME, partitionOwnershipTest.getEventHubName()); assertEquals(CONSUMER_GROUP, partitionOwnershipTest.getConsumerGroup()); assertEquals("ownerOne", partitionOwnershipTest.getOwnerId()); }) .verifyComplete(); } @Test public void testListOwnershipEmptyList() { when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(new 
HashSet<>()); StepVerifier.create(store.listOwnership(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .assertNext(partitionOwnershipTest -> { Assertions.assertNull(partitionOwnershipTest); } ); } @Test public void testListOwnershipKeyNotStored() { Set<String> value = new HashSet<>(); value.add(KEY); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(value); when(jedis.hmget(eq(KEY), eq(JedisRedisCheckpointStore.PARTITION_OWNERSHIP))).thenReturn(null); StepVerifier.create(store.listOwnership(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .expectError(IllegalStateException.class) .verify(); } @Test @Test public void testUpdateCheckpoint() { Checkpoint checkpoint = new Checkpoint() .setConsumerGroup(CONSUMER_GROUP) .setEventHubName(EVENT_HUB_NAME) .setFullyQualifiedNamespace(FULLY_QUALIFIED_NAMESPACE) .setPartitionId(PARTITION_ID) .setSequenceNumber((long) 1); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.exists(PREFIX)).thenReturn(true); } }
class JedisRedisCheckpointStoreTests { private JedisPool jedisPool; private JedisRedisCheckpointStore store; private Jedis jedis; private JsonSerializer jsonSerializer; private static final String FULLY_QUALIFIED_NAMESPACE = "fullyQualifiedNamespace"; private static final String EVENT_HUB_NAME = "eventHubName"; private static final String CONSUMER_GROUP = "consumerGroup"; private static final String PARTITION_ID = "1"; private static final String PREFIX = JedisRedisCheckpointStore.prefixBuilder(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP); private static final String KEY = JedisRedisCheckpointStore.keyBuilder(PREFIX, PARTITION_ID); @BeforeEach public void setup() { jedisPool = mock(JedisPool.class); jedis = mock(Jedis.class); store = new JedisRedisCheckpointStore(jedisPool); jsonSerializer = JsonSerializerProviders.createInstance(true); } @Test public void testListCheckpoints() { Checkpoint checkpoint = new Checkpoint() .setConsumerGroup(CONSUMER_GROUP) .setEventHubName(EVENT_HUB_NAME) .setFullyQualifiedNamespace(FULLY_QUALIFIED_NAMESPACE) .setPartitionId(PARTITION_ID) .setSequenceNumber(1L); Set<String> value = new HashSet<>(); value.add(KEY); byte[] bytes = jsonSerializer.serializeToBytes(checkpoint); List<String> list = Collections.singletonList(new String(bytes, StandardCharsets.UTF_8)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(value); when(jedis.hmget(eq(KEY), eq(JedisRedisCheckpointStore.CHECKPOINT))).thenReturn(list); StepVerifier.create(store.listCheckpoints(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .assertNext(checkpointTest -> { assertEquals(FULLY_QUALIFIED_NAMESPACE, checkpointTest.getFullyQualifiedNamespace()); assertEquals(EVENT_HUB_NAME, checkpointTest.getEventHubName()); assertEquals(CONSUMER_GROUP, checkpointTest.getConsumerGroup()); }) .verifyComplete(); } @Test public void testListCheckpointsEmptyList() { when(jedisPool.getResource()).thenReturn(jedis); 
when(jedis.smembers(PREFIX)).thenReturn(new HashSet<>()); StepVerifier.create(store.listCheckpoints(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .verifyComplete(); } @Test public void testCheckpointKeyNotStored() { Set<String> value = new HashSet<>(); List<String> nullList = Collections.singletonList(null); value.add(KEY); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(value); when(jedis.hmget(eq(KEY), eq(JedisRedisCheckpointStore.CHECKPOINT))).thenReturn(nullList); StepVerifier.create(store.listCheckpoints(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .verifyComplete(); } @Test public void testListOwnership() { PartitionOwnership partitionOwnership = new PartitionOwnership() .setFullyQualifiedNamespace(FULLY_QUALIFIED_NAMESPACE) .setEventHubName(EVENT_HUB_NAME) .setConsumerGroup(CONSUMER_GROUP) .setPartitionId(PARTITION_ID) .setOwnerId("ownerOne") .setETag("eTag"); Set<String> value = new HashSet<>(); value.add(KEY); byte[] bytes = jsonSerializer.serializeToBytes(partitionOwnership); List<String> list = Collections.singletonList(new String(bytes, StandardCharsets.UTF_8)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(value); when(jedis.hmget(eq(KEY), eq(JedisRedisCheckpointStore.PARTITION_OWNERSHIP))).thenReturn(list); StepVerifier.create(store.listOwnership(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .assertNext(partitionOwnershipTest -> { assertEquals(FULLY_QUALIFIED_NAMESPACE, partitionOwnershipTest.getFullyQualifiedNamespace()); assertEquals(EVENT_HUB_NAME, partitionOwnershipTest.getEventHubName()); assertEquals(CONSUMER_GROUP, partitionOwnershipTest.getConsumerGroup()); assertEquals("ownerOne", partitionOwnershipTest.getOwnerId()); }) .verifyComplete(); } @Test public void testListOwnershipEmptyList() { when(jedisPool.getResource()).thenReturn(jedis); when(jedis.smembers(PREFIX)).thenReturn(new HashSet<>()); 
StepVerifier.create(store.listOwnership(FULLY_QUALIFIED_NAMESPACE, EVENT_HUB_NAME, CONSUMER_GROUP)) .verifyComplete(); } @Test @Test public void testUpdateCheckpoint() { Checkpoint checkpoint = new Checkpoint() .setConsumerGroup(CONSUMER_GROUP) .setEventHubName(EVENT_HUB_NAME) .setFullyQualifiedNamespace(FULLY_QUALIFIED_NAMESPACE) .setPartitionId(PARTITION_ID) .setSequenceNumber((long) 1); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.exists(PREFIX)).thenReturn(true); StepVerifier.create(store.updateCheckpoint(checkpoint)) .verifyComplete(); } }
Aaaaaha! Yes it does, I suspected there's a good reason for it, I just couldn't see it. I guess it's because `@All` is a regular qualifier -- if it was a special annotation, things would probably look a lot different.
AnnotationsTransformerBuildItem transformListAllInjectionPoints() { return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { @Override public int getPriority() { return Integer.MIN_VALUE; } @Override public boolean appliesTo(Kind kind) { return kind == Kind.FIELD || kind == Kind.METHOD; } @Override public void transform(TransformationContext ctx) { if (Annotations.contains(ctx.getAnnotations(), DotNames.ALL)) { AnnotationTarget target = ctx.getTarget(); if (target.kind() == Kind.FIELD) { String id = HashUtil .sha1(target.asField().type().toString() + target.asField().annotations().toString()); ctx.transform().add(DotNames.IDENTIFIED, AnnotationValue.createStringValue("value", id)).done(); } else { MethodInfo method = target.asMethod(); Set<AnnotationInstance> alls = Annotations.getAnnotations(Kind.METHOD_PARAMETER, DotNames.ALL, ctx.getAnnotations()); Set<AnnotationInstance> paramsAnnotations = Annotations.getAnnotations(Kind.METHOD_PARAMETER, ctx.getAnnotations()); List<AnnotationInstance> toAdd = new ArrayList<>(); for (AnnotationInstance annotation : alls) { short position = annotation.target().asMethodParameter().position(); Set<AnnotationInstance> paramAnnotations = new HashSet<>(); for (AnnotationInstance paramAnnotation : paramsAnnotations) { if (paramAnnotation.target().asMethodParameter().position() == position) { paramAnnotations.add(paramAnnotation); } } String id = HashUtil.sha1(method.parameters().get(position) + paramAnnotations.toString()); toAdd.add( AnnotationInstance.create(DotNames.IDENTIFIED, MethodParameterInfo.create(method, annotation.target().asMethodParameter().position()), new AnnotationValue[] { AnnotationValue.createStringValue("value", id) })); } Transformation transform = ctx.transform(); toAdd.forEach(transform::add); transform.done(); } } } }); }
AnnotationsTransformerBuildItem transformListAllInjectionPoints() { return new AnnotationsTransformerBuildItem(new AnnotationsTransformer() { @Override public int getPriority() { return Integer.MIN_VALUE; } @Override public boolean appliesTo(Kind kind) { return kind == Kind.FIELD || kind == Kind.METHOD; } @Override public void transform(TransformationContext ctx) { if (Annotations.contains(ctx.getAnnotations(), DotNames.ALL)) { AnnotationTarget target = ctx.getTarget(); if (target.kind() == Kind.FIELD) { String id = HashUtil .sha1(target.asField().type().toString() + target.asField().annotations().toString()); ctx.transform().add(DotNames.IDENTIFIED, AnnotationValue.createStringValue("value", id)).done(); } else { MethodInfo method = target.asMethod(); Set<AnnotationInstance> alls = Annotations.getAnnotations(Kind.METHOD_PARAMETER, DotNames.ALL, ctx.getAnnotations()); Set<AnnotationInstance> paramsAnnotations = Annotations.getAnnotations(Kind.METHOD_PARAMETER, ctx.getAnnotations()); List<AnnotationInstance> toAdd = new ArrayList<>(); for (AnnotationInstance annotation : alls) { short position = annotation.target().asMethodParameter().position(); Set<AnnotationInstance> paramAnnotations = new HashSet<>(); for (AnnotationInstance paramAnnotation : paramsAnnotations) { if (paramAnnotation.target().asMethodParameter().position() == position) { paramAnnotations.add(paramAnnotation); } } String id = HashUtil.sha1(method.parameters().get(position) + paramAnnotations.toString()); toAdd.add( AnnotationInstance.create(DotNames.IDENTIFIED, MethodParameterInfo.create(method, annotation.target().asMethodParameter().position()), new AnnotationValue[] { AnnotationValue.createStringValue("value", id) })); } Transformation transform = ctx.transform(); toAdd.forEach(transform::add); transform.done(); } } } }); }
class would be ignored during bean discovery transformationContext.transform().add(ADDITIONAL_BEAN).done(); } } } }); builder.setBeanArchiveIndex(index); builder.setApplicationIndex(combinedIndex.getIndex()); List<BeanDefiningAnnotation> beanDefiningAnnotations = additionalBeanDefiningAnnotations.stream() .map((s) -> new BeanDefiningAnnotation(s.getName(), s.getDefaultScope())).collect(Collectors.toList()); beanDefiningAnnotations.add(new BeanDefiningAnnotation(ADDITIONAL_BEAN, null)); builder.setAdditionalBeanDefiningAnnotations(beanDefiningAnnotations); final Map<DotName, Collection<AnnotationInstance>> additionalStereotypes = new HashMap<>(); for (final AdditionalStereotypeBuildItem item : additionalStereotypeBuildItems) { additionalStereotypes.putAll(item.getStereotypes()); }
class would be ignored during bean discovery transformationContext.transform().add(ADDITIONAL_BEAN).done(); } } } }); builder.setBeanArchiveIndex(index); builder.setApplicationIndex(combinedIndex.getIndex()); List<BeanDefiningAnnotation> beanDefiningAnnotations = additionalBeanDefiningAnnotations.stream() .map((s) -> new BeanDefiningAnnotation(s.getName(), s.getDefaultScope())).collect(Collectors.toList()); beanDefiningAnnotations.add(new BeanDefiningAnnotation(ADDITIONAL_BEAN, null)); builder.setAdditionalBeanDefiningAnnotations(beanDefiningAnnotations); final Map<DotName, Collection<AnnotationInstance>> additionalStereotypes = new HashMap<>(); for (final AdditionalStereotypeBuildItem item : additionalStereotypeBuildItems) { additionalStereotypes.putAll(item.getStereotypes()); }
Shouldn't we ideally pass the errors related to WebSocket connection in the onError resource? IMO all other internal errors should be logged in the internal log but should not appear in onError resource since they are not related to WebSocket connection. WDYT?
public static void dispatchError(WebSocketOpenConnectionInfo connectionInfo, Throwable throwable) { WebSocketService webSocketService = connectionInfo.getService(); Resource onErrorResource = webSocketService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_ERROR); if (isUnexpectedError(throwable)) { log.error("Unexpected error", throwable); return; } if (onErrorResource == null) { ErrorHandlerUtils.printError(throwable); return; } BValue[] bValues = new BValue[onErrorResource.getParamDetails().size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = getError(webSocketService, throwable); CallableUnitCallback onErrorCallback = new CallableUnitCallback() { @Override public void notifySuccess() { } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onErrorResource, onErrorCallback, null, null, bValues); }
if (isUnexpectedError(throwable)) {
public static void dispatchError(WebSocketOpenConnectionInfo connectionInfo, Throwable throwable) { WebSocketService webSocketService = connectionInfo.getService(); Resource onErrorResource = webSocketService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_ERROR); if (isUnexpectedError(throwable)) { log.error("Unexpected error", throwable); } if (onErrorResource == null) { ErrorHandlerUtils.printError(throwable); return; } BValue[] bValues = new BValue[onErrorResource.getParamDetails().size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = getError(webSocketService, throwable); CallableUnitCallback onErrorCallback = new CallableUnitCallback() { @Override public void notifySuccess() { } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onErrorResource, onErrorCallback, null, null, bValues); }
class WebSocketDispatcher { private static final Logger log = LoggerFactory.getLogger(WebSocketDispatcher.class); /** * This will find the best matching service for given web socket request. * * @param webSocketMessage incoming message. * @return matching service. */ public static WebSocketService findService(WebSocketServicesRegistry servicesRegistry, Map<String, String> pathParams, WebSocketInitMessage webSocketMessage, HTTPCarbonMessage msg) { try { String serviceUri = webSocketMessage.getTarget(); serviceUri = WebSocketUtil.refactorUri(serviceUri); URI requestUri; try { requestUri = URI.create(serviceUri); } catch (IllegalArgumentException e) { throw new BallerinaConnectorException(e.getMessage()); } WebSocketService service = servicesRegistry.getUriTemplate().matches(requestUri.getPath(), pathParams, webSocketMessage); if (service == null) { throw new BallerinaConnectorException("no Service found to handle the service request: " + serviceUri); } msg.setProperty(HttpConstants.QUERY_STR, requestUri.getRawQuery()); return service; } catch (Throwable throwable) { String message = "No Service found to handle the service request"; webSocketMessage.cancelHandshake(404, message); throw new BallerinaConnectorException(message, throwable); } } public static void dispatchTextMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketTextMessage textMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onTextMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_TEXT); if (onTextMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onTextMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BString(textMessage.getText()); if (paramDetails.size() == 3) { bValues[2] = new 
BBoolean(textMessage.isFinalFragment()); } Executor.submit(onTextMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } public static void dispatchBinaryMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketBinaryMessage binaryMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onBinaryMessageResource = wsService.getResourceByName( WebSocketConstants.RESOURCE_NAME_ON_BINARY); if (onBinaryMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onBinaryMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(binaryMessage.getByteArray()); if (paramDetails.size() == 3) { bValues[2] = new BBoolean(binaryMessage.isFinalFragment()); } Executor.submit(onBinaryMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } public static void dispatchControlMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { if (controlMessage.getControlSignal() == WebSocketControlSignal.PING) { WebSocketDispatcher.dispatchPingMessage(connectionInfo, controlMessage); } else if (controlMessage.getControlSignal() == WebSocketControlSignal.PONG) { WebSocketDispatcher.dispatchPongMessage(connectionInfo, controlMessage); } else { throw new BallerinaConnectorException("Received unknown control signal"); } } private static void dispatchPingMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onPingMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PING); if (onPingMessageResource == 
null) { pingAutomatically(controlMessage); return; } List<ParamDetail> paramDetails = onPingMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(controlMessage.getByteArray()); Executor.submit(onPingMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } private static void dispatchPongMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onPongMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PONG); if (onPongMessageResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onPongMessageResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); bValues[1] = new BBlob(controlMessage.getByteArray()); Executor.submit(onPongMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection), null, null, bValues); } public static void dispatchCloseMessage(WebSocketOpenConnectionInfo connectionInfo, WebSocketCloseMessage closeMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onCloseResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_CLOSE); int closeCode = closeMessage.getCloseCode(); String closeReason = closeMessage.getCloseReason(); if (onCloseResource == null) { if (webSocketConnection.getSession().isOpen()) { webSocketConnection.finishConnectionClosure(closeCode, null); } return; } List<ParamDetail> paramDetails = onCloseResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = 
connectionInfo.getWebSocketEndpoint(); bValues[1] = new BInteger(closeCode); bValues[2] = new BString(closeReason); CallableUnitCallback onCloseCallback = new CallableUnitCallback() { @Override public void notifySuccess() { if (closeMessage.getCloseCode() != WebSocketConstants.STATUS_CODE_ABNORMAL_CLOSURE && webSocketConnection.getSession().isOpen()) { webSocketConnection.finishConnectionClosure(closeCode, null).addListener( closeFuture -> connectionInfo.getWebSocketEndpoint().setBooleanField(0, 0)); } } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onCloseResource, onCloseCallback, null, null, bValues); } private static boolean isUnexpectedError(Throwable throwable) { return !(throwable instanceof CorruptedFrameException); } private static BStruct getError(WebSocketService webSocketService, Throwable throwable) { ProgramFile programFile = webSocketService.getServiceInfo().getPackageInfo().getProgramFile(); PackageInfo errorPackageInfo = programFile.getPackageInfo(BLangVMErrors.PACKAGE_BUILTIN); StructureTypeInfo errorStructInfo = errorPackageInfo.getStructInfo(BLangVMErrors.STRUCT_GENERIC_ERROR); return BLangVMStructs.createBStruct(errorStructInfo, throwable.getMessage()); } public static void dispatchIdleTimeout(WebSocketOpenConnectionInfo connectionInfo, WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection(); WebSocketService wsService = connectionInfo.getService(); Resource onIdleTimeoutResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_IDLE_TIMEOUT); if (onIdleTimeoutResource == null) { webSocketConnection.readNextFrame(); return; } List<ParamDetail> paramDetails = onIdleTimeoutResource.getParamDetails(); BValue[] bValues = new BValue[paramDetails.size()]; bValues[0] = connectionInfo.getWebSocketEndpoint(); CallableUnitCallback onIdleTimeoutCallback = new 
CallableUnitCallback() { @Override public void notifySuccess() { } @Override public void notifyFailure(BStruct error) { ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error)); } }; Executor.submit(onIdleTimeoutResource, onIdleTimeoutCallback, null, null, bValues); } private static void pingAutomatically(WebSocketControlMessage controlMessage) { WebSocketConnection webSocketConnection = controlMessage.getWebSocketConnection(); webSocketConnection.pong(controlMessage.getPayload()).addListener(future -> { Throwable cause = future.cause(); if (!future.isSuccess() && cause != null) { ErrorHandlerUtils.printError(cause); } webSocketConnection.readNextFrame(); }); } public static void setPathParams(BValue[] bValues, List<ParamDetail> paramDetails, Map<String, String> pathParams, int defaultArgSize) { int parameterDetailsSize = paramDetails.size(); if (parameterDetailsSize > defaultArgSize) { for (int i = defaultArgSize; i < parameterDetailsSize; i++) { bValues[i] = new BString(pathParams.get(paramDetails.get(i).getVarName())); } } } }
/**
 * Dispatches inbound WebSocket events (handshake, text/binary frames, ping/pong control
 * frames, close and idle-timeout signals) to the matching Ballerina resource of the
 * owning WebSocket service. Stateless: all methods are static.
 */
class WebSocketDispatcher {

    private static final Logger log = LoggerFactory.getLogger(WebSocketDispatcher.class);

    /**
     * Finds the best matching service for the given WebSocket upgrade request.
     *
     * @param servicesRegistry registry of deployed WebSocket services.
     * @param pathParams       out-parameter populated with URI-template path parameters.
     * @param webSocketMessage incoming handshake message.
     * @param msg              underlying HTTP carbon message; its query-string property is set here.
     * @return matching service.
     */
    public static WebSocketService findService(WebSocketServicesRegistry servicesRegistry,
                                               Map<String, String> pathParams,
                                               WebSocketInitMessage webSocketMessage, HTTPCarbonMessage msg) {
        try {
            String serviceUri = webSocketMessage.getTarget();
            serviceUri = WebSocketUtil.refactorUri(serviceUri);
            URI requestUri;
            try {
                requestUri = URI.create(serviceUri);
            } catch (IllegalArgumentException e) {
                throw new BallerinaConnectorException(e.getMessage());
            }
            WebSocketService service = servicesRegistry.getUriTemplate().matches(requestUri.getPath(), pathParams,
                                                                                 webSocketMessage);
            if (service == null) {
                throw new BallerinaConnectorException("no Service found to handle the service request: " + serviceUri);
            }
            msg.setProperty(HttpConstants.QUERY_STR, requestUri.getRawQuery());
            return service;
        } catch (Throwable throwable) {
            // Any failure means the handshake cannot proceed: reject with 404 before rethrowing.
            String message = "No Service found to handle the service request";
            webSocketMessage.cancelHandshake(404, message);
            throw new BallerinaConnectorException(message, throwable);
        }
    }

    /**
     * Dispatches a text frame to the service's onText resource, or keeps reading
     * frames when no such resource is declared.
     */
    public static void dispatchTextMessage(WebSocketOpenConnectionInfo connectionInfo,
                                           WebSocketTextMessage textMessage) {
        WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();
        WebSocketService wsService = connectionInfo.getService();
        Resource onTextMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_TEXT);
        if (onTextMessageResource == null) {
            // No onText resource: request the next frame so the connection does not stall.
            webSocketConnection.readNextFrame();
            return;
        }
        List<ParamDetail> paramDetails = onTextMessageResource.getParamDetails();
        BValue[] bValues = new BValue[paramDetails.size()];
        bValues[0] = connectionInfo.getWebSocketEndpoint();
        bValues[1] = new BString(textMessage.getText());
        if (paramDetails.size() == 3) {
            // Optional third parameter carries the final-fragment flag for fragmented messages.
            bValues[2] = new BBoolean(textMessage.isFinalFragment());
        }
        Executor.submit(onTextMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection),
                        null, null, bValues);
    }

    /**
     * Dispatches a binary frame to the service's onBinary resource, or keeps reading
     * frames when no such resource is declared.
     */
    public static void dispatchBinaryMessage(WebSocketOpenConnectionInfo connectionInfo,
                                             WebSocketBinaryMessage binaryMessage) {
        WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();
        WebSocketService wsService = connectionInfo.getService();
        Resource onBinaryMessageResource = wsService.getResourceByName(
                WebSocketConstants.RESOURCE_NAME_ON_BINARY);
        if (onBinaryMessageResource == null) {
            webSocketConnection.readNextFrame();
            return;
        }
        List<ParamDetail> paramDetails = onBinaryMessageResource.getParamDetails();
        BValue[] bValues = new BValue[paramDetails.size()];
        bValues[0] = connectionInfo.getWebSocketEndpoint();
        bValues[1] = new BBlob(binaryMessage.getByteArray());
        if (paramDetails.size() == 3) {
            // Optional third parameter carries the final-fragment flag for fragmented messages.
            bValues[2] = new BBoolean(binaryMessage.isFinalFragment());
        }
        Executor.submit(onBinaryMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection),
                        null, null, bValues);
    }

    /**
     * Routes a control frame (PING/PONG) to the corresponding private dispatcher.
     *
     * @throws BallerinaConnectorException for any other control signal.
     */
    public static void dispatchControlMessage(WebSocketOpenConnectionInfo connectionInfo,
                                              WebSocketControlMessage controlMessage) {
        if (controlMessage.getControlSignal() == WebSocketControlSignal.PING) {
            WebSocketDispatcher.dispatchPingMessage(connectionInfo, controlMessage);
        } else if (controlMessage.getControlSignal() == WebSocketControlSignal.PONG) {
            WebSocketDispatcher.dispatchPongMessage(connectionInfo, controlMessage);
        } else {
            throw new BallerinaConnectorException("Received unknown control signal");
        }
    }

    /**
     * Dispatches a ping frame to the onPing resource; when absent, answers with an
     * automatic pong as the WebSocket protocol requires.
     */
    private static void dispatchPingMessage(WebSocketOpenConnectionInfo connectionInfo,
                                            WebSocketControlMessage controlMessage) {
        WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();
        WebSocketService wsService = connectionInfo.getService();
        Resource onPingMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PING);
        if (onPingMessageResource == null) {
            pingAutomatically(controlMessage);
            return;
        }
        List<ParamDetail> paramDetails = onPingMessageResource.getParamDetails();
        BValue[] bValues = new BValue[paramDetails.size()];
        bValues[0] = connectionInfo.getWebSocketEndpoint();
        bValues[1] = new BBlob(controlMessage.getByteArray());
        Executor.submit(onPingMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection),
                        null, null, bValues);
    }

    /**
     * Dispatches a pong frame to the onPong resource, or keeps reading frames when
     * no such resource is declared.
     */
    private static void dispatchPongMessage(WebSocketOpenConnectionInfo connectionInfo,
                                            WebSocketControlMessage controlMessage) {
        WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();
        WebSocketService wsService = connectionInfo.getService();
        Resource onPongMessageResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_PONG);
        if (onPongMessageResource == null) {
            webSocketConnection.readNextFrame();
            return;
        }
        List<ParamDetail> paramDetails = onPongMessageResource.getParamDetails();
        BValue[] bValues = new BValue[paramDetails.size()];
        bValues[0] = connectionInfo.getWebSocketEndpoint();
        bValues[1] = new BBlob(controlMessage.getByteArray());
        Executor.submit(onPongMessageResource, new WebSocketResourceCallableUnitCallback(webSocketConnection),
                        null, null, bValues);
    }

    /**
     * Dispatches a close frame to the onClose resource. When the resource is absent,
     * finishes the close handshake directly (if the session is still open); otherwise
     * the handshake is finished after the resource completes successfully.
     */
    public static void dispatchCloseMessage(WebSocketOpenConnectionInfo connectionInfo,
                                            WebSocketCloseMessage closeMessage) {
        WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();
        WebSocketService wsService = connectionInfo.getService();
        Resource onCloseResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_CLOSE);
        int closeCode = closeMessage.getCloseCode();
        String closeReason = closeMessage.getCloseReason();
        if (onCloseResource == null) {
            if (webSocketConnection.getSession().isOpen()) {
                webSocketConnection.finishConnectionClosure(closeCode, null);
            }
            return;
        }
        List<ParamDetail> paramDetails = onCloseResource.getParamDetails();
        BValue[] bValues = new BValue[paramDetails.size()];
        bValues[0] = connectionInfo.getWebSocketEndpoint();
        bValues[1] = new BInteger(closeCode);
        bValues[2] = new BString(closeReason);
        CallableUnitCallback onCloseCallback = new CallableUnitCallback() {
            @Override
            public void notifySuccess() {
                // Abnormal closures (no close frame from peer) must not be acknowledged.
                if (closeMessage.getCloseCode() != WebSocketConstants.STATUS_CODE_ABNORMAL_CLOSURE
                        && webSocketConnection.getSession().isOpen()) {
                    // Mark the endpoint as no longer open once the closure completes.
                    webSocketConnection.finishConnectionClosure(closeCode, null).addListener(
                            closeFuture -> connectionInfo.getWebSocketEndpoint().setBooleanField(0, 0));
                }
            }

            @Override
            public void notifyFailure(BStruct error) {
                ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error));
            }
        };
        Executor.submit(onCloseResource, onCloseCallback, null, null, bValues);
    }

    /**
     * Builds a generic Ballerina error struct for the given throwable. Unexpected
     * errors get a sanitized message so internal details are not leaked to user code.
     */
    private static BStruct getError(WebSocketService webSocketService, Throwable throwable) {
        ProgramFile programFile = webSocketService.getServiceInfo().getPackageInfo().getProgramFile();
        PackageInfo errorPackageInfo = programFile.getPackageInfo(BLangVMErrors.PACKAGE_BUILTIN);
        StructureTypeInfo errorStructInfo = errorPackageInfo.getStructInfo(BLangVMErrors.STRUCT_GENERIC_ERROR);
        String errMsg;
        if (isUnexpectedError(throwable)) {
            errMsg = "Unexpected internal error. Please check internal-log for more details!";
        } else {
            errMsg = throwable.getMessage();
        }
        return BLangVMStructs.createBStruct(errorStructInfo, errMsg);
    }

    // Only frame-corruption errors are considered "expected" protocol-level failures.
    private static boolean isUnexpectedError(Throwable throwable) {
        return !(throwable instanceof CorruptedFrameException);
    }

    /**
     * Dispatches an idle-timeout signal to the onIdleTimeout resource, or keeps
     * reading frames when no such resource is declared.
     */
    public static void dispatchIdleTimeout(WebSocketOpenConnectionInfo connectionInfo,
                                           WebSocketControlMessage controlMessage) {
        WebSocketConnection webSocketConnection = connectionInfo.getWebSocketConnection();
        WebSocketService wsService = connectionInfo.getService();
        Resource onIdleTimeoutResource = wsService.getResourceByName(WebSocketConstants.RESOURCE_NAME_ON_IDLE_TIMEOUT);
        if (onIdleTimeoutResource == null) {
            webSocketConnection.readNextFrame();
            return;
        }
        List<ParamDetail> paramDetails = onIdleTimeoutResource.getParamDetails();
        BValue[] bValues = new BValue[paramDetails.size()];
        bValues[0] = connectionInfo.getWebSocketEndpoint();
        CallableUnitCallback onIdleTimeoutCallback = new CallableUnitCallback() {
            @Override
            public void notifySuccess() {
                // Nothing to do after a successful idle-timeout resource invocation.
            }

            @Override
            public void notifyFailure(BStruct error) {
                ErrorHandlerUtils.printError("error: " + BLangVMErrors.getPrintableStackTrace(error));
            }
        };
        Executor.submit(onIdleTimeoutResource, onIdleTimeoutCallback, null, null, bValues);
    }

    /**
     * Answers a ping with a pong echoing its payload, then resumes frame reading.
     * Pong failures are logged; the read loop continues regardless.
     */
    private static void pingAutomatically(WebSocketControlMessage controlMessage) {
        WebSocketConnection webSocketConnection = controlMessage.getWebSocketConnection();
        webSocketConnection.pong(controlMessage.getPayload()).addListener(future -> {
            Throwable cause = future.cause();
            if (!future.isSuccess() && cause != null) {
                ErrorHandlerUtils.printError(cause);
            }
            webSocketConnection.readNextFrame();
        });
    }

    /**
     * Fills the trailing slots of {@code bValues} (after the first
     * {@code defaultArgSize} fixed arguments) with path-parameter values looked up
     * by each parameter's variable name.
     */
    public static void setPathParams(BValue[] bValues, List<ParamDetail> paramDetails,
                                     Map<String, String> pathParams, int defaultArgSize) {
        int parameterDetailsSize = paramDetails.size();
        if (parameterDetailsSize > defaultArgSize) {
            for (int i = defaultArgSize; i < parameterDetailsSize; i++) {
                bValues[i] = new BString(pathParams.get(paramDetails.get(i).getVarName()));
            }
        }
    }
}
Is it possible to have a method without any class?
/**
 * Verifies that a fluent method conforms to the fluent-API conventions:
 * exactly one parameter, a return type equal to the enclosing class, and a
 * name that does not start with any configured forbidden prefix.
 *
 * @param methodDefToken METHOD_DEF AST node of the method under check.
 */
private void checkMethodNamePrefix(DetailAST methodDefToken) {
    // Fluent setters must take exactly one parameter.
    if (TokenUtil.findFirstTokenByPredicate(methodDefToken,
            parameters -> parameters.getType() == TokenTypes.PARAMETERS
                    && parameters.getChildCount() != 1).isPresent()) {
        // Fixed grammar of the user-facing message ("should only has" -> "should only have").
        log(methodDefToken, "A fluent method should only have one parameter.");
    }
    // Guard against methods seen outside any tracked class scope.
    if (classNameStack.isEmpty()) {
        return;
    }
    // The return type must be the enclosing class itself so calls can be chained.
    final DetailAST typeToken = methodDefToken.findFirstToken(TokenTypes.TYPE);
    if (TokenUtil.findFirstTokenByPredicate(typeToken,
            ident -> ident.getType() == TokenTypes.IDENT
                    && !ident.getText().equals(classNameStack.peekLast())).isPresent()) {
        log(methodDefToken, "Return type of fluent method should be the class itself");
    }
    final String methodName = methodDefToken.findFirstToken(TokenTypes.IDENT).getText();
    avoidStartWords.forEach(avoidStartWord -> {
        // startsWith already returns false when methodName is shorter than the prefix,
        // so the former explicit length comparison was redundant.
        if (methodName.startsWith(avoidStartWord)) {
            log(methodDefToken, String.format(FLUENT_METHOD_ERR, methodName, avoidStartWord));
        }
    });
}
if (classNameStack.isEmpty()) {
/**
 * Checks a fluent method definition: it must declare exactly one parameter,
 * return the enclosing class type, and its name must not begin with any of
 * the configured forbidden keywords.
 *
 * @param methodDefToken METHOD_DEF AST node being inspected.
 */
private void checkMethodNamePrefix(DetailAST methodDefToken) {
    // A fluent setter carries exactly one parameter.
    final boolean hasWrongArity = TokenUtil.findFirstTokenByPredicate(methodDefToken,
            node -> node.getType() == TokenTypes.PARAMETERS && node.getChildCount() != 1).isPresent();
    if (hasWrongArity) {
        log(methodDefToken, "A fluent method should only have one parameter.");
    }

    // The declared return type must name the enclosing class so calls can be chained.
    final DetailAST typeToken = methodDefToken.findFirstToken(TokenTypes.TYPE);
    final boolean returnsOtherType = TokenUtil.findFirstTokenByPredicate(typeToken,
            node -> node.getType() == TokenTypes.IDENT
                    && !node.getText().equals(classNameStack.peekLast())).isPresent();
    if (returnsOtherType) {
        log(methodDefToken, "Return type of fluent method should be the class itself");
    }

    final String methodName = methodDefToken.findFirstToken(TokenTypes.IDENT).getText();
    for (final String prefix : avoidStartWords) {
        if (methodName.length() >= prefix.length() && methodName.startsWith(prefix)) {
            log(methodDefToken,
                    String.format("''%s'' fluent method name should not start with keyword ''%s''.",
                            methodName, prefix));
        }
    }
}
class names while traversing the AST tree. */
private final Deque<String> classNameStack = new ArrayDeque<>();

/**
 * Adds words that methods in fluent classes should not be prefixed with.
 *
 * @param avoidStartWords prefixes that fluent method names must not start with
 */
public final void setAvoidStartWords(String... avoidStartWords) {
    Collections.addAll(this.avoidStartWords, avoidStartWords);
}
class names while traversing the AST tree. */
private final Deque<String> classNameStack = new ArrayDeque<>();

/**
 * Adds words that methods in fluent classes should not be prefixed with.
 *
 * @param avoidStartWords prefixes that fluent method names must not start with
 */
public final void setAvoidStartWords(String... avoidStartWords) {
    Collections.addAll(this.avoidStartWords, avoidStartWords);
}
The riskiest bug in this code: the updated snippet mutates `expressionMapping` for each column reference even though `allColumnRef` may later turn out to be false, leaving stale entries behind. You can fix it like this: ``` @@ -292,10 +292,18 @@ private OptExprBuilder window(OptExprBuilder subOpt, List<AnalyticExpr> window) } final ExpressionMapping expressionMapping = subOpt.getExpressionMapping(); + Map<Expr, ColumnRefOperator> tempMapping = new HashMap<>(); boolean allColumnRef = true; for (Expr expression : projectExpressions) { ScalarOperator operator = SqlToScalarOperatorTranslator.translate(expression, expressionMapping, columnRefFactory); if (!operator.isColumnRef()) { allColumnRef = false; + tempMapping.clear(); break; } else { + tempMapping.put(expression, (ColumnRefOperator) operator); } + } + if (allColumnRef) { + expressionMapping.putAll(tempMapping); } ```
/**
 * Plans analytic (window) functions on top of the current sub-plan.
 *
 * <p>Partition/order-by expressions that are not plain column references are first
 * materialized through a ProjectOperator; the analytic expressions are then
 * standardized, merged, reordered and attached as LogicalWindowOperators.
 *
 * @param subOpt current plan builder.
 * @param window analytic expressions to plan; may be empty.
 * @return builder with the window operators (and any needed project) applied.
 */
private OptExprBuilder window(OptExprBuilder subOpt, List<AnalyticExpr> window) {
    if (window.isEmpty()) {
        return subOpt;
    }

    /*
     * Build ProjectOperator of partition expression and order by expression in window function.
     */
    List<Expr> projectExpressions = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        projectExpressions.addAll(analyticExpr.getPartitionExprs());
        projectExpressions.addAll(analyticExpr.getOrderByElements()
                .stream().map(OrderByElement::getExpr).collect(Collectors.toList()));
    }

    final ExpressionMapping expressionMapping = subOpt.getExpressionMapping();
    boolean allColumnRef = true;
    // BUGFIX: buffer the column refs instead of writing them straight into the shared
    // expressionMapping — otherwise the mapping is polluted with partial entries when a
    // later expression turns out not to be a column ref and a project is built anyway.
    Map<Expr, ColumnRefOperator> bufferedColumnRefs = Maps.newHashMap();
    for (Expr expression : projectExpressions) {
        ScalarOperator operator = SqlToScalarOperatorTranslator.translate(expression,
                expressionMapping, columnRefFactory);
        if (!operator.isColumnRef()) {
            allColumnRef = false;
            bufferedColumnRefs.clear();
            break;
        } else {
            bufferedColumnRefs.put(expression, (ColumnRefOperator) operator);
        }
    }
    // Commit only when every expression is a plain column reference.
    if (allColumnRef) {
        bufferedColumnRefs.forEach(expressionMapping::put);
    }

    /*
     * If there is no expression calculate in partition and order by,
     * there is no need to add ProjectOperator here
     */
    if (!allColumnRef) {
        ExpressionMapping outputTranslations = new ExpressionMapping(subOpt.getScope());
        List<ColumnRefOperator> fieldMappings = new ArrayList<>();
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        // Re-expose every existing field through a fresh column ref.
        for (ColumnRefOperator expression : subOpt.getFieldMappings()) {
            ColumnRefOperator variable =
                    columnRefFactory.create(expression, expression.getType(), expression.isNullable());
            projections.put(variable, expression);
            fieldMappings.add(variable);
        }
        outputTranslations.setFieldMappings(fieldMappings);
        for (Expr expression : subOpt.getExpressionMapping().getAllExpressions()) {
            ColumnRefOperator columnRef = findOrCreateColumnRefForExpr(expression,
                    subOpt.getExpressionMapping(), projections, columnRefFactory);
            outputTranslations.put(expression, columnRef);
        }
        // Materialize the partition/order-by expressions themselves.
        for (Expr expression : projectExpressions) {
            ColumnRefOperator columnRef = findOrCreateColumnRefForExpr(expression,
                    subOpt.getExpressionMapping(), projections, columnRefFactory);
            outputTranslations.put(expression, columnRef);
        }
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections);
        subOpt.setExpressionMapping(outputTranslations);
        subOpt = subOpt.withNewRoot(projectOperator);
    }

    /*
     * If necessary, rewrites the analytic function, window, and/or order-by elements
     * into a standard format for the purpose of simpler backend execution
     */
    List<WindowTransformer.WindowOperator> windowOperators = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        WindowTransformer.WindowOperator rewriteOperator = WindowTransformer.standardize(analyticExpr);
        if (windowOperators.contains(rewriteOperator)) {
            // Merge analytic functions sharing the same standardized window spec.
            WindowTransformer.WindowOperator windowOperator =
                    windowOperators.get(windowOperators.indexOf(rewriteOperator));
            if (rewriteOperator.isSkewed()) {
                windowOperator.setSkewed();
            }
            windowOperator.addFunction(analyticExpr);
        } else {
            windowOperators.add(rewriteOperator);
        }
    }

    List<LogicalWindowOperator> logicalWindowOperators =
            WindowTransformer.reorderWindowOperator(windowOperators, columnRefFactory, subOpt);
    for (LogicalWindowOperator logicalWindowOperator : logicalWindowOperators) {
        subOpt = subOpt.withNewRoot(logicalWindowOperator);
    }
    return subOpt;
}
Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
/**
 * Plans analytic (window) functions on top of the current sub-plan.
 *
 * <p>Partition/order-by expressions that are not plain column references are first
 * materialized through a ProjectOperator; the analytic expressions are then
 * standardized, merged, reordered and attached as LogicalWindowOperators.
 *
 * @param subOpt current plan builder.
 * @param window analytic expressions to plan; may be empty.
 * @return builder with the window operators (and any needed project) applied.
 */
private OptExprBuilder window(OptExprBuilder subOpt, List<AnalyticExpr> window) {
    if (window.isEmpty()) {
        return subOpt;
    }

    /*
     * Build ProjectOperator of partition expression and order by expression in window function.
     */
    List<Expr> projectExpressions = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        projectExpressions.addAll(analyticExpr.getPartitionExprs());
        projectExpressions.addAll(analyticExpr.getOrderByElements()
                .stream().map(OrderByElement::getExpr).collect(Collectors.toList()));
    }

    final ExpressionMapping expressionMapping = subOpt.getExpressionMapping();
    boolean allColumnRef = true;
    // Column refs are buffered and only committed to the shared mapping when every
    // expression turns out to be a plain column reference (avoids polluting the
    // mapping with partial entries when a project is built below).
    Map<Expr, ColumnRefOperator> tempMapping = new HashMap<>();
    for (Expr expression : projectExpressions) {
        ScalarOperator operator = SqlToScalarOperatorTranslator.translate(expression,
                expressionMapping, columnRefFactory);
        if (!operator.isColumnRef()) {
            allColumnRef = false;
            tempMapping.clear();
            break;
        } else {
            tempMapping.put(expression, (ColumnRefOperator) operator);
        }
    }
    if (allColumnRef) {
        expressionMapping.getExpressionToColumns().putAll(tempMapping);
    }

    /*
     * If there is no expression calculate in partition and order by,
     * there is no need to add ProjectOperator here
     */
    if (!allColumnRef) {
        ExpressionMapping outputTranslations = new ExpressionMapping(subOpt.getScope());
        List<ColumnRefOperator> fieldMappings = new ArrayList<>();
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        // Re-expose every existing field through a fresh column ref.
        for (ColumnRefOperator expression : subOpt.getFieldMappings()) {
            ColumnRefOperator variable =
                    columnRefFactory.create(expression, expression.getType(), expression.isNullable());
            projections.put(variable, expression);
            fieldMappings.add(variable);
        }
        outputTranslations.setFieldMappings(fieldMappings);
        for (Expr expression : subOpt.getExpressionMapping().getAllExpressions()) {
            ColumnRefOperator columnRef = findOrCreateColumnRefForExpr(expression,
                    subOpt.getExpressionMapping(), projections, columnRefFactory);
            outputTranslations.put(expression, columnRef);
        }
        // Materialize the partition/order-by expressions themselves.
        for (Expr expression : projectExpressions) {
            ColumnRefOperator columnRef = findOrCreateColumnRefForExpr(expression,
                    subOpt.getExpressionMapping(), projections, columnRefFactory);
            outputTranslations.put(expression, columnRef);
        }
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections);
        subOpt.setExpressionMapping(outputTranslations);
        subOpt = subOpt.withNewRoot(projectOperator);
    }

    /*
     * If necessary, rewrites the analytic function, window, and/or order-by elements
     * into a standard format for the purpose of simpler backend execution
     */
    List<WindowTransformer.WindowOperator> windowOperators = new ArrayList<>();
    for (AnalyticExpr analyticExpr : window) {
        WindowTransformer.WindowOperator rewriteOperator = WindowTransformer.standardize(analyticExpr);
        if (windowOperators.contains(rewriteOperator)) {
            // Merge analytic functions sharing the same standardized window spec.
            WindowTransformer.WindowOperator windowOperator =
                    windowOperators.get(windowOperators.indexOf(rewriteOperator));
            if (rewriteOperator.isSkewed()) {
                windowOperator.setSkewed();
            }
            windowOperator.addFunction(analyticExpr);
        } else {
            windowOperators.add(rewriteOperator);
        }
    }

    List<LogicalWindowOperator> logicalWindowOperators =
            WindowTransformer.reorderWindowOperator(windowOperators, columnRefFactory, subOpt);
    for (LogicalWindowOperator logicalWindowOperator : logicalWindowOperators) {
        subOpt = subOpt.withNewRoot(logicalWindowOperator);
    }
    return subOpt;
}
/**
 * Transforms an analyzed SELECT relation into a logical operator plan
 * (filter -> aggregate -> having -> window -> project/sort/limit).
 * Holds per-query state such as the column-ref factory, session, CTE context
 * and collected correlated column references.
 */
class QueryTransformer {
    private final ColumnRefFactory columnRefFactory;
    private final ConnectContext session;
    // Correlated column refs discovered while translating predicates (filled by filter()).
    private final List<ColumnRefOperator> correlation = new ArrayList<>();
    private final CTETransformerContext cteContext;
    private final boolean inlineView;
    private final Map<Operator, ParseNode> optToAstMap;
    // Names for the synthesized grouping-set id columns.
    public static final String GROUPING_ID = "GROUPING_ID";
    public static final String GROUPING = "GROUPING";

    public QueryTransformer(ColumnRefFactory columnRefFactory, ConnectContext session,
                            CTETransformerContext cteContext, boolean inlineView,
                            Map<Operator, ParseNode> optToAstMap) {
        this.columnRefFactory = columnRefFactory;
        this.session = session;
        this.cteContext = cteContext;
        this.inlineView = inlineView;
        this.optToAstMap = optToAstMap;
    }

    /**
     * Builds the full logical plan for one SELECT block, chaining the clause
     * transforms in SQL evaluation order.
     *
     * @param queryBlock analyzed SELECT relation.
     * @param outer      expression mapping of the enclosing query, for correlated references.
     * @return logical plan with its output columns and collected correlations.
     */
    public LogicalPlan plan(SelectRelation queryBlock, ExpressionMapping outer) {
        OptExprBuilder builder = planFrom(queryBlock.getRelation(), cteContext);
        builder.setExpressionMapping(new ExpressionMapping(builder.getScope(), builder.getFieldMappings(), outer));

        // Register generated-column expressions so later clauses can reuse their column refs.
        Map<Expr, SlotRef> generatedExprToColumnRef = queryBlock.getGeneratedExprToColumnRef();
        ExpressionMapping expressionMapping = builder.getExpressionMapping();
        for (Map.Entry<Expr, SlotRef> m : generatedExprToColumnRef.entrySet()) {
            ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(m.getValue(),
                    builder.getExpressionMapping(), columnRefFactory);
            expressionMapping.put(m.getKey(), (ColumnRefOperator) scalarOperator);
        }

        builder = filter(builder, queryBlock.getPredicate());
        builder = aggregate(builder, queryBlock.getGroupBy(), queryBlock.getAggregate(),
                queryBlock.getGroupingSetsList(), queryBlock.getGroupingFunctionCallExprs());
        builder = filter(builder, queryBlock.getHaving());

        List<AnalyticExpr> analyticExprList = new ArrayList<>(queryBlock.getOutputAnalytic());
        analyticExprList.addAll(queryBlock.getOrderByAnalytic());
        builder = window(builder, analyticExprList);

        if (queryBlock.hasOrderByClause()) {
            if (!queryBlock.getGroupBy().isEmpty() || !queryBlock.getAggregate().isEmpty()) {
                // With aggregation the order-by source expressions must be projected explicitly.
                List<String> outputNames = new ArrayList<>(queryBlock.getColumnOutputNames());
                for (int i = 0; i < queryBlock.getOrderSourceExpressions().size(); ++i) {
                    outputNames.add(queryBlock.getOrderSourceExpressions().get(i).toString());
                }
                builder = projectForOrder(builder,
                        Iterables.concat(queryBlock.getOutputExpression(),
                                queryBlock.getOrderSourceExpressions(),
                                queryBlock.getOrderByAnalytic()),
                        queryBlock.getOutputExprInOrderByScope(),
                        outputNames,
                        builder.getFieldMappings(),
                        queryBlock.getOrderScope(),
                        true);
            } else {
                builder = projectForOrder(builder,
                        Iterables.concat(queryBlock.getOutputExpression(), queryBlock.getOrderByAnalytic()),
                        queryBlock.getOutputExprInOrderByScope(),
                        queryBlock.getColumnOutputNames(),
                        builder.getFieldMappings(),
                        queryBlock.getOrderScope(),
                        queryBlock.isDistinct());
            }
        }

        builder = distinct(builder, queryBlock.isDistinct(), queryBlock.getOutputExpression());
        builder = project(builder, Iterables.concat(queryBlock.getOrderByExpressions(),
                queryBlock.getOutputExpression()));
        List<ColumnRefOperator> orderByColumns = Lists.newArrayList();
        builder = sort(builder, queryBlock.getOrderBy(), orderByColumns);
        builder = limit(builder, queryBlock.getLimit());

        List<ColumnRefOperator> outputColumns =
                computeOutputs(builder, queryBlock.getOutputExpression(), columnRefFactory);

        // If the sort introduced columns not in the output, add a final pruning project.
        if (!orderByColumns.isEmpty() && !outputColumns.containsAll(orderByColumns)) {
            long limit = queryBlock.hasLimit() ? queryBlock.getLimit().getLimit() : -1;
            builder = project(builder, queryBlock.getOutputExpression(), limit);
        }
        return new LogicalPlan(builder, outputColumns, correlation);
    }

    // Resolves each output expression to its column ref in the final expression mapping.
    private static List<ColumnRefOperator> computeOutputs(OptExprBuilder builder, List<Expr> outputExpressions,
                                                          ColumnRefFactory columnRefFactory) {
        List<ColumnRefOperator> outputs = new ArrayList<>();
        for (Expr expression : outputExpressions) {
            outputs.add((ColumnRefOperator) SqlToScalarOperatorTranslator
                    .translate(expression, builder.getExpressionMapping(), columnRefFactory));
        }
        return outputs;
    }

    // Plans the FROM clause via RelationTransformer, starting from an empty outer scope.
    private OptExprBuilder planFrom(Relation node, CTETransformerContext cteContext) {
        TransformerContext transformerContext = new TransformerContext(
                columnRefFactory, session,
                new ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields())),
                cteContext, inlineView, optToAstMap);
        return new RelationTransformer(transformerContext).visit(node).getRootBuilder();
    }

    /**
     * Builds the projection that feeds ORDER BY: translates each output (and
     * order-source) expression, records under which symbol it is visible in the
     * order-by scope, and — when no aggregation is present — forwards the source
     * column refs into the field mappings.
     */
    private OptExprBuilder projectForOrder(OptExprBuilder subOpt, Iterable<Expr> outputExpression,
                                           List<Integer> outputExprInOrderByScope,
                                           List<String> outputNames,
                                           List<ColumnRefOperator> sourceExpression,
                                           Scope scope,
                                           boolean withAggregation) {
        ExpressionMapping outputTranslations = new ExpressionMapping(scope);
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        int outputExprIdx = 0;
        for (Expr expression : outputExpression) {
            Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap();
            ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression,
                    subOpt.getExpressionMapping(), columnRefFactory,
                    session, cteContext, subOpt, subqueryPlaceholders, false);
            // Scalar subqueries are rewritten into the plan; the builder may change here.
            Pair<ScalarOperator, OptExprBuilder> pair =
                    SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders);
            scalarOperator = pair.first;
            subOpt = pair.second;
            ColumnRefOperator columnRefOperator =
                    getOrCreateColumnRefOperator(expression, scalarOperator, projections);
            projections.put(columnRefOperator, scalarOperator);
            if (outputExprInOrderByScope.contains(outputExprIdx)) {
                // Expose this output under its alias so ORDER BY can reference it by name.
                outputTranslations.putWithSymbol(expression,
                        new SlotRef(null, outputNames.get(outputExprIdx)), columnRefOperator);
            } else {
                outputTranslations.putWithSymbol(expression, expression, columnRefOperator);
            }
            outputExprIdx++;
        }
        if (!withAggregation) {
            List<ColumnRefOperator> fieldMappings = new ArrayList<>(outputTranslations.getFieldMappings());
            for (int i = 0; i < sourceExpression.size(); ++i) {
                ColumnRefOperator columnRefOperator = sourceExpression.get(i);
                projections.put(columnRefOperator, columnRefOperator);
                fieldMappings.set(scope.getRelationFields().size() + i, columnRefOperator);
            }
            outputTranslations.setFieldMappings(fieldMappings);
        }
        outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns());
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections);
        return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations);
    }

    // Convenience overload: project with no limit pushed into the operator.
    private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions) {
        return project(subOpt, expressions, -1);
    }

    /**
     * Adds a LogicalProjectOperator computing the given expressions; subqueries
     * inside them are rewritten into the plan as they are translated.
     *
     * @param limit limit to attach to the project operator, or -1 for none.
     */
    private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions, long limit) {
        ExpressionMapping outputTranslations =
                new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings());
        Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap();
        for (Expr expression : expressions) {
            Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap();
            ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression,
                    subOpt.getExpressionMapping(), columnRefFactory,
                    session, cteContext, subOpt, subqueryPlaceholders, false);
            Pair<ScalarOperator, OptExprBuilder> pair =
                    SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders);
            scalarOperator = pair.first;
            subOpt = pair.second;
            ColumnRefOperator columnRefOperator =
                    getOrCreateColumnRefOperator(expression, scalarOperator, projections);
            projections.put(columnRefOperator, scalarOperator);
            outputTranslations.put(expression, columnRefOperator);
        }
        outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns());
        LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections, limit);
        return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations);
    }

    /**
     * Adds a LogicalFilterOperator for the given predicate (WHERE or HAVING).
     * Correlated column refs are collected into {@code correlation}; subqueries in
     * the predicate are rewritten into the plan. No-op when the predicate is null
     * or rewrites away entirely.
     */
    private OptExprBuilder filter(OptExprBuilder subOpt, Expr predicate) {
        if (predicate == null) {
            return subOpt;
        }
        Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap();
        ScalarOperator scalarPredicate = SqlToScalarOperatorTranslator.translate(predicate,
                subOpt.getExpressionMapping(), correlation, columnRefFactory,
                session, cteContext, subOpt, subqueryPlaceholders, true);
        Pair<ScalarOperator, OptExprBuilder> pair =
                SubqueryUtils.rewriteScalarOperator(scalarPredicate, subOpt, subqueryPlaceholders);
        scalarPredicate = pair.first;
        subOpt = pair.second;
        if (scalarPredicate == null) {
            return subOpt;
        }
        LogicalFilterOperator filterOperator = new LogicalFilterOperator(scalarPredicate);
        return subOpt.withNewRoot(filterOperator);
    }

    // Adds a LogicalLimitOperator for LIMIT/OFFSET; no-op when absent.
    private OptExprBuilder limit(OptExprBuilder subOpt, LimitElement limit) {
        if (limit == null) {
            return subOpt;
        }
        LogicalLimitOperator limitOperator = LogicalLimitOperator.init(limit.getLimit(), limit.getOffset());
        return subOpt.withNewRoot(limitOperator);
    }

    /**
     * Plans GROUP BY / aggregate functions, including grouping sets via a
     * LogicalRepeatOperator and GROUPING()/GROUPING_ID() pseudo-columns.
     * No-op when there is neither aggregation nor grouping.
     */
    public OptExprBuilder aggregate(OptExprBuilder subOpt, List<Expr> groupByExpressions,
                                    List<FunctionCallExpr> aggregates,
                                    List<List<Expr>> groupingSetsList, List<Expr> groupingFunctionCallExprs) {
        if (aggregates.size() == 0 && groupByExpressions.size() == 0) {
            return subOpt;
        }

        List<FunctionCallExpr> copyAggregates;
        if (groupingSetsList != null) {
            // With grouping sets, group-by exprs inside aggregates are cloned so the
            // repeat operator's NULL-ing of group columns does not affect the aggregate input.
            copyAggregates = aggregates.stream().map(e -> (FunctionCallExpr) e.clone())
                    .collect(Collectors.toList());
            for (Expr groupBy : groupByExpressions) {
                copyAggregates.replaceAll(
                        root -> (FunctionCallExpr) replaceExprBottomUp(root, groupBy, new CloneExpr(groupBy)));
            }
        } else {
            copyAggregates = aggregates;
        }

        // Project group-by exprs and non-constant aggregate arguments below the aggregation.
        ImmutableList.Builder<Expr> arguments = ImmutableList.builder();
        copyAggregates.stream().filter(f -> !f.getParams().isStar())
                .map(TreeNode::getChildren).flatMap(List::stream)
                .filter(e -> !(e.isConstant())).forEach(arguments::add);
        Iterable<Expr> inputs = Iterables.concat(groupByExpressions, arguments.build());
        if (!Iterables.isEmpty(inputs)) {
            subOpt = project(subOpt, inputs);
        }

        ExpressionMapping groupingTranslations =
                new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings());
        List<ColumnRefOperator> groupByColumnRefs = new ArrayList<>();
        boolean groupAllConst = groupByExpressions.stream().allMatch(Expr::isConstant);
        for (Expr groupingItem : groupByExpressions) {
            // Constant group-by items are droppable, except: keep one when ALL items are
            // constant (grouping must not become empty), and keep all under grouping sets.
            if (groupingItem.isConstant() && !(groupAllConst && groupByColumnRefs.isEmpty())
                    && groupingSetsList == null) {
                continue;
            }
            ScalarOperator groupingKey = SqlToScalarOperatorTranslator.translate(groupingItem,
                    subOpt.getExpressionMapping(), columnRefFactory);
            ColumnRefOperator colRef = (ColumnRefOperator) groupingKey;
            if (!groupByColumnRefs.contains(colRef)) {
                groupByColumnRefs.add(colRef);
            }
            groupingTranslations.put(groupingItem, colRef);
        }

        Map<ColumnRefOperator, CallOperator> aggregationsMap = Maps.newHashMap();
        for (int i = 0; i < aggregates.size(); i++) {
            FunctionCallExpr copyAggregate = copyAggregates.get(i);
            ScalarOperator aggCallOperator = SqlToScalarOperatorTranslator.translate(copyAggregate,
                    subOpt.getExpressionMapping(), columnRefFactory);
            CallOperator aggOperator = (CallOperator) aggCallOperator;
            ColumnRefOperator colRef = columnRefFactory.create(aggOperator.getFnName(),
                    copyAggregate.getType(), copyAggregate.isNullable());
            aggregationsMap.put(colRef, aggOperator);
            // Map the ORIGINAL (uncloned) aggregate expr so output references resolve.
            groupingTranslations.put(aggregates.get(i), colRef);
        }

        if (groupingSetsList != null) {
            /*
             * repeatOutput is used to record the output column of repeatOperator,
             * this output column only represents the generated grouping_id column
             */
            List<ColumnRefOperator> repeatOutput = new ArrayList<>();
            /*
             * groupingIdsBitSets is used to record the complete grouping_id,
             * which contains all the group by columns.
             * groupingIds is converted by groupingIdsBitSets
             */
            ArrayList<BitSet> groupingIdsBitSets = new ArrayList<>();
            List<List<Long>> groupingIds = new ArrayList<>();
            /*
             * repeatColumnRefList is used to record the column reference
             * that needs to be repeatedly calculated.
             * This column reference is come from the child of repeat operator
             */
            List<List<ColumnRefOperator>> repeatColumnRefList = new ArrayList<>();
            for (List<Expr> grouping : groupingSetsList) {
                List<ColumnRefOperator> repeatColumnRef = new ArrayList<>();
                // Bit set = 1 means the column is NULLed (absent) in this grouping set.
                BitSet groupingIdBitSet = new BitSet(groupByColumnRefs.size());
                groupingIdBitSet.set(0, groupByExpressions.size(), true);
                for (Expr groupingField : grouping) {
                    ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator.translate(
                            groupingField, subOpt.getExpressionMapping(), columnRefFactory);
                    repeatColumnRef.add(groupingKey);
                    if (groupByColumnRefs.contains(groupingKey)) {
                        groupingIdBitSet.set(groupByColumnRefs.indexOf(groupingKey), false);
                    }
                }
                groupingIdsBitSets.add(groupingIdBitSet);
                repeatColumnRefList.add(repeatColumnRef);
            }

            ColumnRefOperator grouping = columnRefFactory.create(GROUPING_ID, Type.BIGINT, false);
            List<Long> groupingID = new ArrayList<>();
            for (BitSet bitSet : groupingIdsBitSets) {
                long gid = Utils.convertBitSetToLong(bitSet, groupByColumnRefs.size());
                // Identical grouping sets must still get distinct ids; bump by 2^n until unique.
                while (groupingID.contains(gid)) {
                    gid += Math.pow(2, groupByColumnRefs.size());
                }
                groupingID.add(gid);
            }
            groupingIds.add(groupingID);
            groupByColumnRefs.add(grouping);
            repeatOutput.add(grouping);

            // One extra virtual column per GROUPING()/GROUPING_ID() call in the query.
            for (Expr groupingFunction : groupingFunctionCallExprs) {
                grouping = columnRefFactory.create(GROUPING, Type.BIGINT, false);
                ArrayList<BitSet> tempGroupingIdsBitSets = new ArrayList<>();
                for (int i = 0; i < repeatColumnRefList.size(); ++i) {
                    tempGroupingIdsBitSets.add(new BitSet(groupingFunction.getChildren().size()));
                }
                for (int childIdx = 0; childIdx < groupingFunction.getChildren().size(); ++childIdx) {
                    SlotRef slotRef = (SlotRef) groupingFunction.getChild(childIdx);
                    ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator
                            .translate(slotRef, subOpt.getExpressionMapping(), columnRefFactory);
                    for (List<ColumnRefOperator> repeatColumns : repeatColumnRefList) {
                        if (repeatColumns.contains(groupingKey)) {
                            for (int repeatColIdx = 0; repeatColIdx < repeatColumnRefList.size(); ++repeatColIdx) {
                                tempGroupingIdsBitSets.get(repeatColIdx).set(childIdx,
                                        groupingIdsBitSets.get(repeatColIdx)
                                                .get(groupByColumnRefs.indexOf(groupingKey)));
                            }
                        }
                    }
                }
                groupingTranslations.put(groupingFunction, grouping);
                groupingIds.add(tempGroupingIdsBitSets.stream().map(bitset ->
                        Utils.convertBitSetToLong(bitset, groupingFunction.getChildren().size()))
                        .collect(Collectors.toList()));
                groupByColumnRefs.add(grouping);
                repeatOutput.add(grouping);
            }
            LogicalRepeatOperator repeatOperator =
                    new LogicalRepeatOperator(repeatOutput, repeatColumnRefList, groupingIds);
            subOpt = new OptExprBuilder(repeatOperator, Lists.newArrayList(subOpt), groupingTranslations);
        }

        return new OptExprBuilder(
                new LogicalAggregationOperator(AggType.GLOBAL, groupByColumnRefs, aggregationsMap),
                Lists.newArrayList(subOpt), groupingTranslations);
    }

    // Bottom-up replacement of every sub-expression equal to `pattern` with `replace`.
    private Expr replaceExprBottomUp(Expr root, Expr pattern, Expr replace) {
        if (root.getChildren().size() > 0) {
            for (int i = 0; i < root.getChildren().size(); i++) {
                Expr result = replaceExprBottomUp(root.getChild(i), pattern, replace);
                root.setChild(i, result);
            }
        }
        if (root.equals(pattern)) {
            return replace;
        }
        return root;
    }

    /**
     * Adds a LogicalTopNOperator for ORDER BY. Literal order-by items and
     * duplicate columns are skipped; the used column refs are appended to
     * {@code orderByColumns} for the caller.
     */
    private OptExprBuilder sort(OptExprBuilder subOpt, List<OrderByElement> orderByExpressions,
                                List<ColumnRefOperator> orderByColumns) {
        if (orderByExpressions.isEmpty()) {
            return subOpt;
        }
        List<Ordering> orderings = new ArrayList<>();
        for (OrderByElement item : orderByExpressions) {
            if (item.getExpr().isLiteral()) {
                // Ordering by a constant is a no-op.
                continue;
            }
            ColumnRefOperator column = (ColumnRefOperator) SqlToScalarOperatorTranslator
                    .translate(item.getExpr(), subOpt.getExpressionMapping(), columnRefFactory);
            Ordering ordering = new Ordering(column, item.getIsAsc(),
                    OrderByElement.nullsFirst(item.getNullsFirstParam()));
            if (!orderByColumns.contains(column)) {
                orderings.add(ordering);
                orderByColumns.add(column);
            }
        }
        if (orderByColumns.isEmpty()) {
            return subOpt;
        }
        LogicalTopNOperator sortOperator = new LogicalTopNOperator(orderings);
        return subOpt.withNewRoot(sortOperator);
    }

    /**
     * Implements SELECT DISTINCT as a global aggregation over the output
     * expressions with no aggregate functions; no-op otherwise.
     */
    private OptExprBuilder distinct(OptExprBuilder subOpt, boolean isDistinct, List<Expr> outputExpressions) {
        if (isDistinct) {
            subOpt = project(subOpt, outputExpressions);
            List<ColumnRefOperator> groupByColumns = Lists.newArrayList();
            for (Expr expr : outputExpressions) {
                ColumnRefOperator column = (ColumnRefOperator) SqlToScalarOperatorTranslator
                        .translate(expr, subOpt.getExpressionMapping(), columnRefFactory);
                if (!groupByColumns.contains(column)) {
                    groupByColumns.add(column);
                }
            }
            return subOpt.withNewRoot(
                    new LogicalAggregationOperator(AggType.GLOBAL, groupByColumns, new HashMap<>()));
        } else {
            return subOpt;
        }
    }

    /**
     * Returns a column ref for {@code scalarOperator}: the operator itself when it
     * already is one, the existing key when an equal operator is already projected,
     * or a freshly created ref otherwise.
     */
    private ColumnRefOperator getOrCreateColumnRefOperator(Expr expression, ScalarOperator scalarOperator,
                                                           Map<ColumnRefOperator, ScalarOperator> projections) {
        ColumnRefOperator columnRefOperator;
        if (scalarOperator.isColumnRef()) {
            columnRefOperator = (ColumnRefOperator) scalarOperator;
        } else if (scalarOperator.isVariable() && projections.containsValue(scalarOperator)) {
            // Reuse the key of an already-projected equal operator to avoid duplicates.
            columnRefOperator = projections.entrySet().stream()
                    .filter(e -> scalarOperator.equals(e.getValue()))
                    .findAny()
                    .map(Map.Entry::getKey)
                    .orElse(null);
            Preconditions.checkNotNull(columnRefOperator);
        } else {
            columnRefOperator =
                    columnRefFactory.create(expression, expression.getType(), scalarOperator.isNullable());
        }
        return columnRefOperator;
    }
}
class QueryTransformer { private final ColumnRefFactory columnRefFactory; private final ConnectContext session; private final List<ColumnRefOperator> correlation = new ArrayList<>(); private final CTETransformerContext cteContext; private final boolean inlineView; private final Map<Operator, ParseNode> optToAstMap; public static final String GROUPING_ID = "GROUPING_ID"; public static final String GROUPING = "GROUPING"; public QueryTransformer(ColumnRefFactory columnRefFactory, ConnectContext session, CTETransformerContext cteContext, boolean inlineView, Map<Operator, ParseNode> optToAstMap) { this.columnRefFactory = columnRefFactory; this.session = session; this.cteContext = cteContext; this.inlineView = inlineView; this.optToAstMap = optToAstMap; } public LogicalPlan plan(SelectRelation queryBlock, ExpressionMapping outer) { OptExprBuilder builder = planFrom(queryBlock.getRelation(), cteContext); builder.setExpressionMapping(new ExpressionMapping(builder.getScope(), builder.getFieldMappings(), outer)); Map<Expr, SlotRef> generatedExprToColumnRef = queryBlock.getGeneratedExprToColumnRef(); ExpressionMapping expressionMapping = builder.getExpressionMapping(); for (Map.Entry<Expr, SlotRef> m : generatedExprToColumnRef.entrySet()) { ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(m.getValue(), builder.getExpressionMapping(), columnRefFactory); expressionMapping.put(m.getKey(), (ColumnRefOperator) scalarOperator); } builder = filter(builder, queryBlock.getPredicate()); builder = aggregate(builder, queryBlock.getGroupBy(), queryBlock.getAggregate(), queryBlock.getGroupingSetsList(), queryBlock.getGroupingFunctionCallExprs()); builder = filter(builder, queryBlock.getHaving()); List<AnalyticExpr> analyticExprList = new ArrayList<>(queryBlock.getOutputAnalytic()); analyticExprList.addAll(queryBlock.getOrderByAnalytic()); builder = window(builder, analyticExprList); if (queryBlock.hasOrderByClause()) { if (!queryBlock.getGroupBy().isEmpty() || 
!queryBlock.getAggregate().isEmpty()) { List<String> outputNames = new ArrayList<>(queryBlock.getColumnOutputNames()); for (int i = 0; i < queryBlock.getOrderSourceExpressions().size(); ++i) { outputNames.add(queryBlock.getOrderSourceExpressions().get(i).toString()); } builder = projectForOrder(builder, Iterables.concat(queryBlock.getOutputExpression(), queryBlock.getOrderSourceExpressions(), queryBlock.getOrderByAnalytic()), queryBlock.getOutputExprInOrderByScope(), outputNames, builder.getFieldMappings(), queryBlock.getOrderScope(), true); } else { builder = projectForOrder(builder, Iterables.concat(queryBlock.getOutputExpression(), queryBlock.getOrderByAnalytic()), queryBlock.getOutputExprInOrderByScope(), queryBlock.getColumnOutputNames(), builder.getFieldMappings(), queryBlock.getOrderScope(), queryBlock.isDistinct()); } } builder = distinct(builder, queryBlock.isDistinct(), queryBlock.getOutputExpression()); builder = project(builder, Iterables.concat(queryBlock.getOrderByExpressions(), queryBlock.getOutputExpression())); List<ColumnRefOperator> orderByColumns = Lists.newArrayList(); builder = sort(builder, queryBlock.getOrderBy(), orderByColumns); builder = limit(builder, queryBlock.getLimit()); List<ColumnRefOperator> outputColumns = computeOutputs(builder, queryBlock.getOutputExpression(), columnRefFactory); if (!orderByColumns.isEmpty() && !outputColumns.containsAll(orderByColumns)) { long limit = queryBlock.hasLimit() ? 
queryBlock.getLimit().getLimit() : -1; builder = project(builder, queryBlock.getOutputExpression(), limit); } return new LogicalPlan(builder, outputColumns, correlation); } private static List<ColumnRefOperator> computeOutputs(OptExprBuilder builder, List<Expr> outputExpressions, ColumnRefFactory columnRefFactory) { List<ColumnRefOperator> outputs = new ArrayList<>(); for (Expr expression : outputExpressions) { outputs.add((ColumnRefOperator) SqlToScalarOperatorTranslator .translate(expression, builder.getExpressionMapping(), columnRefFactory)); } return outputs; } private OptExprBuilder planFrom(Relation node, CTETransformerContext cteContext) { TransformerContext transformerContext = new TransformerContext( columnRefFactory, session, new ExpressionMapping(new Scope(RelationId.anonymous(), new RelationFields())), cteContext, inlineView, optToAstMap); return new RelationTransformer(transformerContext).visit(node).getRootBuilder(); } private OptExprBuilder projectForOrder(OptExprBuilder subOpt, Iterable<Expr> outputExpression, List<Integer> outputExprInOrderByScope, List<String> outputNames, List<ColumnRefOperator> sourceExpression, Scope scope, boolean withAggregation) { ExpressionMapping outputTranslations = new ExpressionMapping(scope); Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap(); int outputExprIdx = 0; for (Expr expression : outputExpression) { Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap(); ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression, subOpt.getExpressionMapping(), columnRefFactory, session, cteContext, subOpt, subqueryPlaceholders, false); Pair<ScalarOperator, OptExprBuilder> pair = SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders); scalarOperator = pair.first; subOpt = pair.second; ColumnRefOperator columnRefOperator = getOrCreateColumnRefOperator(expression, scalarOperator, projections); projections.put(columnRefOperator, 
scalarOperator); if (outputExprInOrderByScope.contains(outputExprIdx)) { outputTranslations.putWithSymbol(expression, new SlotRef(null, outputNames.get(outputExprIdx)), columnRefOperator); } else { outputTranslations.putWithSymbol(expression, expression, columnRefOperator); } outputExprIdx++; } if (!withAggregation) { List<ColumnRefOperator> fieldMappings = new ArrayList<>(outputTranslations.getFieldMappings()); for (int i = 0; i < sourceExpression.size(); ++i) { ColumnRefOperator columnRefOperator = sourceExpression.get(i); projections.put(columnRefOperator, columnRefOperator); fieldMappings.set(scope.getRelationFields().size() + i, columnRefOperator); } outputTranslations.setFieldMappings(fieldMappings); } outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns()); LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections); return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations); } private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions) { return project(subOpt, expressions, -1); } private OptExprBuilder project(OptExprBuilder subOpt, Iterable<Expr> expressions, long limit) { ExpressionMapping outputTranslations = new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings()); Map<ColumnRefOperator, ScalarOperator> projections = Maps.newHashMap(); for (Expr expression : expressions) { Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap(); ScalarOperator scalarOperator = SqlToScalarOperatorTranslator.translate(expression, subOpt.getExpressionMapping(), columnRefFactory, session, cteContext, subOpt, subqueryPlaceholders, false); Pair<ScalarOperator, OptExprBuilder> pair = SubqueryUtils.rewriteScalarOperator(scalarOperator, subOpt, subqueryPlaceholders); scalarOperator = pair.first; subOpt = pair.second; ColumnRefOperator columnRefOperator = getOrCreateColumnRefOperator(expression, scalarOperator, projections); 
projections.put(columnRefOperator, scalarOperator); outputTranslations.put(expression, columnRefOperator); } outputTranslations.addExpressionToColumns(subOpt.getExpressionMapping().getExpressionToColumns()); LogicalProjectOperator projectOperator = new LogicalProjectOperator(projections, limit); return new OptExprBuilder(projectOperator, Lists.newArrayList(subOpt), outputTranslations); } private OptExprBuilder filter(OptExprBuilder subOpt, Expr predicate) { if (predicate == null) { return subOpt; } Map<ScalarOperator, SubqueryOperator> subqueryPlaceholders = Maps.newHashMap(); ScalarOperator scalarPredicate = SqlToScalarOperatorTranslator.translate(predicate, subOpt.getExpressionMapping(), correlation, columnRefFactory, session, cteContext, subOpt, subqueryPlaceholders, true); Pair<ScalarOperator, OptExprBuilder> pair = SubqueryUtils.rewriteScalarOperator(scalarPredicate, subOpt, subqueryPlaceholders); scalarPredicate = pair.first; subOpt = pair.second; if (scalarPredicate == null) { return subOpt; } LogicalFilterOperator filterOperator = new LogicalFilterOperator(scalarPredicate); return subOpt.withNewRoot(filterOperator); } private OptExprBuilder limit(OptExprBuilder subOpt, LimitElement limit) { if (limit == null) { return subOpt; } LogicalLimitOperator limitOperator = LogicalLimitOperator.init(limit.getLimit(), limit.getOffset()); return subOpt.withNewRoot(limitOperator); } public OptExprBuilder aggregate(OptExprBuilder subOpt, List<Expr> groupByExpressions, List<FunctionCallExpr> aggregates, List<List<Expr>> groupingSetsList, List<Expr> groupingFunctionCallExprs) { if (aggregates.size() == 0 && groupByExpressions.size() == 0) { return subOpt; } List<FunctionCallExpr> copyAggregates; if (groupingSetsList != null) { copyAggregates = aggregates.stream().map(e -> (FunctionCallExpr) e.clone()) .collect(Collectors.toList()); for (Expr groupBy : groupByExpressions) { copyAggregates.replaceAll( root -> (FunctionCallExpr) replaceExprBottomUp(root, groupBy, new 
CloneExpr(groupBy))); } } else { copyAggregates = aggregates; } ImmutableList.Builder<Expr> arguments = ImmutableList.builder(); copyAggregates.stream().filter(f -> !f.getParams().isStar()) .map(TreeNode::getChildren).flatMap(List::stream) .filter(e -> !(e.isConstant())).forEach(arguments::add); Iterable<Expr> inputs = Iterables.concat(groupByExpressions, arguments.build()); if (!Iterables.isEmpty(inputs)) { subOpt = project(subOpt, inputs); } ExpressionMapping groupingTranslations = new ExpressionMapping(subOpt.getScope(), subOpt.getFieldMappings()); List<ColumnRefOperator> groupByColumnRefs = new ArrayList<>(); boolean groupAllConst = groupByExpressions.stream().allMatch(Expr::isConstant); for (Expr groupingItem : groupByExpressions) { if (groupingItem.isConstant() && !(groupAllConst && groupByColumnRefs.isEmpty()) && groupingSetsList == null) { continue; } ScalarOperator groupingKey = SqlToScalarOperatorTranslator.translate(groupingItem, subOpt.getExpressionMapping(), columnRefFactory); ColumnRefOperator colRef = (ColumnRefOperator) groupingKey; if (!groupByColumnRefs.contains(colRef)) { groupByColumnRefs.add(colRef); } groupingTranslations.put(groupingItem, colRef); } Map<ColumnRefOperator, CallOperator> aggregationsMap = Maps.newHashMap(); for (int i = 0; i < aggregates.size(); i++) { FunctionCallExpr copyAggregate = copyAggregates.get(i); ScalarOperator aggCallOperator = SqlToScalarOperatorTranslator.translate(copyAggregate, subOpt.getExpressionMapping(), columnRefFactory); CallOperator aggOperator = (CallOperator) aggCallOperator; ColumnRefOperator colRef = columnRefFactory.create(aggOperator.getFnName(), copyAggregate.getType(), copyAggregate.isNullable()); aggregationsMap.put(colRef, aggOperator); groupingTranslations.put(aggregates.get(i), colRef); } if (groupingSetsList != null) { /* * repeatOutput is used to record the output column of repeatOperator, * this output column only represents the generated grouping_id column */ List<ColumnRefOperator> 
repeatOutput = new ArrayList<>(); /* * groupingIdsBitSets is used to record the complete grouping_id, * which contains all the group by columns. * groupingIds is converted by groupingIdsBitSets */ ArrayList<BitSet> groupingIdsBitSets = new ArrayList<>(); List<List<Long>> groupingIds = new ArrayList<>(); /* * repeatColumnRefList is used to record the column reference * that needs to be repeatedly calculated. * This column reference is come from the child of repeat operator */ List<List<ColumnRefOperator>> repeatColumnRefList = new ArrayList<>(); for (List<Expr> grouping : groupingSetsList) { List<ColumnRefOperator> repeatColumnRef = new ArrayList<>(); BitSet groupingIdBitSet = new BitSet(groupByColumnRefs.size()); groupingIdBitSet.set(0, groupByExpressions.size(), true); for (Expr groupingField : grouping) { ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator.translate( groupingField, subOpt.getExpressionMapping(), columnRefFactory); repeatColumnRef.add(groupingKey); if (groupByColumnRefs.contains(groupingKey)) { groupingIdBitSet.set(groupByColumnRefs.indexOf(groupingKey), false); } } groupingIdsBitSets.add(groupingIdBitSet); repeatColumnRefList.add(repeatColumnRef); } ColumnRefOperator grouping = columnRefFactory.create(GROUPING_ID, Type.BIGINT, false); List<Long> groupingID = new ArrayList<>(); for (BitSet bitSet : groupingIdsBitSets) { long gid = Utils.convertBitSetToLong(bitSet, groupByColumnRefs.size()); while (groupingID.contains(gid)) { gid += Math.pow(2, groupByColumnRefs.size()); } groupingID.add(gid); } groupingIds.add(groupingID); groupByColumnRefs.add(grouping); repeatOutput.add(grouping); for (Expr groupingFunction : groupingFunctionCallExprs) { grouping = columnRefFactory.create(GROUPING, Type.BIGINT, false); ArrayList<BitSet> tempGroupingIdsBitSets = new ArrayList<>(); for (int i = 0; i < repeatColumnRefList.size(); ++i) { tempGroupingIdsBitSets.add(new BitSet(groupingFunction.getChildren().size())); } for (int childIdx = 
0; childIdx < groupingFunction.getChildren().size(); ++childIdx) { SlotRef slotRef = (SlotRef) groupingFunction.getChild(childIdx); ColumnRefOperator groupingKey = (ColumnRefOperator) SqlToScalarOperatorTranslator .translate(slotRef, subOpt.getExpressionMapping(), columnRefFactory); for (List<ColumnRefOperator> repeatColumns : repeatColumnRefList) { if (repeatColumns.contains(groupingKey)) { for (int repeatColIdx = 0; repeatColIdx < repeatColumnRefList.size(); ++repeatColIdx) { tempGroupingIdsBitSets.get(repeatColIdx).set(childIdx, groupingIdsBitSets.get(repeatColIdx) .get(groupByColumnRefs.indexOf(groupingKey))); } } } } groupingTranslations.put(groupingFunction, grouping); groupingIds.add(tempGroupingIdsBitSets.stream().map(bitset -> Utils.convertBitSetToLong(bitset, groupingFunction.getChildren().size())) .collect(Collectors.toList())); groupByColumnRefs.add(grouping); repeatOutput.add(grouping); } LogicalRepeatOperator repeatOperator = new LogicalRepeatOperator(repeatOutput, repeatColumnRefList, groupingIds); subOpt = new OptExprBuilder(repeatOperator, Lists.newArrayList(subOpt), groupingTranslations); } return new OptExprBuilder( new LogicalAggregationOperator(AggType.GLOBAL, groupByColumnRefs, aggregationsMap), Lists.newArrayList(subOpt), groupingTranslations); } private Expr replaceExprBottomUp(Expr root, Expr pattern, Expr replace) { if (root.getChildren().size() > 0) { for (int i = 0; i < root.getChildren().size(); i++) { Expr result = replaceExprBottomUp(root.getChild(i), pattern, replace); root.setChild(i, result); } } if (root.equals(pattern)) { return replace; } return root; } private OptExprBuilder sort(OptExprBuilder subOpt, List<OrderByElement> orderByExpressions, List<ColumnRefOperator> orderByColumns) { if (orderByExpressions.isEmpty()) { return subOpt; } List<Ordering> orderings = new ArrayList<>(); for (OrderByElement item : orderByExpressions) { if (item.getExpr().isLiteral()) { continue; } ColumnRefOperator column = (ColumnRefOperator) 
SqlToScalarOperatorTranslator.translate(item.getExpr(), subOpt.getExpressionMapping(), columnRefFactory); Ordering ordering = new Ordering(column, item.getIsAsc(), OrderByElement.nullsFirst(item.getNullsFirstParam())); if (!orderByColumns.contains(column)) { orderings.add(ordering); orderByColumns.add(column); } } if (orderByColumns.isEmpty()) { return subOpt; } LogicalTopNOperator sortOperator = new LogicalTopNOperator(orderings); return subOpt.withNewRoot(sortOperator); } private OptExprBuilder distinct(OptExprBuilder subOpt, boolean isDistinct, List<Expr> outputExpressions) { if (isDistinct) { subOpt = project(subOpt, outputExpressions); List<ColumnRefOperator> groupByColumns = Lists.newArrayList(); for (Expr expr : outputExpressions) { ColumnRefOperator column = (ColumnRefOperator) SqlToScalarOperatorTranslator .translate(expr, subOpt.getExpressionMapping(), columnRefFactory); if (!groupByColumns.contains(column)) { groupByColumns.add(column); } } return subOpt.withNewRoot( new LogicalAggregationOperator(AggType.GLOBAL, groupByColumns, new HashMap<>())); } else { return subOpt; } } private ColumnRefOperator getOrCreateColumnRefOperator(Expr expression, ScalarOperator scalarOperator, Map<ColumnRefOperator, ScalarOperator> projections) { ColumnRefOperator columnRefOperator; if (scalarOperator.isColumnRef()) { columnRefOperator = (ColumnRefOperator) scalarOperator; } else if (scalarOperator.isVariable() && projections.containsValue(scalarOperator)) { columnRefOperator = projections.entrySet().stream() .filter(e -> scalarOperator.equals(e.getValue())) .findAny() .map(Map.Entry::getKey) .orElse(null); Preconditions.checkNotNull(columnRefOperator); } else { columnRefOperator = columnRefFactory.create(expression, expression.getType(), scalarOperator.isNullable()); } return columnRefOperator; } }
Can you please make this move in a different commit? Because if we have to revert it something, we don't want to revert the whole thing
private Type getNonAsyncReturnType(Type returnType) { switch (returnType.kind()) { case ARRAY: case CLASS: case PRIMITIVE: case VOID: return returnType; case PARAMETERIZED_TYPE: ParameterizedType parameterizedType = returnType.asParameterizedType(); if (COMPLETION_STAGE.equals(parameterizedType.name()) || COMPLETABLE_FUTURE.equals(parameterizedType.name()) || UNI.equals(parameterizedType.name()) || MULTI.equals(parameterizedType.name()) || REST_RESPONSE.equals(parameterizedType.name())) { return parameterizedType.arguments().get(0); } return returnType; default: } return returnType; }
private Type getNonAsyncReturnType(Type returnType) { switch (returnType.kind()) { case ARRAY: case CLASS: case PRIMITIVE: case VOID: return returnType; case PARAMETERIZED_TYPE: ParameterizedType parameterizedType = returnType.asParameterizedType(); if (COMPLETION_STAGE.equals(parameterizedType.name()) || COMPLETABLE_FUTURE.equals(parameterizedType.name()) || UNI.equals(parameterizedType.name()) || MULTI.equals(parameterizedType.name()) || REST_RESPONSE.equals(parameterizedType.name())) { return parameterizedType.arguments().get(0); } return returnType; default: } return returnType; }
class LinksContainerFactory { private static final String LIST = "list"; private static final String SELF = "self"; private static final String REMOVE = "remove"; private static final String UPDATE = "update"; private static final String ADD = "add"; /** * Find the resource methods that are marked with a {@link RestLink} annotations and add them to a links container. */ LinksContainer getLinksContainer(List<ResteasyReactiveResourceMethodEntriesBuildItem.Entry> entries, IndexView index) { LinksContainer linksContainer = new LinksContainer(); for (ResteasyReactiveResourceMethodEntriesBuildItem.Entry entry : entries) { MethodInfo resourceMethodInfo = entry.getMethodInfo(); AnnotationInstance restLinkAnnotation = resourceMethodInfo.annotation(DotNames.REST_LINK_ANNOTATION); if (restLinkAnnotation != null) { LinkInfo linkInfo = getLinkInfo(entry.getResourceMethod(), resourceMethodInfo, restLinkAnnotation, entry.getBasicResourceClassInfo().getPath(), index); linksContainer.put(linkInfo); } } return linksContainer; } private LinkInfo getLinkInfo(ResourceMethod resourceMethod, MethodInfo resourceMethodInfo, AnnotationInstance restLinkAnnotation, String resourceClassPath, IndexView index) { Type returnType = getNonAsyncReturnType(resourceMethodInfo.returnType()); String rel = getAnnotationValue(restLinkAnnotation, "rel", deductRel(resourceMethod, returnType, index)); String entityType = getAnnotationValue(restLinkAnnotation, "entityType", deductEntityType(returnType)); String path = UriBuilder.fromPath(resourceClassPath).path(resourceMethod.getPath()).toTemplate(); while (path.endsWith("/")) { path = path.substring(0, path.length() - 1); } Set<String> pathParameters = getPathParameters(path); return new LinkInfo(rel, entityType, path, pathParameters); } /** * When the "rel" property is not set, it will be resolved as follows: * - "list" for GET methods returning a Collection. * - "self" for GET methods returning a non-Collection. * - "remove" for DELETE methods. 
* - "update" for PUT methods. * - "add" for POST methods. * <p> * Otherwise, it will return the method name. * * @param resourceMethod the resource method definition. * @return the deducted rel property. */ private String deductRel(ResourceMethod resourceMethod, Type returnType, IndexView index) { String httpMethod = resourceMethod.getHttpMethod(); boolean isCollection = isCollection(returnType, index); if (HttpMethod.GET.equals(httpMethod) && isCollection) { return LIST; } else if (HttpMethod.GET.equals(httpMethod)) { return SELF; } else if (HttpMethod.DELETE.equals(httpMethod)) { return REMOVE; } else if (HttpMethod.PUT.equals(httpMethod)) { return UPDATE; } else if (HttpMethod.POST.equals(httpMethod)) { return ADD; } return resourceMethod.getName(); } /** * If a method return type is parameterized and has a single argument (e.g. List), then use that argument as an * entity type. Otherwise, use the return type. */ private String deductEntityType(Type returnType) { if (returnType.kind() == Type.Kind.PARAMETERIZED_TYPE) { if (returnType.asParameterizedType().arguments().size() == 1) { return returnType.asParameterizedType().arguments().get(0).name().toString(); } } return returnType.name().toString(); } /** * Extract parameters from a path string */ private Set<String> getPathParameters(String path) { Set<String> names = new HashSet<>(); URLUtils.parsePathParameters(path, names); Set<String> trimmedNames = new HashSet<>(names.size()); for (String name : names) { trimmedNames.add(name.trim()); } return trimmedNames; } private String getAnnotationValue(AnnotationInstance annotationInstance, String name, String defaultValue) { AnnotationValue value = annotationInstance.value(name); if (value == null || value.asString().equals("")) { return defaultValue; } return value.asString(); } private boolean isCollection(Type type, IndexView index) { if (type.kind() == Type.Kind.PRIMITIVE) { return false; } ClassInfo classInfo = index.getClassByName(type.name()); if (classInfo 
== null) { return false; } return classInfo.interfaceNames().stream().anyMatch(DotName.createSimple(Collection.class.getName())::equals); } }
class LinksContainerFactory { private static final String LIST = "list"; private static final String SELF = "self"; private static final String REMOVE = "remove"; private static final String UPDATE = "update"; private static final String ADD = "add"; /** * Find the resource methods that are marked with a {@link RestLink} annotations and add them to a links container. */ LinksContainer getLinksContainer(List<ResteasyReactiveResourceMethodEntriesBuildItem.Entry> entries, IndexView index) { LinksContainer linksContainer = new LinksContainer(); for (ResteasyReactiveResourceMethodEntriesBuildItem.Entry entry : entries) { MethodInfo resourceMethodInfo = entry.getMethodInfo(); AnnotationInstance restLinkAnnotation = resourceMethodInfo.annotation(DotNames.REST_LINK_ANNOTATION); if (restLinkAnnotation != null) { LinkInfo linkInfo = getLinkInfo(entry.getResourceMethod(), resourceMethodInfo, restLinkAnnotation, entry.getBasicResourceClassInfo().getPath(), index); linksContainer.put(linkInfo); } } return linksContainer; } private LinkInfo getLinkInfo(ResourceMethod resourceMethod, MethodInfo resourceMethodInfo, AnnotationInstance restLinkAnnotation, String resourceClassPath, IndexView index) { Type returnType = getNonAsyncReturnType(resourceMethodInfo.returnType()); String rel = getAnnotationValue(restLinkAnnotation, "rel", deductRel(resourceMethod, returnType, index)); String entityType = getAnnotationValue(restLinkAnnotation, "entityType", deductEntityType(returnType)); String path = UriBuilder.fromPath(resourceClassPath).path(resourceMethod.getPath()).toTemplate(); while (path.endsWith("/")) { path = path.substring(0, path.length() - 1); } Set<String> pathParameters = getPathParameters(path); return new LinkInfo(rel, entityType, path, pathParameters); } /** * When the "rel" property is not set, it will be resolved as follows: * - "list" for GET methods returning a Collection. * - "self" for GET methods returning a non-Collection. * - "remove" for DELETE methods. 
* - "update" for PUT methods. * - "add" for POST methods. * <p> * Otherwise, it will return the method name. * * @param resourceMethod the resource method definition. * @return the deducted rel property. */ private String deductRel(ResourceMethod resourceMethod, Type returnType, IndexView index) { String httpMethod = resourceMethod.getHttpMethod(); boolean isCollection = isCollection(returnType, index); if (HttpMethod.GET.equals(httpMethod) && isCollection) { return LIST; } else if (HttpMethod.GET.equals(httpMethod)) { return SELF; } else if (HttpMethod.DELETE.equals(httpMethod)) { return REMOVE; } else if (HttpMethod.PUT.equals(httpMethod)) { return UPDATE; } else if (HttpMethod.POST.equals(httpMethod)) { return ADD; } return resourceMethod.getName(); } /** * If a method return type is parameterized and has a single argument (e.g. List), then use that argument as an * entity type. Otherwise, use the return type. */ private String deductEntityType(Type returnType) { if (returnType.kind() == Type.Kind.PARAMETERIZED_TYPE) { if (returnType.asParameterizedType().arguments().size() == 1) { return returnType.asParameterizedType().arguments().get(0).name().toString(); } } return returnType.name().toString(); } /** * Extract parameters from a path string */ private Set<String> getPathParameters(String path) { Set<String> names = new HashSet<>(); URLUtils.parsePathParameters(path, names); Set<String> trimmedNames = new HashSet<>(names.size()); for (String name : names) { trimmedNames.add(name.trim()); } return trimmedNames; } private String getAnnotationValue(AnnotationInstance annotationInstance, String name, String defaultValue) { AnnotationValue value = annotationInstance.value(name); if (value == null || value.asString().equals("")) { return defaultValue; } return value.asString(); } private boolean isCollection(Type type, IndexView index) { if (type.kind() == Type.Kind.PRIMITIVE) { return false; } ClassInfo classInfo = index.getClassByName(type.name()); if (classInfo 
== null) { return false; } return classInfo.interfaceNames().stream().anyMatch(DotName.createSimple(Collection.class.getName())::equals); } }
I think I wanted to put `this.getClass().getSimpleName()` there. Will update it.
public void close() throws Exception { synchronized (lock) { if (!running) { return; } running = false; LOG.info("Closing {}.", this); ExecutorUtils.gracefulShutdown(10L, TimeUnit.SECONDS, leadershipOperationExecutor); Exception exception = null; try { multipleComponentLeaderElectionDriver.close(); } catch (Exception e) { exception = e; } ExceptionUtils.tryRethrowException(exception); } }
LOG.info("Closing {}.", this);
public void close() throws Exception { synchronized (lock) { if (!running) { return; } running = false; LOG.info("Closing {}.", this.getClass().getSimpleName()); ExecutorUtils.gracefulShutdown(10L, TimeUnit.SECONDS, leadershipOperationExecutor); multipleComponentLeaderElectionDriver.close(); } }
class DefaultMultipleComponentLeaderElectionService implements MultipleComponentLeaderElectionService, MultipleComponentLeaderElectionDriver.Listener { private static final Logger LOG = LoggerFactory.getLogger(DefaultMultipleComponentLeaderElectionService.class); private final Object lock = new Object(); private final MultipleComponentLeaderElectionDriver multipleComponentLeaderElectionDriver; private final FatalErrorHandler fatalErrorHandler; @GuardedBy("lock") private final ExecutorService leadershipOperationExecutor; @GuardedBy("lock") private final Map<String, LeaderElectionEventHandler> leaderElectionEventHandlers; private boolean running = true; @Nullable @GuardedBy("lock") private UUID currentLeaderSessionId = null; @VisibleForTesting DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, String leaderContenderDescription, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory, ExecutorService leadershipOperationExecutor) throws Exception { this.fatalErrorHandler = fatalErrorHandler; this.leadershipOperationExecutor = leadershipOperationExecutor; leaderElectionEventHandlers = new HashMap<>(); multipleComponentLeaderElectionDriver = multipleComponentLeaderElectionDriverFactory.create( leaderContenderDescription, this); } public DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, String leaderContenderDescription, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory) throws Exception { this( fatalErrorHandler, leaderContenderDescription, multipleComponentLeaderElectionDriverFactory, java.util.concurrent.Executors.newSingleThreadExecutor( new ExecutorThreadFactory( String.format( "leadershipOperation-%s", leaderContenderDescription)))); } @Override @Override public LeaderElectionDriverFactory createDriverFactory(String leaderName) { return new MultipleComponentLeaderElectionDriverAdapterFactory(leaderName, this); } @Override 
public void publishLeaderInformation(String leaderName, LeaderInformation leaderInformation) { try { multipleComponentLeaderElectionDriver.publishLeaderInformation( leaderName, leaderInformation); } catch (Exception e) { fatalErrorHandler.onFatalError( new FlinkException( String.format( "Could not write leader information %s for leader %s.", leaderInformation, leaderName), e)); } } @Override public void registerLeaderElectionEventHandler( String componentId, LeaderElectionEventHandler leaderElectionEventHandler) { synchronized (lock) { Preconditions.checkState( !leaderElectionEventHandlers.containsKey(componentId), "Do not support duplicate LeaderElectionEventHandler registration under %s", componentId); leaderElectionEventHandlers.put(componentId, leaderElectionEventHandler); if (currentLeaderSessionId != null) { leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onGrantLeadership(currentLeaderSessionId)); } } } @Override public void unregisterLeaderElectionEventHandler(String componentId) throws Exception { final LeaderElectionEventHandler unregisteredLeaderElectionEventHandler; synchronized (lock) { unregisteredLeaderElectionEventHandler = leaderElectionEventHandlers.remove(componentId); if (unregisteredLeaderElectionEventHandler != null) { leadershipOperationExecutor.execute( unregisteredLeaderElectionEventHandler::onRevokeLeadership); } } multipleComponentLeaderElectionDriver.deleteLeaderInformation(componentId); } @Override public boolean hasLeadership(String leaderName) { synchronized (lock) { Preconditions.checkState(running); return leaderElectionEventHandlers.containsKey(leaderName) && multipleComponentLeaderElectionDriver.hasLeadership(); } } @Override public void isLeader() { final UUID newLeaderSessionId = UUID.randomUUID(); synchronized (lock) { if (!running) { return; } currentLeaderSessionId = UUID.randomUUID(); forEachLeaderElectionEventHandler( leaderElectionEventHandler -> 
leaderElectionEventHandler.onGrantLeadership(newLeaderSessionId)); } } @Override public void notLeader() { synchronized (lock) { if (!running) { return; } currentLeaderSessionId = null; forEachLeaderElectionEventHandler(LeaderElectionEventHandler::onRevokeLeadership); } } @GuardedBy("lock") private void forEachLeaderElectionEventHandler( Consumer<? super LeaderElectionEventHandler> action) { for (LeaderElectionEventHandler leaderElectionEventHandler : leaderElectionEventHandlers.values()) { leadershipOperationExecutor.execute(() -> action.accept(leaderElectionEventHandler)); } } @Override public void notifyLeaderInformationChange( String leaderName, LeaderInformation leaderInformation) { synchronized (lock) { if (!running) { return; } final LeaderElectionEventHandler leaderElectionEventHandler = leaderElectionEventHandlers.get(leaderName); if (leaderElectionEventHandler != null) { leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onLeaderInformationChange( leaderInformation)); } } } @Override public void notifyAllKnownLeaderInformation( Collection<LeaderInformationWithComponentId> leaderInformationWithComponentIds) { synchronized (lock) { if (!running) { return; } final Map<String, LeaderInformation> leaderInformationByName = leaderInformationWithComponentIds.stream() .collect( Collectors.toMap( LeaderInformationWithComponentId::getComponentId, LeaderInformationWithComponentId ::getLeaderInformation)); for (Map.Entry<String, LeaderElectionEventHandler> leaderNameLeaderElectionEventHandlerPair : leaderElectionEventHandlers.entrySet()) { final String leaderName = leaderNameLeaderElectionEventHandlerPair.getKey(); if (leaderInformationByName.containsKey(leaderName)) { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(leaderInformationByName.get(leaderName)); } else { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(LeaderInformation.empty()); } } } } }
class DefaultMultipleComponentLeaderElectionService implements MultipleComponentLeaderElectionService, MultipleComponentLeaderElectionDriver.Listener { private static final Logger LOG = LoggerFactory.getLogger(DefaultMultipleComponentLeaderElectionService.class); private final Object lock = new Object(); private final MultipleComponentLeaderElectionDriver multipleComponentLeaderElectionDriver; private final FatalErrorHandler fatalErrorHandler; @GuardedBy("lock") private final ExecutorService leadershipOperationExecutor; @GuardedBy("lock") private final Map<String, LeaderElectionEventHandler> leaderElectionEventHandlers; @GuardedBy("lock") private boolean running = true; @Nullable @GuardedBy("lock") private UUID currentLeaderSessionId = null; @VisibleForTesting DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory, ExecutorService leadershipOperationExecutor) throws Exception { this.fatalErrorHandler = Preconditions.checkNotNull(fatalErrorHandler); this.leadershipOperationExecutor = Preconditions.checkNotNull(leadershipOperationExecutor); leaderElectionEventHandlers = new HashMap<>(); multipleComponentLeaderElectionDriver = multipleComponentLeaderElectionDriverFactory.create(this); } public DefaultMultipleComponentLeaderElectionService( FatalErrorHandler fatalErrorHandler, MultipleComponentLeaderElectionDriverFactory multipleComponentLeaderElectionDriverFactory) throws Exception { this( fatalErrorHandler, multipleComponentLeaderElectionDriverFactory, Executors.newSingleThreadExecutor( new ExecutorThreadFactory("leadershipOperationExecutor"))); } @Override @Override public LeaderElectionDriverFactory createDriverFactory(String componentId) { return new MultipleComponentLeaderElectionDriverAdapterFactory(componentId, this); } @Override public void publishLeaderInformation(String componentId, LeaderInformation leaderInformation) { try { 
multipleComponentLeaderElectionDriver.publishLeaderInformation( componentId, leaderInformation); } catch (Exception e) { fatalErrorHandler.onFatalError( new FlinkException( String.format( "Could not write leader information %s for leader %s.", leaderInformation, componentId), e)); } } @Override public void registerLeaderElectionEventHandler( String componentId, LeaderElectionEventHandler leaderElectionEventHandler) { synchronized (lock) { Preconditions.checkArgument( !leaderElectionEventHandlers.containsKey(componentId), "Do not support duplicate LeaderElectionEventHandler registration under %s", componentId); leaderElectionEventHandlers.put(componentId, leaderElectionEventHandler); if (currentLeaderSessionId != null) { final UUID leaderSessionId = currentLeaderSessionId; leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onGrantLeadership(leaderSessionId)); } } } @Override public void unregisterLeaderElectionEventHandler(String componentId) throws Exception { final LeaderElectionEventHandler unregisteredLeaderElectionEventHandler; synchronized (lock) { unregisteredLeaderElectionEventHandler = leaderElectionEventHandlers.remove(componentId); if (unregisteredLeaderElectionEventHandler != null) { leadershipOperationExecutor.execute( unregisteredLeaderElectionEventHandler::onRevokeLeadership); } else { LOG.debug( "Could not find leader election event handler for componentId {}. 
Ignoring the unregister call.", componentId); } } multipleComponentLeaderElectionDriver.deleteLeaderInformation(componentId); } @Override public boolean hasLeadership(String componentId) { synchronized (lock) { Preconditions.checkState(running); return leaderElectionEventHandlers.containsKey(componentId) && multipleComponentLeaderElectionDriver.hasLeadership(); } } @Override public void isLeader() { final UUID newLeaderSessionId = UUID.randomUUID(); synchronized (lock) { if (!running) { return; } currentLeaderSessionId = UUID.randomUUID(); forEachLeaderElectionEventHandler( leaderElectionEventHandler -> leaderElectionEventHandler.onGrantLeadership(newLeaderSessionId)); } } @Override public void notLeader() { synchronized (lock) { if (!running) { return; } currentLeaderSessionId = null; forEachLeaderElectionEventHandler(LeaderElectionEventHandler::onRevokeLeadership); } } @GuardedBy("lock") private void forEachLeaderElectionEventHandler( Consumer<? super LeaderElectionEventHandler> action) { for (LeaderElectionEventHandler leaderElectionEventHandler : leaderElectionEventHandlers.values()) { leadershipOperationExecutor.execute(() -> action.accept(leaderElectionEventHandler)); } } @Override public void notifyLeaderInformationChange( String componentId, LeaderInformation leaderInformation) { synchronized (lock) { if (!running) { return; } final LeaderElectionEventHandler leaderElectionEventHandler = leaderElectionEventHandlers.get(componentId); if (leaderElectionEventHandler != null) { leadershipOperationExecutor.execute( () -> leaderElectionEventHandler.onLeaderInformationChange( leaderInformation)); } } } @Override public void notifyAllKnownLeaderInformation( Collection<LeaderInformationWithComponentId> leaderInformationWithComponentIds) { synchronized (lock) { if (!running) { return; } final Map<String, LeaderInformation> leaderInformationByName = leaderInformationWithComponentIds.stream() .collect( Collectors.toMap( LeaderInformationWithComponentId::getComponentId, 
LeaderInformationWithComponentId ::getLeaderInformation)); for (Map.Entry<String, LeaderElectionEventHandler> leaderNameLeaderElectionEventHandlerPair : leaderElectionEventHandlers.entrySet()) { final String leaderName = leaderNameLeaderElectionEventHandlerPair.getKey(); if (leaderInformationByName.containsKey(leaderName)) { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(leaderInformationByName.get(leaderName)); } else { leaderNameLeaderElectionEventHandlerPair .getValue() .onLeaderInformationChange(LeaderInformation.empty()); } } } } }
Whether this should be calling into the parent or not, I don't see getters for other properties. Shouldn't there be? In .NET, we almost never have write-only properties: just read-only or read-write.
public Integer getKeySize() { return this.keySize; }
return this.keySize;
public Integer getKeySize() { return this.keySize; }
class CreateOctKeyOptions extends CreateKeyOptions { /** * The AES key size. */ private Integer keySize; /** * The hardware protected indicator for the key. */ private boolean hardwareProtected; /** * Creates a {@link CreateOctKeyOptions} with {@code name} as name of the AES key. * * @param name The name of the key. */ public CreateOctKeyOptions(String name) { super(name, KeyType.OCT); } /** * Get the key size in bits. * * @return The key size in bits. */ /** * Set the key size in bits. * * @param keySize The key size to set. * * @return The updated {@link CreateOctKeyOptions} object. */ public CreateOctKeyOptions setKeySize(Integer keySize) { this.keySize = keySize; return this; } /** * Set the key operations. * * @param keyOperations The key operations to set. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setKeyOperations(KeyOperation... keyOperations) { super.setKeyOperations(keyOperations); return this; } /** * Set the {@link OffsetDateTime notBefore} UTC time. * * @param notBefore The notBefore UTC time to set. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setNotBefore(OffsetDateTime notBefore) { super.setNotBefore(notBefore); return this; } /** * Set the {@link OffsetDateTime expires} UTC time. * * @param expiresOn The expiry time to set. for the key. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setExpiresOn(OffsetDateTime expiresOn) { super.setExpiresOn(expiresOn); return this; } /** * Set the tags to be associated with the key. * * @param tags The tags to set. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setTags(Map<String, String> tags) { super.setTags(tags); return this; } /** * Set a value that indicates if the key is enabled. * * @param enabled The enabled value to set. * * @return The updated {@link CreateOctKeyOptions} object. 
*/ public CreateOctKeyOptions setEnabled(Boolean enabled) { super.setEnabled(enabled); return this; } /** * Set whether the key being created is of HSM type or not. * * @param hardwareProtected The HSM value to set. * * @return The updated {@link CreateOctKeyOptions} object. */ public CreateOctKeyOptions setHardwareProtected(Boolean hardwareProtected) { this.hardwareProtected = hardwareProtected; KeyType keyType = hardwareProtected ? KeyType.OCT_HSM : KeyType.OCT; setKeyType(keyType); return this; } /** * Get the HSM value of the key being created. * * @return the HSM value. */ public Boolean isHardwareProtected() { return this.hardwareProtected; } }
class CreateOctKeyOptions extends CreateKeyOptions { /** * The AES key size. */ private Integer keySize; /** * The hardware protected indicator for the key. */ private boolean hardwareProtected; /** * Creates a {@link CreateOctKeyOptions} with {@code name} as name of the AES key. * * @param name The name of the key. */ public CreateOctKeyOptions(String name) { super(name, KeyType.OCT); } /** * Set the key operations. * * @param keyOperations The key operations to set. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setKeyOperations(KeyOperation... keyOperations) { super.setKeyOperations(keyOperations); return this; } /** * Set the {@link OffsetDateTime notBefore} UTC time. * * @param notBefore The notBefore UTC time to set. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setNotBefore(OffsetDateTime notBefore) { super.setNotBefore(notBefore); return this; } /** * Set the {@link OffsetDateTime expires} UTC time. * * @param expiresOn The expiry time to set. for the key. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setExpiresOn(OffsetDateTime expiresOn) { super.setExpiresOn(expiresOn); return this; } /** * Set the tags to be associated with the key. * * @param tags The tags to set. * * @return The updated {@link CreateOctKeyOptions} object. */ @Override public CreateOctKeyOptions setTags(Map<String, String> tags) { super.setTags(tags); return this; } /** * Set a value that indicates if the key is enabled. * * @param enabled The enabled value to set. * * @return The updated {@link CreateOctKeyOptions} object. */ public CreateOctKeyOptions setEnabled(Boolean enabled) { super.setEnabled(enabled); return this; } /** * Set the key size in bits. * * @param keySize The key size to set. * * @return The updated {@link CreateOctKeyOptions} object. 
*/ public CreateOctKeyOptions setKeySize(Integer keySize) { this.keySize = keySize; return this; } /** * Get the key size in bits. * * @return The key size in bits. */ /** * Set whether the key being created is of HSM type or not. * * @param hardwareProtected The HSM value to set. * * @return The updated {@link CreateOctKeyOptions} object. */ public CreateOctKeyOptions setHardwareProtected(Boolean hardwareProtected) { this.hardwareProtected = hardwareProtected; KeyType keyType = hardwareProtected ? KeyType.OCT_HSM : KeyType.OCT; setKeyType(keyType); return this; } /** * Get the HSM value of the key being created. * * @return the HSM value. */ public Boolean isHardwareProtected() { return this.hardwareProtected; } }
"But was not thrown or Exception did not match" ??? The failure cause makes it look like the wrong outcome was being met and not the one that was added specifically for this case.
public void testInstructionEmbeddedElementsWithMalformedData() throws Exception { ProcessBundleHandler handler = setupProcessBundleHanlderForSimpleRecordingDoFn(); ByteString.Output encodedData = ByteString.newOutput(); KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()).encode(KV.of("", "data"), encodedData); assertThrows( "Expect java.lang.IllegalStateException: Unable to find inbound data receiver for" + " instruction 998L and transform 3L. But was not thrown or Exception did not match.", IllegalStateException.class, () -> handler.processBundle( InstructionRequest.newBuilder() .setInstructionId("998L") .setProcessBundle( ProcessBundleRequest.newBuilder() .setProcessBundleDescriptorId("1L") .setElements( Elements.newBuilder() .addData( Data.newBuilder() .setInstructionId("998L") .setTransformId("3L") .setData(encodedData.toByteString()) .build()) .build())) .build())); assertThrows( "Expect java.lang.RuntimeException: Elements embedded in ProcessBundleRequest are " + "incomplete. But was not thrown or Exception did not match.", RuntimeException.class, () -> handler.processBundle( InstructionRequest.newBuilder() .setInstructionId("998L") .setProcessBundle( ProcessBundleRequest.newBuilder() .setProcessBundleDescriptorId("1L") .setElements( Elements.newBuilder() .addData( Data.newBuilder() .setInstructionId("998L") .setTransformId("2L") .setData(encodedData.toByteString()) .build()) .build())) .build())); handler.shutdown(); }
+ " instruction 998L and transform 3L. But was not thrown or Exception did not match.",
public void testInstructionEmbeddedElementsWithMalformedData() throws Exception { List<String> dataOutput = new ArrayList<>(); List<String> timerOutput = new ArrayList<>(); ProcessBundleHandler handler = setupProcessBundleHanlderForSimpleRecordingDoFn(dataOutput, timerOutput); ByteString.Output encodedData = ByteString.newOutput(); KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()).encode(KV.of("", "data"), encodedData); assertThrows( "Expect java.lang.IllegalStateException: Unable to find inbound data receiver for" + " instruction 998L and transform 3L.", IllegalStateException.class, () -> handler.processBundle( InstructionRequest.newBuilder() .setInstructionId("998L") .setProcessBundle( ProcessBundleRequest.newBuilder() .setProcessBundleDescriptorId("1L") .setElements( Elements.newBuilder() .addData( Data.newBuilder() .setInstructionId("998L") .setTransformId("3L") .setData(encodedData.toByteString()) .build()) .build())) .build())); assertThrows( "Elements embedded in ProcessBundleRequest do not contain stream terminators for " + "all data and timer inputs. Unterminated endpoints: [2L:data," + " 3L:timers:tfs-timer_family]", RuntimeException.class, () -> handler.processBundle( InstructionRequest.newBuilder() .setInstructionId("998L") .setProcessBundle( ProcessBundleRequest.newBuilder() .setProcessBundleDescriptorId("1L") .setElements( Elements.newBuilder() .addData( Data.newBuilder() .setInstructionId("998L") .setTransformId("2L") .setData(encodedData.toByteString()) .build()) .build())) .build())); handler.shutdown(); }
class SimpleRecordingDoFn extends DoFn<KV<String, String>, String> { private static final TupleTag<String> MAIN_OUTPUT_TAG = new TupleTag<>("mainOutput"); private static final String TIMER_FAMILY_ID = "timer_family"; @TimerFamily(TIMER_FAMILY_ID) private final TimerSpec timer = TimerSpecs.timerMap(TimeDomain.EVENT_TIME); static List<String> consumedData = new ArrayList<>(); static List<String> firedOnTimerCallbackTimerIds = new ArrayList<>(); @ProcessElement public void processElement(ProcessContext context, BoundedWindow window) {} @OnTimerFamily(TIMER_FAMILY_ID) public void onTimer(@TimerId String timerId) { firedOnTimerCallbackTimerIds.add(timerId); } }
class SimpleDoFn extends DoFn<KV<String, String>, String> { private static final TupleTag<String> MAIN_OUTPUT_TAG = new TupleTag<>("mainOutput"); private static final String TIMER_FAMILY_ID = "timer_family"; @TimerFamily(TIMER_FAMILY_ID) private final TimerSpec timer = TimerSpecs.timerMap(TimeDomain.EVENT_TIME); @ProcessElement public void processElement(ProcessContext context, BoundedWindow window) {} @OnTimerFamily(TIMER_FAMILY_ID) public void onTimer(@TimerFamily(TIMER_FAMILY_ID) TimerMap timerFamily) { timerFamily .get("output_timer") .withOutputTimestamp(Instant.ofEpochMilli(100L)) .set(Instant.ofEpochMilli(100L)); } }
updated. this TokenUtil would've made everything easier since the beginning. :)
private void checkForOnlyFinalFields(DetailAST objBlockToken) { for (DetailAST ast = objBlockToken.getFirstChild(); ast != null; ast = ast.getNextSibling()) { if (TokenTypes.VARIABLE_DEF == ast.getType()) { final DetailAST modifiersToken = ast.findFirstToken(TokenTypes.MODIFIERS); if (!modifiersToken.branchContains(TokenTypes.FINAL) && !Utils.hasIllegalCombination(modifiersToken)) { log(modifiersToken, String.format(ERROR_MSG, ast.findFirstToken(TokenTypes.IDENT).getText(), objBlockToken.getPreviousSibling().getText())); } } } }
for (DetailAST ast = objBlockToken.getFirstChild(); ast != null; ast = ast.getNextSibling()) {
private void checkForOnlyFinalFields(DetailAST objBlockToken) { Optional<DetailAST> nonFinalFieldFound = TokenUtil.findFirstTokenByPredicate(objBlockToken, node -> TokenTypes.VARIABLE_DEF == node.getType() && !node.branchContains(TokenTypes.FINAL) && !Utils.hasIllegalCombination(node.findFirstToken(TokenTypes.MODIFIERS))); if (nonFinalFieldFound.isPresent()) { DetailAST field = nonFinalFieldFound.get().findFirstToken(TokenTypes.IDENT); log(field, String.format(ERROR_MSG, field.getText())); } }
class are final * * @param objBlockToken the OBJBLOCK AST node */
class are final * * @param objBlockToken the OBJBLOCK AST node */
At the moment the filter instance is created per call. I plan to create a new PR with performance improvements, such as caching web targets, and reusing filters. I will cache the header container proxy in it.
public void filter(ClientRequestContext requestContext) { MultivaluedMap<String, String> headers = new MultivaluedHashMap<>(); requestContext.getHeaders().forEach( (key, values) -> headers.put(key, castToListOfStrings(values))); if (headerFiller != null) { headerFiller.addHeaders(headers); } MultivaluedMap<String, String> incomingHeaders = MicroProfileRestRequestClientFilter.EMPTY_MAP; if (Arc.container().getActiveContext(RequestScoped.class) != null) { HeaderContainer headerContainer = Arc.container().instance(HeaderContainer.class).get(); if (headerContainer != null) { incomingHeaders = headerContainer.getHeaders(); } } if (headersFactory instanceof DefaultClientHeadersFactoryImpl) { headers.forEach((key, values) -> requestContext.getHeaders().put(key, castToListOfObjects(values))); } headersFactory.update(incomingHeaders, headers) .forEach((key, values) -> requestContext.getHeaders().put(key, castToListOfObjects(values))); requestContext.setProperty("org.eclipse.microprofile.rest.client.invokedMethod", method); }
incomingHeaders = headerContainer.getHeaders();
public void filter(ClientRequestContext requestContext) { MultivaluedMap<String, String> headers = new MultivaluedHashMap<>(); for (Map.Entry<String, List<Object>> headerEntry : requestContext.getHeaders().entrySet()) { headers.put(headerEntry.getKey(), castToListOfStrings(headerEntry.getValue())); } if (headerFiller != null) { headerFiller.addHeaders(headers); } MultivaluedMap<String, String> incomingHeaders = MicroProfileRestRequestClientFilter.EMPTY_MAP; if (Arc.container().getActiveContext(RequestScoped.class) != null) { HeaderContainer headerContainer = Arc.container().instance(HeaderContainer.class).get(); if (headerContainer != null) { incomingHeaders = headerContainer.getHeaders(); } } if (headersFactory instanceof DefaultClientHeadersFactoryImpl) { for (Map.Entry<String, List<String>> headerEntry : headers.entrySet()) { requestContext.getHeaders().put(headerEntry.getKey(), castToListOfObjects(headerEntry.getValue())); } } MultivaluedMap<String, String> updatedHeaders = headersFactory.update(incomingHeaders, headers); for (Map.Entry<String, List<String>> headerEntry : updatedHeaders.entrySet()) { requestContext.getHeaders().put(headerEntry.getKey(), castToListOfObjects(headerEntry.getValue())); } requestContext.setProperty("org.eclipse.microprofile.rest.client.invokedMethod", method); }
class MicroProfileRestRequestClientFilter implements ClientRequestFilter { private static final MultivaluedMap<String, String> EMPTY_MAP = new MultivaluedHashMap<>(); @Nullable private final HeaderFiller headerFiller; @NotNull private final ClientHeadersFactory headersFactory; @Nullable private final Method method; /** * * @param headerFiller fills headers as specified in @ClientHeaderParam annotations * @param headersFactory MP Rest Client headersFactory * @param method java method of the JAX-RS interface */ public MicroProfileRestRequestClientFilter(@Nullable HeaderFiller headerFiller, @NotNull ClientHeadersFactory headersFactory, @Nullable Method method) { this.headerFiller = headerFiller; this.headersFactory = headersFactory; this.method = method; } @Override private static List<String> castToListOfStrings(List<Object> values) { return values.stream() .map(val -> val instanceof String ? (String) val : String.valueOf(val)) .collect(Collectors.toList()); } @SuppressWarnings("unchecked") private static List<Object> castToListOfObjects(List<String> values) { return (List<Object>) (List<?>) values; } }
class MicroProfileRestRequestClientFilter implements ClientRequestFilter { private static final MultivaluedMap<String, String> EMPTY_MAP = new MultivaluedHashMap<>(); @Nullable private final HeaderFiller headerFiller; @NotNull private final ClientHeadersFactory headersFactory; @Nullable private final Method method; /** * * @param headerFiller fills headers as specified in @ClientHeaderParam annotations * @param headersFactory MP Rest Client headersFactory * @param method java method of the JAX-RS interface */ public MicroProfileRestRequestClientFilter(@Nullable HeaderFiller headerFiller, @NotNull ClientHeadersFactory headersFactory, @Nullable Method method) { this.headerFiller = headerFiller; this.headersFactory = headersFactory; this.method = method; } @Override private static List<String> castToListOfStrings(List<Object> values) { List<String> result = new ArrayList<>(); for (Object value : values) { if (value instanceof String) { result.add((String) value); } else { result.add(String.valueOf(value)); } } return result; } @SuppressWarnings("unchecked") private static List<Object> castToListOfObjects(List<String> values) { return (List<Object>) (List<?>) values; } }
Yeah, this test is flaky the way it's written, but we also can't just comment out the assertion since that defeats the purpose of the test. Looking through this file, it looks like we're already creating and deleting a pool for every other test. Can we get rid of the `@BeforeClass` and `@AfterClass` functions? I don't see why we have a static variable livePool at all. If we then create a pool inside `testPoolOData`, we can just assert that the expected pool exists in the list. Then there's no issue with other test pools existing. As far as I can see this won't affect test performance at all, because we'd be removing one pool create call and adding one.
public void testPoolOData() throws Exception { CloudPool pool = batchClient.poolOperations().getPool(poolId, new DetailLevel.Builder().withExpandClause("stats").build()); List<CloudPool> pools = batchClient.poolOperations() .listPools(new DetailLevel.Builder().withSelectClause("id, state").build()); Assert.assertTrue(pools.size() > 0); Assert.assertNotNull(pools.get(0).id()); Assert.assertNull(pools.get(0).vmSize()); pools = batchClient.poolOperations() .listPools(new DetailLevel.Builder().withFilterClause("state eq 'deleting'").build()); }
public void testPoolOData() throws Exception { String poolId = getStringIdWithUserNamePrefix("-testPoolOData"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 2; int POOL_LOW_PRI_VM_COUNT = 2; if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imgRef = new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer") .withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration.withNodeAgentSKUId("batch.node.ubuntu 18.04").withImageReference(imgRef); NetworkConfiguration netConfig = createNetworkConfiguration(); PoolEndpointConfiguration endpointConfig = new PoolEndpointConfiguration(); List<InboundNATPool> inbounds = new ArrayList<>(); inbounds.add(new InboundNATPool().withName("testinbound").withProtocol(InboundEndpointProtocol.TCP) .withBackendPort(5000).withFrontendPortRangeStart(60000).withFrontendPortRangeEnd(60040)); endpointConfig.withInboundNATPools(inbounds); netConfig.withEndpointConfiguration(endpointConfig).withEnableAcceleratedNetworking(true); PoolAddParameter addParameter = new PoolAddParameter().withId(poolId) .withTargetDedicatedNodes(POOL_VM_COUNT).withTargetLowPriorityNodes(POOL_LOW_PRI_VM_COUNT) .withVmSize(POOL_VM_SIZE).withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(netConfig) .withTargetNodeCommunicationMode(NodeCommunicationMode.DEFAULT); batchClient.poolOperations().createPool(addParameter); } Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); try { List<CloudPool> pools = batchClient.poolOperations() .listPools(new DetailLevel.Builder().withSelectClause("id, state").build()); Assert.assertTrue(pools.size() > 0); Assert.assertNotNull(pools.get(0).id()); Assert.assertNull(pools.get(0).vmSize()); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } }
class PoolTests extends BatchIntegrationTestBase { private static CloudPool livePool; private static String poolId; private static NetworkConfiguration networkConfiguration; @BeforeClass public static void setup() throws Exception { poolId = getStringIdWithUserNamePrefix("-testpool"); if(isRecordMode()) { createClient(AuthMode.AAD); livePool = createIfNotExistIaaSPool(poolId); Assert.assertNotNull(livePool); } networkConfiguration = createNetworkConfiguration(); } @AfterClass public static void cleanup() throws Exception { try { } catch (Exception e) { } } @Test @Test public void canCRUDLowPriIaaSPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-canCRUDLowPri-testPool"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 2; int POOL_LOW_PRI_VM_COUNT = 2; long POOL_STEADY_TIMEOUT_IN_MILLISECONDS = 10 * 60 * 1000; TimeUnit.SECONDS.toMillis(30); if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imgRef = new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer") .withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration.withNodeAgentSKUId("batch.node.ubuntu 18.04").withImageReference(imgRef); NetworkConfiguration netConfig = createNetworkConfiguration(); PoolEndpointConfiguration endpointConfig = new PoolEndpointConfiguration(); List<InboundNATPool> inbounds = new ArrayList<>(); inbounds.add(new InboundNATPool().withName("testinbound").withProtocol(InboundEndpointProtocol.TCP) .withBackendPort(5000).withFrontendPortRangeStart(60000).withFrontendPortRangeEnd(60040)); endpointConfig.withInboundNATPools(inbounds); netConfig.withEndpointConfiguration(endpointConfig).withEnableAcceleratedNetworking(true); PoolAddParameter addParameter = new PoolAddParameter().withId(poolId) .withTargetDedicatedNodes(POOL_VM_COUNT).withTargetLowPriorityNodes(POOL_LOW_PRI_VM_COUNT) 
.withVmSize(POOL_VM_SIZE).withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(netConfig) .withTargetNodeCommunicationMode(NodeCommunicationMode.DEFAULT); batchClient.poolOperations().createPool(addParameter); } try { Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_MILLISECONDS); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, (long) pool.currentLowPriorityNodes()); Assert.assertNotNull("CurrentNodeCommunicationMode should be defined for pool with more than one target dedicated node", pool.currentNodeCommunicationMode()); Assert.assertEquals(NodeCommunicationMode.DEFAULT, pool.targetNodeCommunicationMode()); Assert.assertTrue(pool.networkConfiguration().enableAcceleratedNetworking()); List<ComputeNode> computeNodes = batchClient.computeNodeOperations().listComputeNodes(poolId); List<InboundEndpoint> inboundEndpoints = computeNodes.get(0).endpointConfiguration().inboundEndpoints(); Assert.assertEquals(2, inboundEndpoints.size()); InboundEndpoint inboundEndpoint = inboundEndpoints.get(0); Assert.assertEquals(5000, inboundEndpoint.backendPort()); Assert.assertTrue(inboundEndpoint.frontendPort() >= 60000); Assert.assertTrue(inboundEndpoint.frontendPort() <= 60040); Assert.assertTrue(inboundEndpoint.name().startsWith("testinbound.")); Assert.assertTrue(inboundEndpoints.get(1).name().startsWith("SSHRule")); PoolNodeCounts poolNodeCount = null; List<PoolNodeCounts> poolNodeCounts = batchClient.accountOperations().listPoolNodeCounts(); for (PoolNodeCounts tmp : poolNodeCounts) { if (tmp.poolId().equals(poolId)) { poolNodeCount = tmp; break; } } Assert.assertNotNull(poolNodeCount); Assert.assertNotNull(poolNodeCount.lowPriority()); Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, poolNodeCount.lowPriority().total()); 
Assert.assertEquals(POOL_VM_COUNT, poolNodeCount.dedicated().total()); PoolUpdatePropertiesParameter updatePropertiesParam = new PoolUpdatePropertiesParameter(); updatePropertiesParam.withTargetNodeCommunicationMode(NodeCommunicationMode.SIMPLIFIED) .withApplicationPackageReferences( new LinkedList<ApplicationPackageReference>()) .withMetadata(new LinkedList<MetadataItem>()) .withCertificateReferences(new LinkedList<CertificateReference>()); batchClient.poolOperations().updatePoolProperties(poolId, updatePropertiesParam); pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull("CurrentNodeCommunicationMode should be defined for pool with more than one target dedicated node", pool.currentNodeCommunicationMode()); Assert.assertEquals(NodeCommunicationMode.SIMPLIFIED, pool.targetNodeCommunicationMode()); PoolPatchParameter patchParam = new PoolPatchParameter(); patchParam.withTargetNodeCommunicationMode(NodeCommunicationMode.CLASSIC); batchClient.poolOperations().patchPool(poolId, patchParam); pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull("CurrentNodeCommunicationMode should be defined for pool with more than one target dedicated node", pool.currentNodeCommunicationMode()); Assert.assertEquals(NodeCommunicationMode.CLASSIC, pool.targetNodeCommunicationMode()); batchClient.poolOperations().resizePool(poolId, 1, 1); pool = batchClient.poolOperations().getPool(poolId); Assert.assertEquals(1, (long) pool.targetDedicatedNodes()); Assert.assertEquals(1, (long) pool.targetLowPriorityNodes()); boolean deleted = false; elapsedTime = 0L; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_MILLISECONDS) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 
1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canInstallVMExtension() throws Exception { String poolId = getStringIdWithUserNamePrefix("-installVMExtension"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 1; int POOL_LOW_PRI_VM_COUNT = 1; String VM_EXTENSION_NAME = "secretext"; String VM_EXTENSION_TYPE = "KeyVaultForLinux"; String VM_EXTENSION_PUBLISHER = "Microsoft.Azure.KeyVault"; String VM_TYPEHANDLER_VERSION = "1.0"; long POOL_STEADY_TIMEOUT_IN_Milliseconds = 15 * 60 * 1000; List<VMExtension> vmExtensions = new ArrayList<VMExtension>(); vmExtensions.add(new VMExtension().withName(VM_EXTENSION_NAME).withType(VM_EXTENSION_TYPE).withPublisher(VM_EXTENSION_PUBLISHER).withTypeHandlerVersion(VM_TYPEHANDLER_VERSION).withEnableAutomaticUpgrade(true)); ImageReference imgRef = new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer") .withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration.withNodeAgentSKUId("batch.node.ubuntu 18.04").withImageReference(imgRef).withExtensions(vmExtensions); PoolAddParameter addParameter = new PoolAddParameter().withId(poolId) .withTargetDedicatedNodes(POOL_VM_COUNT).withTargetLowPriorityNodes(POOL_LOW_PRI_VM_COUNT) .withVmSize(POOL_VM_SIZE).withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration) .withTargetNodeCommunicationMode(NodeCommunicationMode.DEFAULT); batchClient.poolOperations().createPool(addParameter); try{ long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_Milliseconds); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); 
Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, (long) pool.currentLowPriorityNodes()); List<ComputeNode> computeNodes = batchClient.computeNodeOperations().listComputeNodes(poolId); for(ComputeNode node : computeNodes){ NodeVMExtension nodeVMExtension = batchClient.protocolLayer().computeNodeExtensions().get(poolId, node.id(), VM_EXTENSION_NAME); Assert.assertNotNull(nodeVMExtension); Assert.assertTrue(nodeVMExtension.vmExtension().enableAutomaticUpgrade()); } boolean deleted = false; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_Milliseconds * 2) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); }finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCreateContainerPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-createContainerPool"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 1; long POOL_STEADY_TIMEOUT_IN_MILLISECONDS = 10 * 60 * 1000; TimeUnit.SECONDS.toMillis(30); if (!batchClient.poolOperations().existsPool(poolId)){ List<String> images = new ArrayList<String>(); images.add("tensorflow/tensorflow:latest-gpu"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new ImageReference().withPublisher("microsoft-azure-batch").withOffer("ubuntu-server-container").withSku("20-04-lts")) .withNodeAgentSKUId("batch.node.ubuntu 20.04") .withContainerConfiguration(new ContainerConfiguration().withContainerImageNames(images).withType(ContainerType.DOCKER_COMPATIBLE)); PoolAddParameter 
addParameter = new PoolAddParameter() .withId(poolId) .withVmSize(POOL_VM_SIZE) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration); batchClient.poolOperations().createPool(addParameter); } try { Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_MILLISECONDS); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); Assert.assertEquals(ContainerType.DOCKER_COMPATIBLE,pool.virtualMachineConfiguration().containerConfiguration().type()); boolean deleted = false; elapsedTime = 0L; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_MILLISECONDS) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCreateDataDisk() throws Exception { String poolId = getStringIdWithUserNamePrefix("-testpool3"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; int lun = 50; int diskSizeGB = 50; List<DataDisk> dataDisks = new ArrayList<DataDisk>(); dataDisks.add(new DataDisk().withLun(lun).withDiskSizeGB(diskSizeGB)); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer").withSku("18.04-LTS")) .withNodeAgentSKUId("batch.node.ubuntu 
18.04").withDataDisks(dataDisks); PoolAddParameter poolConfig = new PoolAddParameter() .withId(poolId) .withNetworkConfiguration(networkConfiguration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVmSize(POOL_VM_SIZE) .withVirtualMachineConfiguration(configuration); try { batchClient.poolOperations().createPool(poolConfig); CloudPool pool = batchClient.poolOperations().getPool(poolId); Assert.assertEquals(lun, pool.virtualMachineConfiguration().dataDisks().get(0).lun()); Assert.assertEquals(diskSizeGB, pool.virtualMachineConfiguration().dataDisks().get(0).diskSizeGB()); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCreateCustomImageWithExpectedError() throws Exception { String poolId = getStringIdWithUserNamePrefix("-customImageExpErr"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration.withImageReference(new ImageReference().withVirtualMachineImageId(String.format( "/subscriptions/%s/resourceGroups/batchexp/providers/Microsoft.Compute/images/FakeImage", System.getenv("SUBSCRIPTION_ID")))) .withNodeAgentSKUId("batch.node.ubuntu 16.04"); PoolAddParameter poolConfig = new PoolAddParameter() .withId(poolId) .withVmSize(POOL_VM_SIZE) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration); try { batchClient.poolOperations().createPool(poolConfig); throw new Exception("Expect exception, but not got it."); } catch (BatchErrorException err) { if (err.body().code().equals("InsufficientPermissions")) { Assert.assertTrue(err.body().values().get(0).value().contains( "The user identity used for this operation does not have the required privilege Microsoft.Compute/images/read on the specified resource")); } else { if (!err.body().code().equals("InvalidPropertyValue")) 
{ throw err; } } } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void shouldFailOnCreateContainerPoolWithRegularImage() throws Exception { String poolId = getStringIdWithUserNamePrefix("-createContainerRegImage"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; List<String> images = new ArrayList<String>(); images.add("ubuntu"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer").withSku("18.04-LTS")) .withNodeAgentSKUId("batch.node.ubuntu 18.04") .withContainerConfiguration(new ContainerConfiguration().withContainerImageNames(images).withType(ContainerType.DOCKER_COMPATIBLE)); PoolAddParameter poolConfig = new PoolAddParameter() .withId(poolId) .withVmSize(POOL_VM_SIZE) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration); try { batchClient.poolOperations().createPool(poolConfig); throw new Exception("The test case should throw exception here"); } catch (BatchErrorException err) { if (err.body().code().equals("InvalidPropertyValue")) { for (int i = 0; i < err.body().values().size(); i++) { if (err.body().values().get(i).key().equals("Reason")) { Assert.assertEquals( "The specified imageReference with publisher Canonical offer UbuntuServer sku 18.04-LTS does not support container feature.", err.body().values().get(i).value()); return; } } throw new Exception("Couldn't find expect error reason"); } else { throw err; } } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void shouldFailOnCreateLinuxPoolWithWindowsConfig() throws Exception { String poolId = getStringIdWithUserNamePrefix("-createLinuxPool"); 
String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; List<String> images = new ArrayList<String>(); images.add("ubuntu"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer").withSku("16.04-LTS")) .withNodeAgentSKUId("batch.node.ubuntu 16.04"); UserAccount windowsUser = new UserAccount(); windowsUser.withWindowsUserConfiguration(new WindowsUserConfiguration().withLoginMode(LoginMode.INTERACTIVE)) .withName("testaccount") .withPassword("password"); ArrayList<UserAccount> users = new ArrayList<UserAccount>(); users.add(windowsUser); PoolAddParameter pool = new PoolAddParameter().withId(poolId) .withVirtualMachineConfiguration(configuration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withTargetLowPriorityNodes(0) .withVmSize(POOL_VM_SIZE) .withUserAccounts(users) .withNetworkConfiguration(networkConfiguration); try { batchClient.poolOperations().createPool(pool); throw new Exception("The test case should throw exception here"); } catch (BatchErrorException err) { if (err.body().code().equals("InvalidPropertyValue")) { for (int i = 0; i < err.body().values().size(); i++) { if (err.body().values().get(i).key().equals("Reason")) { Assert.assertEquals( "The user configuration for user account 'testaccount' has a mismatch with the OS (Windows/Linux) configuration specified in VirtualMachineConfiguration", err.body().values().get(i).value()); return; } } throw new Exception("Couldn't find expect error reason"); } else { throw err; } } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCRUDLowPriPaaSPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-testpool4"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 1; int POOL_LOW_PRI_VM_COUNT = 2; long 
POOL_STEADY_TIMEOUT_IN_Milliseconds = 10 * 60 * 1000; if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference().withPublisher("Canonical") .withOffer("UbuntuServer").withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference).withNodeAgentSKUId("batch.node.ubuntu 18.04"); batchClient.poolOperations().createPool(new PoolAddParameter().withId(poolId) .withVmSize(POOL_VM_SIZE) .withVirtualMachineConfiguration(vmConfiguration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withTargetLowPriorityNodes(POOL_LOW_PRI_VM_COUNT)); } try { Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_Milliseconds); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, (long) pool.currentLowPriorityNodes()); batchClient.poolOperations().resizePool(poolId, null, 1); pool = batchClient.poolOperations().getPool(poolId); Assert.assertEquals(POOL_VM_COUNT, (long) pool.targetDedicatedNodes()); Assert.assertEquals(1, (long) pool.targetLowPriorityNodes()); boolean deleted = false; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_Milliseconds * 2) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } private 
static CloudPool waitForPoolState(String poolId, AllocationState targetState, long poolAllocationTimeoutInMilliseconds) throws IOException, InterruptedException { long startTime = System.currentTimeMillis(); long elapsedTime = 0L; boolean allocationStateReached = false; CloudPool pool = null; while (elapsedTime < poolAllocationTimeoutInMilliseconds) { pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull(pool); if (pool.allocationState() == targetState) { allocationStateReached = true; break; } System.out.println("wait 30 seconds for pool allocationStateReached..."); threadSleepInRecordMode(30 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue("The pool did not reach a allocationStateReached state in the allotted time", allocationStateReached); return pool; } @Test public void canCRUDPaaSPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-CRUDPaaS"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 3; long POOL_STEADY_TIMEOUT_IN_Milliseconds = 15 * 60 * 1000; if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference().withPublisher("Canonical") .withOffer("UbuntuServer").withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference).withNodeAgentSKUId("batch.node.ubuntu 18.04"); List<UserAccount> userList = new ArrayList<>(); userList.add(new UserAccount().withName("test-user-1").withPassword("kt userList.add(new UserAccount().withName("test-user-2").withPassword("kt .withElevationLevel(ElevationLevel.ADMIN)); PoolAddParameter addParameter = new PoolAddParameter().withId(poolId) .withVmSize(POOL_VM_SIZE) .withVirtualMachineConfiguration(vmConfiguration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withUserAccounts(userList); batchClient.poolOperations().createPool(addParameter); } try { 
Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_Milliseconds); Assert.assertNotNull(pool.userAccounts()); Assert.assertEquals("test-user-1", pool.userAccounts().get(0).name()); Assert.assertEquals(ElevationLevel.NON_ADMIN, pool.userAccounts().get(0).elevationLevel()); Assert.assertNull(pool.userAccounts().get(0).password()); Assert.assertEquals(ElevationLevel.ADMIN, pool.userAccounts().get(1).elevationLevel()); List<CloudPool> pools = batchClient.poolOperations().listPools(); Assert.assertTrue(pools.size() > 0); boolean found = false; for (CloudPool p : pools) { if (p.id().equals(poolId)) { found = true; break; } } Assert.assertTrue(found); PoolNodeCounts poolNodeCount = null; List<PoolNodeCounts> poolNodeCounts = batchClient.accountOperations().listPoolNodeCounts(); for (PoolNodeCounts tmp : poolNodeCounts) { if (tmp.poolId().equals(poolId)) { poolNodeCount = tmp; break; } } Assert.assertNotNull(poolNodeCount); Assert.assertNotNull(poolNodeCount.lowPriority()); Assert.assertEquals(0, poolNodeCount.lowPriority().total()); Assert.assertEquals(3, poolNodeCount.dedicated().total()); LinkedList<MetadataItem> metadata = new LinkedList<>(); metadata.add((new MetadataItem()).withName("key1").withValue("value1")); batchClient.poolOperations().patchPool(poolId, null, null, null, metadata); pool = batchClient.poolOperations().getPool(poolId); Assert.assertTrue(pool.metadata().size() == 1); Assert.assertTrue(pool.metadata().get(0).name().equals("key1")); batchClient.poolOperations().updatePoolProperties(poolId, null, new LinkedList<CertificateReference>(), new LinkedList<ApplicationPackageReference>(), new LinkedList<MetadataItem>()); pool = batchClient.poolOperations().getPool(poolId); Assert.assertNull(pool.metadata()); boolean deleted = false; batchClient.poolOperations().deletePool(poolId); while 
(elapsedTime < POOL_STEADY_TIMEOUT_IN_Milliseconds) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 5 seconds for pool delete..."); threadSleepInRecordMode(5 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void testPoolWithAutoOSUpgradeAndRollingUpgrade() throws Exception { String poolId = getStringIdWithUserNamePrefix("-autoOSUpgradeRollingUpgrade"); if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference() .withPublisher("Canonical") .withOffer("UbuntuServer") .withSku("18.04-LTS"); NodePlacementConfiguration nodePlacementConfiguration = new NodePlacementConfiguration() .withPolicy(NodePlacementPolicyType.ZONAL); VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference) .withNodeAgentSKUId("batch.node.ubuntu 18.04") .withNodePlacementConfiguration(nodePlacementConfiguration); UpgradePolicy upgradePolicy = new UpgradePolicy() .withMode(UpgradeMode.AUTOMATIC) .withAutomaticOSUpgradePolicy(new AutomaticOSUpgradePolicy() .withDisableAutomaticRollback(true) .withEnableAutomaticOSUpgrade(true) .withUseRollingUpgradePolicy(true) .withOsRollingUpgradeDeferral(true)) .withRollingUpgradePolicy(new RollingUpgradePolicy() .withEnableCrossZoneUpgrade(true) .withMaxBatchInstancePercent(20) .withMaxUnhealthyInstancePercent(20) .withMaxUnhealthyUpgradedInstancePercent(20) .withPauseTimeBetweenBatches("PT5S") .withPrioritizeUnhealthyInstances(false) .withRollbackFailedInstancesOnPolicyBreach(false)); PoolAddParameter testPoolWithUpgradePolicy = new PoolAddParameter() .withId(poolId) 
.withVmSize("STANDARD_D2S_V3") .withVirtualMachineConfiguration(vmConfiguration) .withUpgradePolicy(upgradePolicy); batchClient.poolOperations().createPool(testPoolWithUpgradePolicy); } try { CloudPool pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull(pool); Assert.assertEquals("automatic", pool.upgradePolicy().mode().toString()); Assert.assertTrue(pool.upgradePolicy().automaticOSUpgradePolicy().enableAutomaticOSUpgrade()); Assert.assertTrue(pool.upgradePolicy().rollingUpgradePolicy().enableCrossZoneUpgrade()); Assert.assertEquals(20, (int) pool.upgradePolicy().rollingUpgradePolicy().maxBatchInstancePercent()); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void testPoolWithSecurityProfileAndOSDisk() throws Exception { String poolId = getStringIdWithUserNamePrefix("SecurityProfile"); if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference() .withPublisher("Canonical") .withOffer("0001-com-ubuntu-server-jammy") .withSku("22_04-lts"); SecurityProfile securityProfile = new SecurityProfile() .withSecurityType(SecurityTypes.TRUSTED_LAUNCH) .withEncryptionAtHost(true) .withUefiSettings(new UefiSettings() .withSecureBootEnabled(true) .withVTpmEnabled(true)); ManagedDisk managedDisk = new ManagedDisk() .withStorageAccountType(StorageAccountType.STANDARD_LRS); OSDisk osDisk = new OSDisk() .withCaching(CachingType.READ_WRITE) .withManagedDisk(managedDisk) .withDiskSizeGB(50) .withWriteAcceleratorEnabled(true); VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference) .withNodeAgentSKUId("batch.node.ubuntu 22.04") .withSecurityProfile(securityProfile) .withOsDisk(osDisk); PoolAddParameter poolAddParameter = new PoolAddParameter() .withId(poolId) .withVmSize("STANDARD_D2S_V3") .withVirtualMachineConfiguration(vmConfiguration) 
.withTargetDedicatedNodes(0); batchClient.poolOperations().createPool(poolAddParameter); } try { CloudPool pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull(pool); SecurityProfile sp = pool.virtualMachineConfiguration().securityProfile(); Assert.assertEquals(SecurityTypes.TRUSTED_LAUNCH, sp.securityType()); Assert.assertTrue(sp.encryptionAtHost()); Assert.assertTrue(sp.uefiSettings().secureBootEnabled()); Assert.assertTrue(sp.uefiSettings().vTpmEnabled()); OSDisk disk = pool.virtualMachineConfiguration().osDisk(); Assert.assertEquals("readwrite", pool.virtualMachineConfiguration().osDisk().caching().toString().toLowerCase()); Assert.assertEquals(StorageAccountType.STANDARD_LRS, disk.managedDisk().storageAccountType()); Assert.assertEquals(Integer.valueOf(50), disk.diskSizeGB()); Assert.assertTrue(disk.writeAcceleratorEnabled()); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } }
class PoolTests extends BatchIntegrationTestBase { private static NetworkConfiguration networkConfiguration; @BeforeClass public static void setup() throws Exception { if(isRecordMode()) { createClient(AuthMode.AAD); } networkConfiguration = createNetworkConfiguration(); } @Test @Test public void canCRUDLowPriIaaSPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-canCRUDLowPri-testPool"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 2; int POOL_LOW_PRI_VM_COUNT = 2; long POOL_STEADY_TIMEOUT_IN_MILLISECONDS = 10 * 60 * 1000; TimeUnit.SECONDS.toMillis(30); if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imgRef = new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer") .withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration.withNodeAgentSKUId("batch.node.ubuntu 18.04").withImageReference(imgRef); NetworkConfiguration netConfig = createNetworkConfiguration(); PoolEndpointConfiguration endpointConfig = new PoolEndpointConfiguration(); List<InboundNATPool> inbounds = new ArrayList<>(); inbounds.add(new InboundNATPool().withName("testinbound").withProtocol(InboundEndpointProtocol.TCP) .withBackendPort(5000).withFrontendPortRangeStart(60000).withFrontendPortRangeEnd(60040)); endpointConfig.withInboundNATPools(inbounds); netConfig.withEndpointConfiguration(endpointConfig).withEnableAcceleratedNetworking(true); PoolAddParameter addParameter = new PoolAddParameter().withId(poolId) .withTargetDedicatedNodes(POOL_VM_COUNT).withTargetLowPriorityNodes(POOL_LOW_PRI_VM_COUNT) .withVmSize(POOL_VM_SIZE).withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(netConfig) .withTargetNodeCommunicationMode(NodeCommunicationMode.DEFAULT); batchClient.poolOperations().createPool(addParameter); } try { Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long 
elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_MILLISECONDS); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, (long) pool.currentLowPriorityNodes()); Assert.assertNotNull("CurrentNodeCommunicationMode should be defined for pool with more than one target dedicated node", pool.currentNodeCommunicationMode()); Assert.assertEquals(NodeCommunicationMode.DEFAULT, pool.targetNodeCommunicationMode()); Assert.assertTrue(pool.networkConfiguration().enableAcceleratedNetworking()); List<ComputeNode> computeNodes = batchClient.computeNodeOperations().listComputeNodes(poolId); List<InboundEndpoint> inboundEndpoints = computeNodes.get(0).endpointConfiguration().inboundEndpoints(); Assert.assertEquals(2, inboundEndpoints.size()); InboundEndpoint inboundEndpoint = inboundEndpoints.get(0); Assert.assertEquals(5000, inboundEndpoint.backendPort()); Assert.assertTrue(inboundEndpoint.frontendPort() >= 60000); Assert.assertTrue(inboundEndpoint.frontendPort() <= 60040); Assert.assertTrue(inboundEndpoint.name().startsWith("testinbound.")); Assert.assertTrue(inboundEndpoints.get(1).name().startsWith("SSHRule")); PoolNodeCounts poolNodeCount = null; List<PoolNodeCounts> poolNodeCounts = batchClient.accountOperations().listPoolNodeCounts(); for (PoolNodeCounts tmp : poolNodeCounts) { if (tmp.poolId().equals(poolId)) { poolNodeCount = tmp; break; } } Assert.assertNotNull(poolNodeCount); Assert.assertNotNull(poolNodeCount.lowPriority()); Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, poolNodeCount.lowPriority().total()); Assert.assertEquals(POOL_VM_COUNT, poolNodeCount.dedicated().total()); PoolUpdatePropertiesParameter updatePropertiesParam = new PoolUpdatePropertiesParameter(); updatePropertiesParam.withTargetNodeCommunicationMode(NodeCommunicationMode.SIMPLIFIED) .withApplicationPackageReferences( new LinkedList<ApplicationPackageReference>()) .withMetadata(new 
LinkedList<MetadataItem>()) .withCertificateReferences(new LinkedList<CertificateReference>()); batchClient.poolOperations().updatePoolProperties(poolId, updatePropertiesParam); pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull("CurrentNodeCommunicationMode should be defined for pool with more than one target dedicated node", pool.currentNodeCommunicationMode()); Assert.assertEquals(NodeCommunicationMode.SIMPLIFIED, pool.targetNodeCommunicationMode()); PoolPatchParameter patchParam = new PoolPatchParameter(); patchParam.withTargetNodeCommunicationMode(NodeCommunicationMode.CLASSIC); batchClient.poolOperations().patchPool(poolId, patchParam); pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull("CurrentNodeCommunicationMode should be defined for pool with more than one target dedicated node", pool.currentNodeCommunicationMode()); Assert.assertEquals(NodeCommunicationMode.CLASSIC, pool.targetNodeCommunicationMode()); batchClient.poolOperations().resizePool(poolId, 1, 1); pool = batchClient.poolOperations().getPool(poolId); Assert.assertEquals(1, (long) pool.targetDedicatedNodes()); Assert.assertEquals(1, (long) pool.targetLowPriorityNodes()); boolean deleted = false; elapsedTime = 0L; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_MILLISECONDS) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canInstallVMExtension() throws Exception { String poolId = 
getStringIdWithUserNamePrefix("-installVMExtension"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 1; int POOL_LOW_PRI_VM_COUNT = 1; String VM_EXTENSION_NAME = "secretext"; String VM_EXTENSION_TYPE = "KeyVaultForLinux"; String VM_EXTENSION_PUBLISHER = "Microsoft.Azure.KeyVault"; String VM_TYPEHANDLER_VERSION = "1.0"; long POOL_STEADY_TIMEOUT_IN_Milliseconds = 15 * 60 * 1000; List<VMExtension> vmExtensions = new ArrayList<VMExtension>(); vmExtensions.add(new VMExtension().withName(VM_EXTENSION_NAME).withType(VM_EXTENSION_TYPE).withPublisher(VM_EXTENSION_PUBLISHER).withTypeHandlerVersion(VM_TYPEHANDLER_VERSION).withEnableAutomaticUpgrade(true)); ImageReference imgRef = new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer") .withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration.withNodeAgentSKUId("batch.node.ubuntu 18.04").withImageReference(imgRef).withExtensions(vmExtensions); PoolAddParameter addParameter = new PoolAddParameter().withId(poolId) .withTargetDedicatedNodes(POOL_VM_COUNT).withTargetLowPriorityNodes(POOL_LOW_PRI_VM_COUNT) .withVmSize(POOL_VM_SIZE).withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration) .withTargetNodeCommunicationMode(NodeCommunicationMode.DEFAULT); batchClient.poolOperations().createPool(addParameter); try{ long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_Milliseconds); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, (long) pool.currentLowPriorityNodes()); List<ComputeNode> computeNodes = batchClient.computeNodeOperations().listComputeNodes(poolId); for(ComputeNode node : computeNodes){ NodeVMExtension nodeVMExtension = batchClient.protocolLayer().computeNodeExtensions().get(poolId, node.id(), 
VM_EXTENSION_NAME); Assert.assertNotNull(nodeVMExtension); Assert.assertTrue(nodeVMExtension.vmExtension().enableAutomaticUpgrade()); } boolean deleted = false; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_Milliseconds * 2) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); }finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCreateContainerPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-createContainerPool"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 1; long POOL_STEADY_TIMEOUT_IN_MILLISECONDS = 10 * 60 * 1000; TimeUnit.SECONDS.toMillis(30); if (!batchClient.poolOperations().existsPool(poolId)){ List<String> images = new ArrayList<String>(); images.add("tensorflow/tensorflow:latest-gpu"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new ImageReference().withPublisher("microsoft-azure-batch").withOffer("ubuntu-server-container").withSku("20-04-lts")) .withNodeAgentSKUId("batch.node.ubuntu 20.04") .withContainerConfiguration(new ContainerConfiguration().withContainerImageNames(images).withType(ContainerType.DOCKER_COMPATIBLE)); PoolAddParameter addParameter = new PoolAddParameter() .withId(poolId) .withVmSize(POOL_VM_SIZE) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration); batchClient.poolOperations().createPool(addParameter); } try { 
Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_MILLISECONDS); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); Assert.assertEquals(ContainerType.DOCKER_COMPATIBLE,pool.virtualMachineConfiguration().containerConfiguration().type()); boolean deleted = false; elapsedTime = 0L; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_MILLISECONDS) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCreateDataDisk() throws Exception { String poolId = getStringIdWithUserNamePrefix("-testpool3"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; int lun = 50; int diskSizeGB = 50; List<DataDisk> dataDisks = new ArrayList<DataDisk>(); dataDisks.add(new DataDisk().withLun(lun).withDiskSizeGB(diskSizeGB)); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer").withSku("18.04-LTS")) .withNodeAgentSKUId("batch.node.ubuntu 18.04").withDataDisks(dataDisks); PoolAddParameter poolConfig = new PoolAddParameter() .withId(poolId) .withNetworkConfiguration(networkConfiguration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVmSize(POOL_VM_SIZE) .withVirtualMachineConfiguration(configuration); try { 
batchClient.poolOperations().createPool(poolConfig); CloudPool pool = batchClient.poolOperations().getPool(poolId); Assert.assertEquals(lun, pool.virtualMachineConfiguration().dataDisks().get(0).lun()); Assert.assertEquals(diskSizeGB, pool.virtualMachineConfiguration().dataDisks().get(0).diskSizeGB()); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCreateCustomImageWithExpectedError() throws Exception { String poolId = getStringIdWithUserNamePrefix("-customImageExpErr"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration.withImageReference(new ImageReference().withVirtualMachineImageId(String.format( "/subscriptions/%s/resourceGroups/batchexp/providers/Microsoft.Compute/images/FakeImage", System.getenv("SUBSCRIPTION_ID")))) .withNodeAgentSKUId("batch.node.ubuntu 16.04"); PoolAddParameter poolConfig = new PoolAddParameter() .withId(poolId) .withVmSize(POOL_VM_SIZE) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration); try { batchClient.poolOperations().createPool(poolConfig); throw new Exception("Expect exception, but not got it."); } catch (BatchErrorException err) { if (err.body().code().equals("InsufficientPermissions")) { Assert.assertTrue(err.body().values().get(0).value().contains( "The user identity used for this operation does not have the required privilege Microsoft.Compute/images/read on the specified resource")); } else { if (!err.body().code().equals("InvalidPropertyValue")) { throw err; } } } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void shouldFailOnCreateContainerPoolWithRegularImage() throws Exception { String poolId 
= getStringIdWithUserNamePrefix("-createContainerRegImage"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; List<String> images = new ArrayList<String>(); images.add("ubuntu"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new ImageReference().withPublisher("Canonical").withOffer("UbuntuServer").withSku("18.04-LTS")) .withNodeAgentSKUId("batch.node.ubuntu 18.04") .withContainerConfiguration(new ContainerConfiguration().withContainerImageNames(images).withType(ContainerType.DOCKER_COMPATIBLE)); PoolAddParameter poolConfig = new PoolAddParameter() .withId(poolId) .withVmSize(POOL_VM_SIZE) .withTargetDedicatedNodes(POOL_VM_COUNT) .withVirtualMachineConfiguration(configuration) .withNetworkConfiguration(networkConfiguration); try { batchClient.poolOperations().createPool(poolConfig); throw new Exception("The test case should throw exception here"); } catch (BatchErrorException err) { if (err.body().code().equals("InvalidPropertyValue")) { for (int i = 0; i < err.body().values().size(); i++) { if (err.body().values().get(i).key().equals("Reason")) { Assert.assertEquals( "The specified imageReference with publisher Canonical offer UbuntuServer sku 18.04-LTS does not support container feature.", err.body().values().get(i).value()); return; } } throw new Exception("Couldn't find expect error reason"); } else { throw err; } } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void shouldFailOnCreateLinuxPoolWithWindowsConfig() throws Exception { String poolId = getStringIdWithUserNamePrefix("-createLinuxPool"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 0; List<String> images = new ArrayList<String>(); images.add("ubuntu"); VirtualMachineConfiguration configuration = new VirtualMachineConfiguration(); configuration .withImageReference( new 
ImageReference().withPublisher("Canonical").withOffer("UbuntuServer").withSku("16.04-LTS")) .withNodeAgentSKUId("batch.node.ubuntu 16.04"); UserAccount windowsUser = new UserAccount(); windowsUser.withWindowsUserConfiguration(new WindowsUserConfiguration().withLoginMode(LoginMode.INTERACTIVE)) .withName("testaccount") .withPassword("password"); ArrayList<UserAccount> users = new ArrayList<UserAccount>(); users.add(windowsUser); PoolAddParameter pool = new PoolAddParameter().withId(poolId) .withVirtualMachineConfiguration(configuration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withTargetLowPriorityNodes(0) .withVmSize(POOL_VM_SIZE) .withUserAccounts(users) .withNetworkConfiguration(networkConfiguration); try { batchClient.poolOperations().createPool(pool); throw new Exception("The test case should throw exception here"); } catch (BatchErrorException err) { if (err.body().code().equals("InvalidPropertyValue")) { for (int i = 0; i < err.body().values().size(); i++) { if (err.body().values().get(i).key().equals("Reason")) { Assert.assertEquals( "The user configuration for user account 'testaccount' has a mismatch with the OS (Windows/Linux) configuration specified in VirtualMachineConfiguration", err.body().values().get(i).value()); return; } } throw new Exception("Couldn't find expect error reason"); } else { throw err; } } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void canCRUDLowPriPaaSPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-testpool4"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 1; int POOL_LOW_PRI_VM_COUNT = 2; long POOL_STEADY_TIMEOUT_IN_Milliseconds = 10 * 60 * 1000; if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference().withPublisher("Canonical") .withOffer("UbuntuServer").withSku("18.04-LTS").withVersion("latest"); 
VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference).withNodeAgentSKUId("batch.node.ubuntu 18.04"); batchClient.poolOperations().createPool(new PoolAddParameter().withId(poolId) .withVmSize(POOL_VM_SIZE) .withVirtualMachineConfiguration(vmConfiguration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withTargetLowPriorityNodes(POOL_LOW_PRI_VM_COUNT)); } try { Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_Milliseconds); Assert.assertEquals(POOL_VM_COUNT, (long) pool.currentDedicatedNodes()); Assert.assertEquals(POOL_LOW_PRI_VM_COUNT, (long) pool.currentLowPriorityNodes()); batchClient.poolOperations().resizePool(poolId, null, 1); pool = batchClient.poolOperations().getPool(poolId); Assert.assertEquals(POOL_VM_COUNT, (long) pool.targetDedicatedNodes()); Assert.assertEquals(1, (long) pool.targetLowPriorityNodes()); boolean deleted = false; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_Milliseconds * 2) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 15 seconds for pool delete..."); threadSleepInRecordMode(15 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } private static CloudPool waitForPoolState(String poolId, AllocationState targetState, long poolAllocationTimeoutInMilliseconds) throws IOException, InterruptedException { long startTime = System.currentTimeMillis(); long elapsedTime = 0L; boolean allocationStateReached 
= false; CloudPool pool = null; while (elapsedTime < poolAllocationTimeoutInMilliseconds) { pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull(pool); if (pool.allocationState() == targetState) { allocationStateReached = true; break; } System.out.println("wait 30 seconds for pool allocationStateReached..."); threadSleepInRecordMode(30 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue("The pool did not reach a allocationStateReached state in the allotted time", allocationStateReached); return pool; } @Test public void canCRUDPaaSPool() throws Exception { String poolId = getStringIdWithUserNamePrefix("-CRUDPaaS"); String POOL_VM_SIZE = "STANDARD_D1_V2"; int POOL_VM_COUNT = 3; long POOL_STEADY_TIMEOUT_IN_Milliseconds = 15 * 60 * 1000; if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference().withPublisher("Canonical") .withOffer("UbuntuServer").withSku("18.04-LTS").withVersion("latest"); VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference).withNodeAgentSKUId("batch.node.ubuntu 18.04"); List<UserAccount> userList = new ArrayList<>(); userList.add(new UserAccount().withName("test-user-1").withPassword("kt userList.add(new UserAccount().withName("test-user-2").withPassword("kt .withElevationLevel(ElevationLevel.ADMIN)); PoolAddParameter addParameter = new PoolAddParameter().withId(poolId) .withVmSize(POOL_VM_SIZE) .withVirtualMachineConfiguration(vmConfiguration) .withTargetDedicatedNodes(POOL_VM_COUNT) .withUserAccounts(userList); batchClient.poolOperations().createPool(addParameter); } try { Assert.assertTrue(batchClient.poolOperations().existsPool(poolId)); long startTime = System.currentTimeMillis(); long elapsedTime = 0L; CloudPool pool = waitForPoolState(poolId, AllocationState.STEADY, POOL_STEADY_TIMEOUT_IN_Milliseconds); Assert.assertNotNull(pool.userAccounts()); Assert.assertEquals("test-user-1", 
pool.userAccounts().get(0).name()); Assert.assertEquals(ElevationLevel.NON_ADMIN, pool.userAccounts().get(0).elevationLevel()); Assert.assertNull(pool.userAccounts().get(0).password()); Assert.assertEquals(ElevationLevel.ADMIN, pool.userAccounts().get(1).elevationLevel()); List<CloudPool> pools = batchClient.poolOperations().listPools(); Assert.assertTrue(pools.size() > 0); boolean found = false; for (CloudPool p : pools) { if (p.id().equals(poolId)) { found = true; break; } } Assert.assertTrue(found); PoolNodeCounts poolNodeCount = null; List<PoolNodeCounts> poolNodeCounts = batchClient.accountOperations().listPoolNodeCounts(); for (PoolNodeCounts tmp : poolNodeCounts) { if (tmp.poolId().equals(poolId)) { poolNodeCount = tmp; break; } } Assert.assertNotNull(poolNodeCount); Assert.assertNotNull(poolNodeCount.lowPriority()); Assert.assertEquals(0, poolNodeCount.lowPriority().total()); Assert.assertEquals(3, poolNodeCount.dedicated().total()); LinkedList<MetadataItem> metadata = new LinkedList<>(); metadata.add((new MetadataItem()).withName("key1").withValue("value1")); batchClient.poolOperations().patchPool(poolId, null, null, null, metadata); pool = batchClient.poolOperations().getPool(poolId); Assert.assertTrue(pool.metadata().size() == 1); Assert.assertTrue(pool.metadata().get(0).name().equals("key1")); batchClient.poolOperations().updatePoolProperties(poolId, null, new LinkedList<CertificateReference>(), new LinkedList<ApplicationPackageReference>(), new LinkedList<MetadataItem>()); pool = batchClient.poolOperations().getPool(poolId); Assert.assertNull(pool.metadata()); boolean deleted = false; batchClient.poolOperations().deletePool(poolId); while (elapsedTime < POOL_STEADY_TIMEOUT_IN_Milliseconds) { try { batchClient.poolOperations().getPool(poolId); } catch (BatchErrorException err) { if (err.body().code().equals(BatchErrorCodeStrings.PoolNotFound)) { deleted = true; break; } else { throw err; } } System.out.println("wait 5 seconds for pool delete..."); 
threadSleepInRecordMode(5 * 1000); elapsedTime = (new Date()).getTime() - startTime; } Assert.assertTrue(deleted); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void testPoolWithAutoOSUpgradeAndRollingUpgrade() throws Exception { String poolId = getStringIdWithUserNamePrefix("-autoOSUpgradeRollingUpgrade"); if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference() .withPublisher("Canonical") .withOffer("UbuntuServer") .withSku("18.04-LTS"); NodePlacementConfiguration nodePlacementConfiguration = new NodePlacementConfiguration() .withPolicy(NodePlacementPolicyType.ZONAL); VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference) .withNodeAgentSKUId("batch.node.ubuntu 18.04") .withNodePlacementConfiguration(nodePlacementConfiguration); UpgradePolicy upgradePolicy = new UpgradePolicy() .withMode(UpgradeMode.AUTOMATIC) .withAutomaticOSUpgradePolicy(new AutomaticOSUpgradePolicy() .withDisableAutomaticRollback(true) .withEnableAutomaticOSUpgrade(true) .withUseRollingUpgradePolicy(true) .withOsRollingUpgradeDeferral(true)) .withRollingUpgradePolicy(new RollingUpgradePolicy() .withEnableCrossZoneUpgrade(true) .withMaxBatchInstancePercent(20) .withMaxUnhealthyInstancePercent(20) .withMaxUnhealthyUpgradedInstancePercent(20) .withPauseTimeBetweenBatches("PT5S") .withPrioritizeUnhealthyInstances(false) .withRollbackFailedInstancesOnPolicyBreach(false)); PoolAddParameter testPoolWithUpgradePolicy = new PoolAddParameter() .withId(poolId) .withVmSize("STANDARD_D2S_V3") .withVirtualMachineConfiguration(vmConfiguration) .withUpgradePolicy(upgradePolicy); batchClient.poolOperations().createPool(testPoolWithUpgradePolicy); } try { CloudPool pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull(pool); Assert.assertEquals("automatic", 
pool.upgradePolicy().mode().toString()); Assert.assertTrue(pool.upgradePolicy().automaticOSUpgradePolicy().enableAutomaticOSUpgrade()); Assert.assertTrue(pool.upgradePolicy().rollingUpgradePolicy().enableCrossZoneUpgrade()); Assert.assertEquals(20, (int) pool.upgradePolicy().rollingUpgradePolicy().maxBatchInstancePercent()); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } @Test public void testPoolWithSecurityProfileAndOSDisk() throws Exception { String poolId = getStringIdWithUserNamePrefix("SecurityProfile"); if (!batchClient.poolOperations().existsPool(poolId)) { ImageReference imageReference = new ImageReference() .withPublisher("Canonical") .withOffer("0001-com-ubuntu-server-jammy") .withSku("22_04-lts"); SecurityProfile securityProfile = new SecurityProfile() .withSecurityType(SecurityTypes.TRUSTED_LAUNCH) .withEncryptionAtHost(true) .withUefiSettings(new UefiSettings() .withSecureBootEnabled(true) .withVTpmEnabled(true)); ManagedDisk managedDisk = new ManagedDisk() .withStorageAccountType(StorageAccountType.STANDARD_LRS); OSDisk osDisk = new OSDisk() .withCaching(CachingType.READ_WRITE) .withManagedDisk(managedDisk) .withDiskSizeGB(50) .withWriteAcceleratorEnabled(true); VirtualMachineConfiguration vmConfiguration = new VirtualMachineConfiguration() .withImageReference(imageReference) .withNodeAgentSKUId("batch.node.ubuntu 22.04") .withSecurityProfile(securityProfile) .withOsDisk(osDisk); PoolAddParameter poolAddParameter = new PoolAddParameter() .withId(poolId) .withVmSize("STANDARD_D2S_V3") .withVirtualMachineConfiguration(vmConfiguration) .withTargetDedicatedNodes(0); batchClient.poolOperations().createPool(poolAddParameter); } try { CloudPool pool = batchClient.poolOperations().getPool(poolId); Assert.assertNotNull(pool); SecurityProfile sp = pool.virtualMachineConfiguration().securityProfile(); Assert.assertEquals(SecurityTypes.TRUSTED_LAUNCH, 
sp.securityType()); Assert.assertTrue(sp.encryptionAtHost()); Assert.assertTrue(sp.uefiSettings().secureBootEnabled()); Assert.assertTrue(sp.uefiSettings().vTpmEnabled()); OSDisk disk = pool.virtualMachineConfiguration().osDisk(); Assert.assertEquals("readwrite", pool.virtualMachineConfiguration().osDisk().caching().toString().toLowerCase()); Assert.assertEquals(StorageAccountType.STANDARD_LRS, disk.managedDisk().storageAccountType()); Assert.assertEquals(Integer.valueOf(50), disk.diskSizeGB()); Assert.assertTrue(disk.writeAcceleratorEnabled()); } finally { try { if (batchClient.poolOperations().existsPool(poolId)) { batchClient.poolOperations().deletePool(poolId); } } catch (Exception e) { } } } }
We could also use `RpcUtils#terminateRpcService` or extend to take multiple `RpcServices` for termination.
public void testDeclineCheckpointInvocationWithUserException() throws Exception { RpcService rpcService1 = null; RpcService rpcService2 = null; try { final ActorSystem actorSystem1 = AkkaUtils.createDefaultActorSystem(); final ActorSystem actorSystem2 = AkkaUtils.createDefaultActorSystem(); rpcService1 = new AkkaRpcService(actorSystem1, testingTimeout); rpcService2 = new AkkaRpcService(actorSystem2, testingTimeout); final CompletableFuture<Throwable> declineCheckpointMessageFuture = new CompletableFuture<>(); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobMasterConfiguration jobMasterConfiguration = JobMasterConfiguration.fromConfiguration(configuration); final JobMaster jobMaster = new JobMaster( rpcService1, jobMasterConfiguration, jmResourceId, jobGraph, haServices, DefaultSlotPoolFactory.fromConfiguration(configuration, rpcService1), jobManagerSharedServices, heartbeatServices, blobServer, UnregisteredJobManagerJobMetricGroupFactory.INSTANCE, new NoOpOnCompletionActions(), testingFatalErrorHandler, JobMasterTest.class.getClassLoader()) { @Override public void declineCheckpoint(DeclineCheckpoint declineCheckpoint) { declineCheckpointMessageFuture.complete(declineCheckpoint.getReason()); } }; jobMaster.start(jobMasterId, testingTimeout).get(); final String className = "UserException"; final URLClassLoader userClassLoader = ClassLoaderUtils.compileAndLoadJava( temporaryFolder.newFolder(), className + ".java", String.format("public class %s extends RuntimeException { public %s() {super(\"UserMessage\");} }", className, className)); Throwable userException = (Throwable) Class.forName(className, false, userClassLoader).newInstance(); CompletableFuture<JobMasterGateway> jobMasterGateway = rpcService2.connect(jobMaster.getAddress(), jobMaster.getFencingToken(), JobMasterGateway.class); jobMasterGateway.thenAccept(gateway -> { try { gateway.declineCheckpoint(new DeclineCheckpoint( 
jobGraph.getJobID(), new ExecutionAttemptID(1, 1), 1, userException ) ); } catch (Exception e) { throw new RuntimeException(e); } }); Throwable throwable = declineCheckpointMessageFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(throwable, instanceOf(SerializedThrowable.class)); assertThat(throwable.getMessage(), equalTo(userException.getMessage())); } finally { final Collection<CompletableFuture<?>> terminationFutures = new ArrayList<>(2); if (rpcService1 != null) { terminationFutures.add(rpcService1.stopService()); } if (rpcService2 != null) { terminationFutures.add(rpcService2.stopService()); } FutureUtils.waitForAll(terminationFutures).get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); } }
FutureUtils.waitForAll(terminationFutures).get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS);
public void testDeclineCheckpointInvocationWithUserException() throws Exception { RpcService rpcService1 = null; RpcService rpcService2 = null; try { final ActorSystem actorSystem1 = AkkaUtils.createDefaultActorSystem(); final ActorSystem actorSystem2 = AkkaUtils.createDefaultActorSystem(); rpcService1 = new AkkaRpcService(actorSystem1, testingTimeout); rpcService2 = new AkkaRpcService(actorSystem2, testingTimeout); final CompletableFuture<Throwable> declineCheckpointMessageFuture = new CompletableFuture<>(); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobMasterConfiguration jobMasterConfiguration = JobMasterConfiguration.fromConfiguration(configuration); final JobMaster jobMaster = new JobMaster( rpcService1, jobMasterConfiguration, jmResourceId, jobGraph, haServices, DefaultSlotPoolFactory.fromConfiguration(configuration, rpcService1), jobManagerSharedServices, heartbeatServices, blobServer, UnregisteredJobManagerJobMetricGroupFactory.INSTANCE, new NoOpOnCompletionActions(), testingFatalErrorHandler, JobMasterTest.class.getClassLoader()) { @Override public void declineCheckpoint(DeclineCheckpoint declineCheckpoint) { declineCheckpointMessageFuture.complete(declineCheckpoint.getReason()); } }; jobMaster.start(jobMasterId, testingTimeout).get(); final String className = "UserException"; final URLClassLoader userClassLoader = ClassLoaderUtils.compileAndLoadJava( temporaryFolder.newFolder(), className + ".java", String.format("public class %s extends RuntimeException { public %s() {super(\"UserMessage\");} }", className, className)); Throwable userException = (Throwable) Class.forName(className, false, userClassLoader).newInstance(); CompletableFuture<JobMasterGateway> jobMasterGateway = rpcService2.connect(jobMaster.getAddress(), jobMaster.getFencingToken(), JobMasterGateway.class); jobMasterGateway.thenAccept(gateway -> { gateway.declineCheckpoint(new DeclineCheckpoint( jobGraph.getJobID(), 
new ExecutionAttemptID(1, 1), 1, userException ) ); }); Throwable throwable = declineCheckpointMessageFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(throwable, instanceOf(SerializedThrowable.class)); assertThat(throwable.getMessage(), equalTo(userException.getMessage())); } finally { RpcUtils.terminateRpcServices(testingTimeout, rpcService1, rpcService2); } }
class JobMasterTest extends TestLogger { private static final TestingInputSplit[] EMPTY_TESTING_INPUT_SPLITS = new TestingInputSplit[0]; @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder(); private static final Time testingTimeout = Time.seconds(10L); private static final long fastHeartbeatInterval = 1L; private static final long fastHeartbeatTimeout = 5L; private static final long heartbeatInterval = 1000L; private static final long heartbeatTimeout = 5000L; private static final JobGraph jobGraph = new JobGraph(); private static TestingRpcService rpcService; private static HeartbeatServices fastHeartbeatServices; private static HeartbeatServices heartbeatServices; private BlobServer blobServer; private Configuration configuration; private ResourceID jmResourceId; private JobMasterId jobMasterId; private TestingHighAvailabilityServices haServices; private SettableLeaderRetrievalService rmLeaderRetrievalService; private TestingFatalErrorHandler testingFatalErrorHandler; @BeforeClass public static void setupClass() { rpcService = new TestingRpcService(); fastHeartbeatServices = new TestingHeartbeatServices(fastHeartbeatInterval, fastHeartbeatTimeout, rpcService.getScheduledExecutor()); heartbeatServices = new TestingHeartbeatServices(heartbeatInterval, heartbeatTimeout, rpcService.getScheduledExecutor()); } @Before public void setup() throws IOException { configuration = new Configuration(); haServices = new TestingHighAvailabilityServices(); jobMasterId = JobMasterId.generate(); jmResourceId = ResourceID.generate(); testingFatalErrorHandler = new TestingFatalErrorHandler(); haServices.setCheckpointRecoveryFactory(new StandaloneCheckpointRecoveryFactory()); rmLeaderRetrievalService = new SettableLeaderRetrievalService( null, null); haServices.setResourceManagerLeaderRetriever(rmLeaderRetrievalService); configuration.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); blobServer = new 
BlobServer(configuration, new VoidBlobStore()); blobServer.start(); } @After public void teardown() throws Exception { if (testingFatalErrorHandler != null) { testingFatalErrorHandler.rethrowError(); } if (blobServer != null) { blobServer.close(); } rpcService.clearGateways(); } @AfterClass public static void teardownClass() { if (rpcService != null) { rpcService.stopService(); rpcService = null; } } @Test @Test public void testHeartbeatTimeoutWithTaskManager() throws Exception { final CompletableFuture<ResourceID> heartbeatResourceIdFuture = new CompletableFuture<>(); final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>(); final TaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); final TestingTaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setHeartbeatJobManagerConsumer(heartbeatResourceIdFuture::complete) .setDisconnectJobManagerConsumer((jobId, throwable) -> disconnectedJobManagerFuture.complete(jobId)) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, jobManagerSharedServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); CompletableFuture<RegistrationResponse> registrationResponse = jobMasterGateway.registerTaskManager( taskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout); registrationResponse.get(); final ResourceID heartbeatResourceId = heartbeatResourceIdFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(heartbeatResourceId, 
Matchers.equalTo(jmResourceId)); final JobID disconnectedJobManager = disconnectedJobManagerFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(disconnectedJobManager, Matchers.equalTo(jobGraph.getJobID())); } finally { jobManagerSharedServices.shutdown(); RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testHeartbeatTimeoutWithResourceManager() throws Exception { final String resourceManagerAddress = "rm"; final ResourceManagerId resourceManagerId = ResourceManagerId.generate(); final ResourceID rmResourceId = new ResourceID(resourceManagerAddress); final TestingResourceManagerGateway resourceManagerGateway = new TestingResourceManagerGateway( resourceManagerId, rmResourceId, resourceManagerAddress, "localhost"); final CompletableFuture<Tuple3<JobMasterId, ResourceID, JobID>> jobManagerRegistrationFuture = new CompletableFuture<>(); final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>(); final CountDownLatch registrationAttempts = new CountDownLatch(2); resourceManagerGateway.setRegisterJobManagerConsumer(tuple -> { jobManagerRegistrationFuture.complete( Tuple3.of( tuple.f0, tuple.f1, tuple.f3)); registrationAttempts.countDown(); }); resourceManagerGateway.setDisconnectJobManagerConsumer(tuple -> disconnectedJobManagerFuture.complete(tuple.f0)); rpcService.registerGateway(resourceManagerAddress, resourceManagerGateway); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, jobManagerSharedServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); rmLeaderRetrievalService.notifyListener(resourceManagerAddress, resourceManagerId.toUUID()); final Tuple3<JobMasterId, ResourceID, JobID> registrationInformation = 
jobManagerRegistrationFuture.get( testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(registrationInformation.f0, Matchers.equalTo(jobMasterId)); assertThat(registrationInformation.f1, Matchers.equalTo(jmResourceId)); assertThat(registrationInformation.f2, Matchers.equalTo(jobGraph.getJobID())); final JobID disconnectedJobManager = disconnectedJobManagerFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(disconnectedJobManager, Matchers.equalTo(jobGraph.getJobID())); registrationAttempts.await(); } finally { jobManagerSharedServices.shutdown(); RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that a JobMaster will restore the given JobGraph from its savepoint upon * initial submission. */ @Test public void testRestoringFromSavepoint() throws Exception { final long savepointId = 42L; final File savepointFile = createSavepoint(savepointId); final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), true); final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory(completedCheckpointStore, new StandaloneCheckpointIDCounter()); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); try { final CompletedCheckpoint savepointCheckpoint = completedCheckpointStore.getLatestCheckpoint(); assertThat(savepointCheckpoint, Matchers.notNullValue()); assertThat(savepointCheckpoint.getCheckpointID(), is(savepointId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that a JobMaster will only restore a modified 
JobGraph if non * restored state is allowed. */ @Test public void testRestoringModifiedJobFromSavepoint() throws Exception { final long savepointId = 42L; final OperatorID operatorID = new OperatorID(); final File savepointFile = createSavepointWithOperatorState(savepointId, operatorID); final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), false); final JobVertex jobVertex = new JobVertex("New operator"); jobVertex.setInvokableClass(NoOpInvokable.class); final JobGraph jobGraphWithNewOperator = createJobGraphFromJobVerticesWithCheckpointing(savepointRestoreSettings, jobVertex); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory(completedCheckpointStore, new StandaloneCheckpointIDCounter()); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); try { createJobMaster( configuration, jobGraphWithNewOperator, haServices, new TestingJobManagerSharedServicesBuilder().build()); fail("Should fail because we cannot resume the changed JobGraph from the savepoint."); } catch (IllegalStateException expected) { } jobGraphWithNewOperator.setSavepointRestoreSettings( SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), true)); final JobMaster jobMaster = createJobMaster( configuration, jobGraphWithNewOperator, haServices, new TestingJobManagerSharedServicesBuilder().build()); try { final CompletedCheckpoint savepointCheckpoint = completedCheckpointStore.getLatestCheckpoint(); assertThat(savepointCheckpoint, Matchers.notNullValue()); assertThat(savepointCheckpoint.getCheckpointID(), is(savepointId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that in a streaming use case where checkpointing is enabled, a * fixed delay with Integer.MAX_VALUE retries is instantiated 
if no other restart * strategy has been specified. */ @Test public void testAutomaticRestartingWhenCheckpointing() throws Exception { final long savepointId = 42L; final File savepointFile = createSavepoint(savepointId); final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), true); final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory( completedCheckpointStore, new StandaloneCheckpointIDCounter()); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); final JobMaster jobMaster = createJobMaster( new Configuration(), jobGraph, haServices, new TestingJobManagerSharedServicesBuilder() .setRestartStrategyFactory(RestartStrategyFactory.createRestartStrategyFactory(configuration)) .build()); RestartStrategy restartStrategy = jobMaster.getRestartStrategy(); assertNotNull(restartStrategy); assertTrue(restartStrategy instanceof FixedDelayRestartStrategy); } /** * Tests that an existing checkpoint will have precedence over an savepoint. 
*/ @Test public void testCheckpointPrecedesSavepointRecovery() throws Exception { final long savepointId = 42L; final File savepointFile = createSavepoint(savepointId); final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath("" + savepointFile.getAbsolutePath(), true); final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings); final long checkpointId = 1L; final CompletedCheckpoint completedCheckpoint = new CompletedCheckpoint( jobGraph.getJobID(), checkpointId, 1L, 1L, Collections.emptyMap(), null, CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), new DummyCheckpointStorageLocation()); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); completedCheckpointStore.addCheckpoint(completedCheckpoint); final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory(completedCheckpointStore, new StandaloneCheckpointIDCounter()); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); try { final CompletedCheckpoint savepointCheckpoint = completedCheckpointStore.getLatestCheckpoint(); assertThat(savepointCheckpoint, Matchers.notNullValue()); assertThat(savepointCheckpoint.getCheckpointID(), is(checkpointId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that the JobMaster retries the scheduling of a job * in case of a missing slot offering from a registered TaskExecutor. 
*/ @Test public void testSlotRequestTimeoutWhenNoSlotOffering() throws Exception { final JobGraph restartingJobGraph = createSingleVertexJobWithRestartStrategy(); final long slotRequestTimeout = 10L; configuration.setLong(JobManagerOptions.SLOT_REQUEST_TIMEOUT, slotRequestTimeout); final JobMaster jobMaster = createJobMaster( configuration, restartingJobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { final long start = System.nanoTime(); jobMaster.start(JobMasterId.generate(), testingTimeout).get(); final TestingResourceManagerGateway resourceManagerGateway = new TestingResourceManagerGateway(); final ArrayBlockingQueue<SlotRequest> blockingQueue = new ArrayBlockingQueue<>(2); resourceManagerGateway.setRequestSlotConsumer(blockingQueue::offer); rpcService.registerGateway(resourceManagerGateway.getAddress(), resourceManagerGateway); rmLeaderRetrievalService.notifyListener(resourceManagerGateway.getAddress(), resourceManagerGateway.getFencingToken().toUUID()); blockingQueue.take(); final CompletableFuture<TaskDeploymentDescriptor> submittedTaskFuture = new CompletableFuture<>(); final LocalTaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); final TestingTaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setSubmitTaskConsumer((tdd, ignored) -> { submittedTaskFuture.complete(tdd); return CompletableFuture.completedFuture(Acknowledge.get()); }) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); jobMasterGateway.registerTaskManager(taskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout).get(); final SlotRequest slotRequest = blockingQueue.take(); final long end = System.nanoTime(); assertThat((end - start) / 1_000_000L, Matchers.greaterThanOrEqualTo(slotRequestTimeout)); 
assertThat(submittedTaskFuture.isDone(), is(false)); final SlotOffer slotOffer = new SlotOffer(slotRequest.getAllocationId(), 0, ResourceProfile.UNKNOWN); final CompletableFuture<Collection<SlotOffer>> acceptedSlotsFuture = jobMasterGateway.offerSlots(taskManagerLocation.getResourceID(), Collections.singleton(slotOffer), testingTimeout); final Collection<SlotOffer> acceptedSlots = acceptedSlotsFuture.get(); assertThat(acceptedSlots, hasSize(1)); final SlotOffer acceptedSlot = acceptedSlots.iterator().next(); assertThat(acceptedSlot.getAllocationId(), equalTo(slotRequest.getAllocationId())); final TaskDeploymentDescriptor taskDeploymentDescriptor = submittedTaskFuture.get(); assertThat(taskDeploymentDescriptor.getAllocationId(), equalTo(slotRequest.getAllocationId())); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that we can close an unestablished ResourceManager connection. */ @Test public void testCloseUnestablishedResourceManagerConnection() throws Exception { final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); try { jobMaster.start(JobMasterId.generate(), testingTimeout).get(); final ResourceManagerId resourceManagerId = ResourceManagerId.generate(); final String firstResourceManagerAddress = "address1"; final String secondResourceManagerAddress = "address2"; final TestingResourceManagerGateway firstResourceManagerGateway = new TestingResourceManagerGateway(); final TestingResourceManagerGateway secondResourceManagerGateway = new TestingResourceManagerGateway(); rpcService.registerGateway(firstResourceManagerAddress, firstResourceManagerGateway); rpcService.registerGateway(secondResourceManagerAddress, secondResourceManagerGateway); final OneShotLatch firstJobManagerRegistration = new OneShotLatch(); final OneShotLatch secondJobManagerRegistration = new OneShotLatch(); firstResourceManagerGateway.setRegisterJobManagerConsumer( 
jobMasterIdResourceIDStringJobIDTuple4 -> firstJobManagerRegistration.trigger()); secondResourceManagerGateway.setRegisterJobManagerConsumer( jobMasterIdResourceIDStringJobIDTuple4 -> secondJobManagerRegistration.trigger()); rmLeaderRetrievalService.notifyListener(firstResourceManagerAddress, resourceManagerId.toUUID()); firstJobManagerRegistration.await(); rmLeaderRetrievalService.notifyListener(secondResourceManagerAddress, resourceManagerId.toUUID()); secondJobManagerRegistration.await(); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that we continue reconnecting to the latest known RM after a disconnection * message. */ @Test public void testReconnectionAfterDisconnect() throws Exception { final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); final BlockingQueue<JobMasterId> registrationsQueue = new ArrayBlockingQueue<>(1); testingResourceManagerGateway.setRegisterJobManagerConsumer( jobMasterIdResourceIDStringJobIDTuple4 -> registrationsQueue.offer(jobMasterIdResourceIDStringJobIDTuple4.f0)); rpcService.registerGateway(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway); final ResourceManagerId resourceManagerId = testingResourceManagerGateway.getFencingToken(); rmLeaderRetrievalService.notifyListener( testingResourceManagerGateway.getAddress(), resourceManagerId.toUUID()); final JobMasterId firstRegistrationAttempt = registrationsQueue.take(); assertThat(firstRegistrationAttempt, equalTo(jobMasterId)); assertThat(registrationsQueue.isEmpty(), is(true)); 
jobMasterGateway.disconnectResourceManager(resourceManagerId, new FlinkException("Test exception")); assertThat(registrationsQueue.take(), equalTo(jobMasterId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that the a JM connects to the leading RM after regaining leadership. */ @Test public void testResourceManagerConnectionAfterRegainingLeadership() throws Exception { final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); final BlockingQueue<JobMasterId> registrationQueue = new ArrayBlockingQueue<>(1); testingResourceManagerGateway.setRegisterJobManagerConsumer( jobMasterIdResourceIDStringJobIDTuple4 -> registrationQueue.offer(jobMasterIdResourceIDStringJobIDTuple4.f0)); final String resourceManagerAddress = testingResourceManagerGateway.getAddress(); rpcService.registerGateway(resourceManagerAddress, testingResourceManagerGateway); rmLeaderRetrievalService.notifyListener(resourceManagerAddress, testingResourceManagerGateway.getFencingToken().toUUID()); final JobMasterId firstRegistrationAttempt = registrationQueue.take(); assertThat(firstRegistrationAttempt, equalTo(jobMasterId)); jobMaster.suspend(new FlinkException("Test exception."), testingTimeout).get(); final JobMasterId jobMasterId2 = JobMasterId.generate(); jobMaster.start(jobMasterId2, testingTimeout).get(); final JobMasterId secondRegistrationAttempt = registrationQueue.take(); assertThat(secondRegistrationAttempt, equalTo(jobMasterId2)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testRequestNextInputSplit() throws Exception { final List<TestingInputSplit> 
expectedInputSplits = Arrays.asList( new TestingInputSplit(1), new TestingInputSplit(42), new TestingInputSplit(1337)); InputSplitSource<TestingInputSplit> inputSplitSource = new TestingInputSplitSource(expectedInputSplits); JobVertex source = new JobVertex("vertex1"); source.setParallelism(1); source.setInputSplitSource(inputSplitSource); source.setInvokableClass(AbstractInvokable.class); final JobGraph testJobGraph = new JobGraph(source); testJobGraph.setAllowQueuedScheduling(true); configuration.setLong(ConfigConstants.RESTART_STRATEGY_FIXED_DELAY_ATTEMPTS, 1); configuration.setString(ConfigConstants.RESTART_STRATEGY_FIXED_DELAY_DELAY, "0 s"); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder() .setRestartStrategyFactory(RestartStrategyFactory.createRestartStrategyFactory(configuration)) .build(); final JobMaster jobMaster = createJobMaster( configuration, testJobGraph, haServices, jobManagerSharedServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); ExecutionGraph eg = jobMaster.getExecutionGraph(); ExecutionVertex ev = eg.getAllExecutionVertices().iterator().next(); final SupplierWithException<SerializedInputSplit, Exception> inputSplitSupplier = () -> jobMasterGateway.requestNextInputSplit( source.getID(), ev.getCurrentExecutionAttempt().getAttemptId()).get(); List<InputSplit> actualInputSplits = getInputSplits( expectedInputSplits.size(), inputSplitSupplier); final Matcher<Iterable<? 
extends InputSplit>> expectedInputSplitsMatcher = containsInAnyOrder(expectedInputSplits.toArray(EMPTY_TESTING_INPUT_SPLITS)); assertThat(actualInputSplits, expectedInputSplitsMatcher); final long maxWaitMillis = 2000L; ExecutionGraphTestUtils.waitUntilExecutionVertexState(ev, ExecutionState.SCHEDULED, maxWaitMillis); eg.failGlobal(new Exception("Testing exception")); ExecutionGraphTestUtils.waitUntilExecutionVertexState(ev, ExecutionState.SCHEDULED, maxWaitMillis); actualInputSplits = getInputSplits( expectedInputSplits.size(), inputSplitSupplier); assertThat(actualInputSplits, expectedInputSplitsMatcher); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Nonnull private static List<InputSplit> getInputSplits(int numberInputSplits, SupplierWithException<SerializedInputSplit, Exception> nextInputSplit) throws Exception { final List<InputSplit> actualInputSplits = new ArrayList<>(numberInputSplits); for (int i = 0; i < numberInputSplits; i++) { final SerializedInputSplit serializedInputSplit = nextInputSplit.get(); assertThat(serializedInputSplit.isEmpty(), is(false)); actualInputSplits.add(InstantiationUtil.deserializeObject(serializedInputSplit.getInputSplitData(), ClassLoader.getSystemClassLoader())); } final SerializedInputSplit serializedInputSplit = nextInputSplit.get(); if (!serializedInputSplit.isEmpty()) { InputSplit emptyInputSplit = InstantiationUtil.deserializeObject(serializedInputSplit.getInputSplitData(), ClassLoader.getSystemClassLoader()); assertThat(emptyInputSplit, is(nullValue())); } return actualInputSplits; } private static final class TestingInputSplitSource implements InputSplitSource<TestingInputSplit> { private static final long serialVersionUID = -2344684048759139086L; private final List<TestingInputSplit> inputSplits; private TestingInputSplitSource(List<TestingInputSplit> inputSplits) { this.inputSplits = inputSplits; } @Override public TestingInputSplit[] createInputSplits(int minNumSplits) { return 
inputSplits.toArray(EMPTY_TESTING_INPUT_SPLITS); } @Override public InputSplitAssigner getInputSplitAssigner(TestingInputSplit[] inputSplits) { return new DefaultInputSplitAssigner(inputSplits); } } private static final class TestingInputSplit implements InputSplit { private static final long serialVersionUID = -5404803705463116083L; private final int splitNumber; TestingInputSplit(int number) { this.splitNumber = number; } public int getSplitNumber() { return splitNumber; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TestingInputSplit that = (TestingInputSplit) o; return splitNumber == that.splitNumber; } @Override public int hashCode() { return Objects.hash(splitNumber); } } @Test public void testRequestKvStateWithoutRegistration() throws Exception { final JobGraph graph = createKvJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); try { jobMasterGateway.requestKvStateLocation(graph.getJobID(), "unknown").get(); fail("Expected to fail with UnknownKvStateLocation"); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, UnknownKvStateLocation.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testRequestKvStateOfWrongJob() throws Exception { final JobGraph graph = createKvJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final 
JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); try { jobMasterGateway.requestKvStateLocation(new JobID(), "unknown").get(); fail("Expected to fail with FlinkJobNotFoundException"); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Nonnull public JobGraph createKvJobGraph() { final JobVertex vertex1 = new JobVertex("v1"); vertex1.setParallelism(4); vertex1.setMaxParallelism(16); vertex1.setInvokableClass(BlockingNoOpInvokable.class); final JobVertex vertex2 = new JobVertex("v2"); vertex2.setParallelism(4); vertex2.setMaxParallelism(16); vertex2.setInvokableClass(BlockingNoOpInvokable.class); return new JobGraph(vertex1, vertex2); } @Test public void testRequestKvStateWithIrrelevantRegistration() throws Exception { final JobGraph graph = createKvJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); try { jobMasterGateway.notifyKvStateRegistered( new JobID(), new JobVertexID(), new KeyGroupRange(0, 0), "any-name", new KvStateID(), new InetSocketAddress(InetAddress.getLocalHost(), 1233)).get(); fail("Expected to fail with FlinkJobNotFoundException."); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testRegisterAndUnregisterKvState() throws Exception { final JobGraph graph = 
createKvJobGraph(); final List<JobVertex> jobVertices = graph.getVerticesSortedTopologicallyFromSources(); final JobVertex vertex1 = jobVertices.get(0); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final String registrationName = "register-me"; final KvStateID kvStateID = new KvStateID(); final KeyGroupRange keyGroupRange = new KeyGroupRange(0, 0); final InetSocketAddress address = new InetSocketAddress(InetAddress.getLocalHost(), 1029); jobMasterGateway.notifyKvStateRegistered( graph.getJobID(), vertex1.getID(), keyGroupRange, registrationName, kvStateID, address).get(); final KvStateLocation location = jobMasterGateway.requestKvStateLocation(graph.getJobID(), registrationName).get(); assertEquals(graph.getJobID(), location.getJobId()); assertEquals(vertex1.getID(), location.getJobVertexId()); assertEquals(vertex1.getMaxParallelism(), location.getNumKeyGroups()); assertEquals(1, location.getNumRegisteredKeyGroups()); assertEquals(1, keyGroupRange.getNumberOfKeyGroups()); assertEquals(kvStateID, location.getKvStateID(keyGroupRange.getStartKeyGroup())); assertEquals(address, location.getKvStateServerAddress(keyGroupRange.getStartKeyGroup())); jobMasterGateway.notifyKvStateUnregistered( graph.getJobID(), vertex1.getID(), keyGroupRange, registrationName).get(); try { jobMasterGateway.requestKvStateLocation(graph.getJobID(), registrationName).get(); fail("Expected to fail with an UnknownKvStateLocation."); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, UnknownKvStateLocation.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public 
void testDuplicatedKvStateRegistrationsFailTask() throws Exception { final JobGraph graph = createKvJobGraph(); final List<JobVertex> jobVertices = graph.getVerticesSortedTopologicallyFromSources(); final JobVertex vertex1 = jobVertices.get(0); final JobVertex vertex2 = jobVertices.get(1); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final String registrationName = "duplicate-me"; final KvStateID kvStateID = new KvStateID(); final KeyGroupRange keyGroupRange = new KeyGroupRange(0, 0); final InetSocketAddress address = new InetSocketAddress(InetAddress.getLocalHost(), 4396); jobMasterGateway.notifyKvStateRegistered( graph.getJobID(), vertex1.getID(), keyGroupRange, registrationName, kvStateID, address).get(); try { jobMasterGateway.notifyKvStateRegistered( graph.getJobID(), vertex2.getID(), keyGroupRange, registrationName, kvStateID, address).get(); fail("Expected to fail because of clashing registration message."); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowableWithMessage(e, "Registration name clash").isPresent()); assertEquals(JobStatus.FAILED, jobMaster.getExecutionGraph().getState()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests the {@link JobMaster * call for a finished result partition. 
*/ @Test public void testRequestPartitionState() throws Exception { final JobGraph producerConsumerJobGraph = producerConsumerJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, producerConsumerJobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); final CompletableFuture<AllocationID> allocationIdFuture = new CompletableFuture<>(); testingResourceManagerGateway.setRequestSlotConsumer(slotRequest -> allocationIdFuture.complete(slotRequest.getAllocationId())); rpcService.registerGateway(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway); final CompletableFuture<TaskDeploymentDescriptor> tddFuture = new CompletableFuture<>(); final TestingTaskExecutorGateway testingTaskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setSubmitTaskConsumer((taskDeploymentDescriptor, jobMasterId) -> { tddFuture.complete(taskDeploymentDescriptor); return CompletableFuture.completedFuture(Acknowledge.get()); }) .createTestingTaskExecutorGateway(); rpcService.registerGateway(testingTaskExecutorGateway.getAddress(), testingTaskExecutorGateway); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); rmLeaderRetrievalService.notifyListener(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway.getFencingToken().toUUID()); final AllocationID allocationId = allocationIdFuture.get(); final LocalTaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); jobMasterGateway.registerTaskManager(testingTaskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout).get(); final SlotOffer slotOffer = new SlotOffer(allocationId, 0, ResourceProfile.UNKNOWN); final 
Collection<SlotOffer> slotOffers = jobMasterGateway.offerSlots(taskManagerLocation.getResourceID(), Collections.singleton(slotOffer), testingTimeout).get(); assertThat(slotOffers, hasSize(1)); assertThat(slotOffers, contains(slotOffer)); final TaskDeploymentDescriptor tdd = tddFuture.get(); assertThat(tdd.getProducedPartitions(), hasSize(1)); final ResultPartitionDeploymentDescriptor partition = tdd.getProducedPartitions().iterator().next(); final ExecutionAttemptID executionAttemptId = tdd.getExecutionAttemptId(); final ExecutionAttemptID copiedExecutionAttemptId = new ExecutionAttemptID(executionAttemptId.getLowerPart(), executionAttemptId.getUpperPart()); jobMasterGateway.updateTaskExecutionState(new TaskExecutionState(producerConsumerJobGraph.getJobID(), executionAttemptId, ExecutionState.FINISHED)).get(); final ResultPartitionID partitionId = new ResultPartitionID(partition.getPartitionId(), copiedExecutionAttemptId); CompletableFuture<ExecutionState> partitionStateFuture = jobMasterGateway.requestPartitionState(partition.getResultId(), partitionId); assertThat(partitionStateFuture.get(), equalTo(ExecutionState.FINISHED)); partitionStateFuture = jobMasterGateway.requestPartitionState(partition.getResultId(), new ResultPartitionID()); try { partitionStateFuture.get(); fail("Expected failure."); } catch (ExecutionException e) { assertThat(ExceptionUtils.findThrowable(e, IllegalArgumentException.class).isPresent(), is(true)); } partitionStateFuture = jobMasterGateway.requestPartitionState(new IntermediateDataSetID(), partitionId); try { partitionStateFuture.get(); fail("Expected failure."); } catch (ExecutionException e) { assertThat(ExceptionUtils.findThrowable(e, IllegalArgumentException.class).isPresent(), is(true)); } partitionStateFuture = jobMasterGateway.requestPartitionState(partition.getResultId(), new ResultPartitionID(partition.getPartitionId(), new ExecutionAttemptID())); try { partitionStateFuture.get(); fail("Expected failure."); } catch 
(ExecutionException e) { assertThat(ExceptionUtils.findThrowable(e, PartitionProducerDisposedException.class).isPresent(), is(true)); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that the timeout in {@link JobMasterGateway * is respected. */ @Test public void testTriggerSavepointTimeout() throws Exception { final JobMaster jobMaster = new JobMaster( rpcService, JobMasterConfiguration.fromConfiguration(configuration), jmResourceId, jobGraph, haServices, DefaultSlotPoolFactory.fromConfiguration(configuration, rpcService), new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices, blobServer, UnregisteredJobManagerJobMetricGroupFactory.INSTANCE, new NoOpOnCompletionActions(), testingFatalErrorHandler, JobMasterTest.class.getClassLoader()) { @Override public CompletableFuture<String> triggerSavepoint( @Nullable final String targetDirectory, final boolean cancelJob, final Time timeout) { return new CompletableFuture<>(); } }; try { final CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); final CompletableFuture<String> savepointFutureLowTimeout = jobMasterGateway.triggerSavepoint("/tmp", false, Time.milliseconds(1)); final CompletableFuture<String> savepointFutureHighTimeout = jobMasterGateway.triggerSavepoint("/tmp", false, RpcUtils.INF_TIMEOUT); try { savepointFutureLowTimeout.get(testingTimeout.getSize(), testingTimeout.getUnit()); fail(); } catch (final ExecutionException e) { final Throwable cause = ExceptionUtils.stripExecutionException(e); assertThat(cause, instanceOf(TimeoutException.class)); } assertThat(savepointFutureHighTimeout.isDone(), is(equalTo(false))); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that the TaskExecutor is released if all of its slots have 
been freed. */ @Test public void testReleasingTaskExecutorIfNoMoreSlotsRegistered() throws Exception { final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobGraph jobGraph = createSingleVertexJobWithRestartStrategy(); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, jobManagerSharedServices, heartbeatServices); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); rpcService.registerGateway(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway); rmLeaderRetrievalService.notifyListener(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway.getFencingToken().toUUID()); final CompletableFuture<AllocationID> allocationIdFuture = new CompletableFuture<>(); testingResourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIdFuture.complete(slotRequest.getAllocationId())); final CompletableFuture<JobID> disconnectTaskExecutorFuture = new CompletableFuture<>(); final CompletableFuture<AllocationID> freedSlotFuture = new CompletableFuture<>(); final TestingTaskExecutorGateway testingTaskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setFreeSlotFunction( (allocationID, throwable) -> { freedSlotFuture.complete(allocationID); return CompletableFuture.completedFuture(Acknowledge.get()); }) .setDisconnectJobManagerConsumer((jobID, throwable) -> disconnectTaskExecutorFuture.complete(jobID)) .createTestingTaskExecutorGateway(); final TaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); rpcService.registerGateway(testingTaskExecutorGateway.getAddress(), testingTaskExecutorGateway); try { jobMaster.start(jobMasterId, testingTimeout).get(); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); final AllocationID allocationId = allocationIdFuture.get(); 
jobMasterGateway.registerTaskManager(testingTaskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout).get(); final SlotOffer slotOffer = new SlotOffer(allocationId, 0, ResourceProfile.UNKNOWN); final CompletableFuture<Collection<SlotOffer>> acceptedSlotOffers = jobMasterGateway.offerSlots(taskManagerLocation.getResourceID(), Collections.singleton(slotOffer), testingTimeout); final Collection<SlotOffer> slotOffers = acceptedSlotOffers.get(); assertThat(slotOffers, hasSize(1)); jobMasterGateway.notifyAllocationFailure(allocationId, new FlinkException("Fail alloction test exception")); assertThat(freedSlotFuture.get(), equalTo(allocationId)); assertThat(disconnectTaskExecutorFuture.get(), equalTo(jobGraph.getJobID())); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } private JobGraph producerConsumerJobGraph() { final JobVertex producer = new JobVertex("Producer"); producer.setInvokableClass(NoOpInvokable.class); final JobVertex consumer = new JobVertex("Consumer"); consumer.setInvokableClass(NoOpInvokable.class); consumer.connectNewDataSetAsInput(producer, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); final JobGraph jobGraph = new JobGraph(producer, consumer); jobGraph.setAllowQueuedScheduling(true); return jobGraph; } private File createSavepoint(long savepointId) throws IOException { return createSavepointWithOperatorState(savepointId); } private File createSavepointWithOperatorState(long savepointId, OperatorID... 
operatorIds) throws IOException { final File savepointFile = temporaryFolder.newFile(); final Collection<OperatorState> operatorStates = createOperatorState(operatorIds); final SavepointV2 savepoint = new SavepointV2(savepointId, operatorStates, Collections.emptyList()); try (FileOutputStream fileOutputStream = new FileOutputStream(savepointFile)) { Checkpoints.storeCheckpointMetadata(savepoint, fileOutputStream); } return savepointFile; } private Collection<OperatorState> createOperatorState(OperatorID... operatorIds) { Collection<OperatorState> operatorStates = new ArrayList<>(operatorIds.length); for (OperatorID operatorId : operatorIds) { final OperatorState operatorState = new OperatorState(operatorId, 1, 42); final OperatorSubtaskState subtaskState = new OperatorSubtaskState( new OperatorStreamStateHandle( Collections.emptyMap(), new ByteStreamStateHandle("foobar", new byte[0])), null, null, null); operatorState.putState(0, subtaskState); operatorStates.add(operatorState); } return operatorStates; } @Nonnull private JobGraph createJobGraphWithCheckpointing(SavepointRestoreSettings savepointRestoreSettings) { return createJobGraphFromJobVerticesWithCheckpointing(savepointRestoreSettings); } @Nonnull private JobGraph createJobGraphFromJobVerticesWithCheckpointing(SavepointRestoreSettings savepointRestoreSettings, JobVertex... 
jobVertices) { final JobGraph jobGraph = new JobGraph(jobVertices); final CheckpointCoordinatorConfiguration checkpoinCoordinatorConfiguration = new CheckpointCoordinatorConfiguration( 1000L, 1000L, 1000L, 1, CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION, true); final JobCheckpointingSettings checkpointingSettings = new JobCheckpointingSettings( Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), checkpoinCoordinatorConfiguration, null); jobGraph.setSnapshotSettings(checkpointingSettings); jobGraph.setSavepointRestoreSettings(savepointRestoreSettings); return jobGraph; } @Nonnull private JobMaster createJobMaster( Configuration configuration, JobGraph jobGraph, HighAvailabilityServices highAvailabilityServices, JobManagerSharedServices jobManagerSharedServices) throws Exception { return createJobMaster( configuration, jobGraph, highAvailabilityServices, jobManagerSharedServices, fastHeartbeatServices); } @Nonnull private JobMaster createJobMaster( Configuration configuration, JobGraph jobGraph, HighAvailabilityServices highAvailabilityServices, JobManagerSharedServices jobManagerSharedServices, HeartbeatServices heartbeatServices) throws Exception { final JobMasterConfiguration jobMasterConfiguration = JobMasterConfiguration.fromConfiguration(configuration); return new JobMaster( rpcService, jobMasterConfiguration, jmResourceId, jobGraph, highAvailabilityServices, DefaultSlotPoolFactory.fromConfiguration(configuration, rpcService), jobManagerSharedServices, heartbeatServices, blobServer, UnregisteredJobManagerJobMetricGroupFactory.INSTANCE, new NoOpOnCompletionActions(), testingFatalErrorHandler, JobMasterTest.class.getClassLoader()); } private JobGraph createSingleVertexJobWithRestartStrategy() throws IOException { final JobVertex jobVertex = new JobVertex("Test vertex"); jobVertex.setInvokableClass(NoOpInvokable.class); final ExecutionConfig executionConfig = new ExecutionConfig(); 
executionConfig.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 0L)); final JobGraph jobGraph = new JobGraph(jobVertex); jobGraph.setAllowQueuedScheduling(true); jobGraph.setExecutionConfig(executionConfig); return jobGraph; } /** * No op implementation of {@link OnCompletionActions}. */ private static final class NoOpOnCompletionActions implements OnCompletionActions { @Override public void jobReachedGloballyTerminalState(ArchivedExecutionGraph executionGraph) { } @Override public void jobFinishedByOther() { } @Override public void jobMasterFailed(Throwable cause) { } } private static final class DummyCheckpointStorageLocation implements CompletedCheckpointStorageLocation { private static final long serialVersionUID = 164095949572620688L; @Override public String getExternalPointer() { return null; } @Override public StreamStateHandle getMetadataHandle() { return null; } @Override public void disposeStorageLocation() throws IOException { } } }
/**
 * Tests for {@link JobMaster} behavior: heartbeats, savepoint/checkpoint recovery,
 * slot handling, and resource manager (re)connection.
 */
// NOTE(review): this declaration appears to duplicate an earlier JobMasterTest
// class body in the same file — verify the file was not concatenated twice.
class JobMasterTest extends TestLogger {

    private static final TestingInputSplit[] EMPTY_TESTING_INPUT_SPLITS = new TestingInputSplit[0];

    // Shared temporary directory for blob storage and savepoint files.
    @ClassRule
    public static TemporaryFolder temporaryFolder = new TemporaryFolder();

    private static final Time testingTimeout = Time.seconds(10L);

    // "Fast" heartbeats are used by timeout tests so heartbeat expiry happens quickly.
    private static final long fastHeartbeatInterval = 1L;
    private static final long fastHeartbeatTimeout = 5L;

    // Normal heartbeats are slow enough that they never expire within a test run.
    private static final long heartbeatInterval = 1000L;
    private static final long heartbeatTimeout = 5000L;

    private static final JobGraph jobGraph = new JobGraph();

    // Class-wide services, created once in setupClass and torn down in teardownClass.
    private static TestingRpcService rpcService;

    private static HeartbeatServices fastHeartbeatServices;

    private static HeartbeatServices heartbeatServices;

    // Per-test state, re-created in setup().
    private BlobServer blobServer;

    private Configuration configuration;

    private ResourceID jmResourceId;

    private JobMasterId jobMasterId;

    private TestingHighAvailabilityServices haServices;

    private SettableLeaderRetrievalService rmLeaderRetrievalService;

    private TestingFatalErrorHandler testingFatalErrorHandler;

    @BeforeClass
    public static void setupClass() {
        rpcService = new TestingRpcService();

        fastHeartbeatServices = new TestingHeartbeatServices(fastHeartbeatInterval, fastHeartbeatTimeout, rpcService.getScheduledExecutor());
        heartbeatServices = new TestingHeartbeatServices(heartbeatInterval, heartbeatTimeout, rpcService.getScheduledExecutor());
    }

    @Before
    public void setup() throws IOException {
        configuration = new Configuration();
        haServices = new TestingHighAvailabilityServices();
        jobMasterId = JobMasterId.generate();
        jmResourceId = ResourceID.generate();

        testingFatalErrorHandler = new TestingFatalErrorHandler();

        haServices.setCheckpointRecoveryFactory(new StandaloneCheckpointRecoveryFactory());

        // Leader address/session are set later by individual tests via notifyListener.
        rmLeaderRetrievalService = new SettableLeaderRetrievalService(
            null,
            null);
        haServices.setResourceManagerLeaderRetriever(rmLeaderRetrievalService);

        configuration.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());
        blobServer = new
BlobServer(configuration, new VoidBlobStore()); blobServer.start(); } @After public void teardown() throws Exception { if (testingFatalErrorHandler != null) { testingFatalErrorHandler.rethrowError(); } if (blobServer != null) { blobServer.close(); } rpcService.clearGateways(); } @AfterClass public static void teardownClass() { if (rpcService != null) { rpcService.stopService(); rpcService = null; } } @Test @Test public void testHeartbeatTimeoutWithTaskManager() throws Exception { final CompletableFuture<ResourceID> heartbeatResourceIdFuture = new CompletableFuture<>(); final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>(); final TaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); final TestingTaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setHeartbeatJobManagerConsumer(heartbeatResourceIdFuture::complete) .setDisconnectJobManagerConsumer((jobId, throwable) -> disconnectedJobManagerFuture.complete(jobId)) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, jobManagerSharedServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); CompletableFuture<RegistrationResponse> registrationResponse = jobMasterGateway.registerTaskManager( taskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout); registrationResponse.get(); final ResourceID heartbeatResourceId = heartbeatResourceIdFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(heartbeatResourceId, 
Matchers.equalTo(jmResourceId)); final JobID disconnectedJobManager = disconnectedJobManagerFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(disconnectedJobManager, Matchers.equalTo(jobGraph.getJobID())); } finally { jobManagerSharedServices.shutdown(); RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testHeartbeatTimeoutWithResourceManager() throws Exception { final String resourceManagerAddress = "rm"; final ResourceManagerId resourceManagerId = ResourceManagerId.generate(); final ResourceID rmResourceId = new ResourceID(resourceManagerAddress); final TestingResourceManagerGateway resourceManagerGateway = new TestingResourceManagerGateway( resourceManagerId, rmResourceId, resourceManagerAddress, "localhost"); final CompletableFuture<Tuple3<JobMasterId, ResourceID, JobID>> jobManagerRegistrationFuture = new CompletableFuture<>(); final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>(); final CountDownLatch registrationAttempts = new CountDownLatch(2); resourceManagerGateway.setRegisterJobManagerConsumer(tuple -> { jobManagerRegistrationFuture.complete( Tuple3.of( tuple.f0, tuple.f1, tuple.f3)); registrationAttempts.countDown(); }); resourceManagerGateway.setDisconnectJobManagerConsumer(tuple -> disconnectedJobManagerFuture.complete(tuple.f0)); rpcService.registerGateway(resourceManagerAddress, resourceManagerGateway); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, jobManagerSharedServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); rmLeaderRetrievalService.notifyListener(resourceManagerAddress, resourceManagerId.toUUID()); final Tuple3<JobMasterId, ResourceID, JobID> registrationInformation = 
jobManagerRegistrationFuture.get( testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(registrationInformation.f0, Matchers.equalTo(jobMasterId)); assertThat(registrationInformation.f1, Matchers.equalTo(jmResourceId)); assertThat(registrationInformation.f2, Matchers.equalTo(jobGraph.getJobID())); final JobID disconnectedJobManager = disconnectedJobManagerFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(disconnectedJobManager, Matchers.equalTo(jobGraph.getJobID())); registrationAttempts.await(); } finally { jobManagerSharedServices.shutdown(); RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that a JobMaster will restore the given JobGraph from its savepoint upon * initial submission. */ @Test public void testRestoringFromSavepoint() throws Exception { final long savepointId = 42L; final File savepointFile = createSavepoint(savepointId); final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), true); final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory(completedCheckpointStore, new StandaloneCheckpointIDCounter()); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); try { final CompletedCheckpoint savepointCheckpoint = completedCheckpointStore.getLatestCheckpoint(); assertThat(savepointCheckpoint, Matchers.notNullValue()); assertThat(savepointCheckpoint.getCheckpointID(), is(savepointId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that a JobMaster will only restore a modified 
JobGraph if non * restored state is allowed. */ @Test public void testRestoringModifiedJobFromSavepoint() throws Exception { final long savepointId = 42L; final OperatorID operatorID = new OperatorID(); final File savepointFile = createSavepointWithOperatorState(savepointId, operatorID); final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), false); final JobVertex jobVertex = new JobVertex("New operator"); jobVertex.setInvokableClass(NoOpInvokable.class); final JobGraph jobGraphWithNewOperator = createJobGraphFromJobVerticesWithCheckpointing(savepointRestoreSettings, jobVertex); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory(completedCheckpointStore, new StandaloneCheckpointIDCounter()); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); try { createJobMaster( configuration, jobGraphWithNewOperator, haServices, new TestingJobManagerSharedServicesBuilder().build()); fail("Should fail because we cannot resume the changed JobGraph from the savepoint."); } catch (IllegalStateException expected) { } jobGraphWithNewOperator.setSavepointRestoreSettings( SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), true)); final JobMaster jobMaster = createJobMaster( configuration, jobGraphWithNewOperator, haServices, new TestingJobManagerSharedServicesBuilder().build()); try { final CompletedCheckpoint savepointCheckpoint = completedCheckpointStore.getLatestCheckpoint(); assertThat(savepointCheckpoint, Matchers.notNullValue()); assertThat(savepointCheckpoint.getCheckpointID(), is(savepointId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that in a streaming use case where checkpointing is enabled, a * fixed delay with Integer.MAX_VALUE retries is instantiated 
if no other restart * strategy has been specified. */ @Test public void testAutomaticRestartingWhenCheckpointing() throws Exception { final long savepointId = 42L; final File savepointFile = createSavepoint(savepointId); final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath( savepointFile.getAbsolutePath(), true); final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory( completedCheckpointStore, new StandaloneCheckpointIDCounter()); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); final JobMaster jobMaster = createJobMaster( new Configuration(), jobGraph, haServices, new TestingJobManagerSharedServicesBuilder() .setRestartStrategyFactory(RestartStrategyFactory.createRestartStrategyFactory(configuration)) .build()); RestartStrategy restartStrategy = jobMaster.getRestartStrategy(); assertNotNull(restartStrategy); assertTrue(restartStrategy instanceof FixedDelayRestartStrategy); } /** * Tests that an existing checkpoint will have precedence over an savepoint. 
*/
    @Test
    public void testCheckpointPrecedesSavepointRecovery() throws Exception {

        // Job is configured to restore from this savepoint...
        final long savepointId = 42L;
        final File savepointFile = createSavepoint(savepointId);

        // Fix: removed the redundant `"" +` concatenation — getAbsolutePath()
        // already returns a String, so the result is byte-identical.
        final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath(
            savepointFile.getAbsolutePath(),
            true);
        final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings);

        // ...but the recovery store already holds a completed checkpoint, which must win.
        final long checkpointId = 1L;
        final CompletedCheckpoint completedCheckpoint = new CompletedCheckpoint(
            jobGraph.getJobID(),
            checkpointId,
            1L,
            1L,
            Collections.emptyMap(),
            null,
            CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION),
            new DummyCheckpointStorageLocation());

        final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1);
        completedCheckpointStore.addCheckpoint(completedCheckpoint);
        final TestingCheckpointRecoveryFactory testingCheckpointRecoveryFactory = new TestingCheckpointRecoveryFactory(completedCheckpointStore, new StandaloneCheckpointIDCounter());
        haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory);

        final JobMaster jobMaster = createJobMaster(
            configuration,
            jobGraph,
            haServices,
            new TestingJobManagerSharedServicesBuilder().build());

        try {
            // The latest checkpoint must be the pre-existing checkpoint, not the savepoint.
            final CompletedCheckpoint savepointCheckpoint = completedCheckpointStore.getLatestCheckpoint();

            assertThat(savepointCheckpoint, Matchers.notNullValue());

            assertThat(savepointCheckpoint.getCheckpointID(), is(checkpointId));
        } finally {
            RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
        }
    }

    /**
     * Tests that the JobMaster retries the scheduling of a job
     * in case of a missing slot offering from a registered TaskExecutor.
*/ @Test public void testSlotRequestTimeoutWhenNoSlotOffering() throws Exception { final JobGraph restartingJobGraph = createSingleVertexJobWithRestartStrategy(); final long slotRequestTimeout = 10L; configuration.setLong(JobManagerOptions.SLOT_REQUEST_TIMEOUT, slotRequestTimeout); final JobMaster jobMaster = createJobMaster( configuration, restartingJobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { final long start = System.nanoTime(); jobMaster.start(JobMasterId.generate(), testingTimeout).get(); final TestingResourceManagerGateway resourceManagerGateway = new TestingResourceManagerGateway(); final ArrayBlockingQueue<SlotRequest> blockingQueue = new ArrayBlockingQueue<>(2); resourceManagerGateway.setRequestSlotConsumer(blockingQueue::offer); rpcService.registerGateway(resourceManagerGateway.getAddress(), resourceManagerGateway); rmLeaderRetrievalService.notifyListener(resourceManagerGateway.getAddress(), resourceManagerGateway.getFencingToken().toUUID()); blockingQueue.take(); final CompletableFuture<TaskDeploymentDescriptor> submittedTaskFuture = new CompletableFuture<>(); final LocalTaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); final TestingTaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setSubmitTaskConsumer((tdd, ignored) -> { submittedTaskFuture.complete(tdd); return CompletableFuture.completedFuture(Acknowledge.get()); }) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); jobMasterGateway.registerTaskManager(taskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout).get(); final SlotRequest slotRequest = blockingQueue.take(); final long end = System.nanoTime(); assertThat((end - start) / 1_000_000L, Matchers.greaterThanOrEqualTo(slotRequestTimeout)); 
assertThat(submittedTaskFuture.isDone(), is(false)); final SlotOffer slotOffer = new SlotOffer(slotRequest.getAllocationId(), 0, ResourceProfile.UNKNOWN); final CompletableFuture<Collection<SlotOffer>> acceptedSlotsFuture = jobMasterGateway.offerSlots(taskManagerLocation.getResourceID(), Collections.singleton(slotOffer), testingTimeout); final Collection<SlotOffer> acceptedSlots = acceptedSlotsFuture.get(); assertThat(acceptedSlots, hasSize(1)); final SlotOffer acceptedSlot = acceptedSlots.iterator().next(); assertThat(acceptedSlot.getAllocationId(), equalTo(slotRequest.getAllocationId())); final TaskDeploymentDescriptor taskDeploymentDescriptor = submittedTaskFuture.get(); assertThat(taskDeploymentDescriptor.getAllocationId(), equalTo(slotRequest.getAllocationId())); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that we can close an unestablished ResourceManager connection. */ @Test public void testCloseUnestablishedResourceManagerConnection() throws Exception { final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); try { jobMaster.start(JobMasterId.generate(), testingTimeout).get(); final ResourceManagerId resourceManagerId = ResourceManagerId.generate(); final String firstResourceManagerAddress = "address1"; final String secondResourceManagerAddress = "address2"; final TestingResourceManagerGateway firstResourceManagerGateway = new TestingResourceManagerGateway(); final TestingResourceManagerGateway secondResourceManagerGateway = new TestingResourceManagerGateway(); rpcService.registerGateway(firstResourceManagerAddress, firstResourceManagerGateway); rpcService.registerGateway(secondResourceManagerAddress, secondResourceManagerGateway); final OneShotLatch firstJobManagerRegistration = new OneShotLatch(); final OneShotLatch secondJobManagerRegistration = new OneShotLatch(); firstResourceManagerGateway.setRegisterJobManagerConsumer( 
jobMasterIdResourceIDStringJobIDTuple4 -> firstJobManagerRegistration.trigger()); secondResourceManagerGateway.setRegisterJobManagerConsumer( jobMasterIdResourceIDStringJobIDTuple4 -> secondJobManagerRegistration.trigger()); rmLeaderRetrievalService.notifyListener(firstResourceManagerAddress, resourceManagerId.toUUID()); firstJobManagerRegistration.await(); rmLeaderRetrievalService.notifyListener(secondResourceManagerAddress, resourceManagerId.toUUID()); secondJobManagerRegistration.await(); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that we continue reconnecting to the latest known RM after a disconnection * message. */ @Test public void testReconnectionAfterDisconnect() throws Exception { final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); final BlockingQueue<JobMasterId> registrationsQueue = new ArrayBlockingQueue<>(1); testingResourceManagerGateway.setRegisterJobManagerConsumer( jobMasterIdResourceIDStringJobIDTuple4 -> registrationsQueue.offer(jobMasterIdResourceIDStringJobIDTuple4.f0)); rpcService.registerGateway(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway); final ResourceManagerId resourceManagerId = testingResourceManagerGateway.getFencingToken(); rmLeaderRetrievalService.notifyListener( testingResourceManagerGateway.getAddress(), resourceManagerId.toUUID()); final JobMasterId firstRegistrationAttempt = registrationsQueue.take(); assertThat(firstRegistrationAttempt, equalTo(jobMasterId)); assertThat(registrationsQueue.isEmpty(), is(true)); 
jobMasterGateway.disconnectResourceManager(resourceManagerId, new FlinkException("Test exception")); assertThat(registrationsQueue.take(), equalTo(jobMasterId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that the a JM connects to the leading RM after regaining leadership. */ @Test public void testResourceManagerConnectionAfterRegainingLeadership() throws Exception { final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build()); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); final BlockingQueue<JobMasterId> registrationQueue = new ArrayBlockingQueue<>(1); testingResourceManagerGateway.setRegisterJobManagerConsumer( jobMasterIdResourceIDStringJobIDTuple4 -> registrationQueue.offer(jobMasterIdResourceIDStringJobIDTuple4.f0)); final String resourceManagerAddress = testingResourceManagerGateway.getAddress(); rpcService.registerGateway(resourceManagerAddress, testingResourceManagerGateway); rmLeaderRetrievalService.notifyListener(resourceManagerAddress, testingResourceManagerGateway.getFencingToken().toUUID()); final JobMasterId firstRegistrationAttempt = registrationQueue.take(); assertThat(firstRegistrationAttempt, equalTo(jobMasterId)); jobMaster.suspend(new FlinkException("Test exception."), testingTimeout).get(); final JobMasterId jobMasterId2 = JobMasterId.generate(); jobMaster.start(jobMasterId2, testingTimeout).get(); final JobMasterId secondRegistrationAttempt = registrationQueue.take(); assertThat(secondRegistrationAttempt, equalTo(jobMasterId2)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testRequestNextInputSplit() throws Exception { final List<TestingInputSplit> 
expectedInputSplits = Arrays.asList( new TestingInputSplit(1), new TestingInputSplit(42), new TestingInputSplit(1337)); InputSplitSource<TestingInputSplit> inputSplitSource = new TestingInputSplitSource(expectedInputSplits); JobVertex source = new JobVertex("vertex1"); source.setParallelism(1); source.setInputSplitSource(inputSplitSource); source.setInvokableClass(AbstractInvokable.class); final JobGraph testJobGraph = new JobGraph(source); testJobGraph.setAllowQueuedScheduling(true); configuration.setLong(ConfigConstants.RESTART_STRATEGY_FIXED_DELAY_ATTEMPTS, 1); configuration.setString(ConfigConstants.RESTART_STRATEGY_FIXED_DELAY_DELAY, "0 s"); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder() .setRestartStrategyFactory(RestartStrategyFactory.createRestartStrategyFactory(configuration)) .build(); final JobMaster jobMaster = createJobMaster( configuration, testJobGraph, haServices, jobManagerSharedServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); ExecutionGraph eg = jobMaster.getExecutionGraph(); ExecutionVertex ev = eg.getAllExecutionVertices().iterator().next(); final SupplierWithException<SerializedInputSplit, Exception> inputSplitSupplier = () -> jobMasterGateway.requestNextInputSplit( source.getID(), ev.getCurrentExecutionAttempt().getAttemptId()).get(); List<InputSplit> actualInputSplits = getInputSplits( expectedInputSplits.size(), inputSplitSupplier); final Matcher<Iterable<? 
extends InputSplit>> expectedInputSplitsMatcher = containsInAnyOrder(expectedInputSplits.toArray(EMPTY_TESTING_INPUT_SPLITS)); assertThat(actualInputSplits, expectedInputSplitsMatcher); final long maxWaitMillis = 2000L; ExecutionGraphTestUtils.waitUntilExecutionVertexState(ev, ExecutionState.SCHEDULED, maxWaitMillis); eg.failGlobal(new Exception("Testing exception")); ExecutionGraphTestUtils.waitUntilExecutionVertexState(ev, ExecutionState.SCHEDULED, maxWaitMillis); actualInputSplits = getInputSplits( expectedInputSplits.size(), inputSplitSupplier); assertThat(actualInputSplits, expectedInputSplitsMatcher); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Nonnull private static List<InputSplit> getInputSplits(int numberInputSplits, SupplierWithException<SerializedInputSplit, Exception> nextInputSplit) throws Exception { final List<InputSplit> actualInputSplits = new ArrayList<>(numberInputSplits); for (int i = 0; i < numberInputSplits; i++) { final SerializedInputSplit serializedInputSplit = nextInputSplit.get(); assertThat(serializedInputSplit.isEmpty(), is(false)); actualInputSplits.add(InstantiationUtil.deserializeObject(serializedInputSplit.getInputSplitData(), ClassLoader.getSystemClassLoader())); } final SerializedInputSplit serializedInputSplit = nextInputSplit.get(); if (!serializedInputSplit.isEmpty()) { InputSplit emptyInputSplit = InstantiationUtil.deserializeObject(serializedInputSplit.getInputSplitData(), ClassLoader.getSystemClassLoader()); assertThat(emptyInputSplit, is(nullValue())); } return actualInputSplits; } private static final class TestingInputSplitSource implements InputSplitSource<TestingInputSplit> { private static final long serialVersionUID = -2344684048759139086L; private final List<TestingInputSplit> inputSplits; private TestingInputSplitSource(List<TestingInputSplit> inputSplits) { this.inputSplits = inputSplits; } @Override public TestingInputSplit[] createInputSplits(int minNumSplits) { return 
inputSplits.toArray(EMPTY_TESTING_INPUT_SPLITS); } @Override public InputSplitAssigner getInputSplitAssigner(TestingInputSplit[] inputSplits) { return new DefaultInputSplitAssigner(inputSplits); } } private static final class TestingInputSplit implements InputSplit { private static final long serialVersionUID = -5404803705463116083L; private final int splitNumber; TestingInputSplit(int number) { this.splitNumber = number; } public int getSplitNumber() { return splitNumber; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TestingInputSplit that = (TestingInputSplit) o; return splitNumber == that.splitNumber; } @Override public int hashCode() { return Objects.hash(splitNumber); } } @Test public void testRequestKvStateWithoutRegistration() throws Exception { final JobGraph graph = createKvJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); try { jobMasterGateway.requestKvStateLocation(graph.getJobID(), "unknown").get(); fail("Expected to fail with UnknownKvStateLocation"); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, UnknownKvStateLocation.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testRequestKvStateOfWrongJob() throws Exception { final JobGraph graph = createKvJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final 
JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); try { jobMasterGateway.requestKvStateLocation(new JobID(), "unknown").get(); fail("Expected to fail with FlinkJobNotFoundException"); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Nonnull public JobGraph createKvJobGraph() { final JobVertex vertex1 = new JobVertex("v1"); vertex1.setParallelism(4); vertex1.setMaxParallelism(16); vertex1.setInvokableClass(BlockingNoOpInvokable.class); final JobVertex vertex2 = new JobVertex("v2"); vertex2.setParallelism(4); vertex2.setMaxParallelism(16); vertex2.setInvokableClass(BlockingNoOpInvokable.class); return new JobGraph(vertex1, vertex2); } @Test public void testRequestKvStateWithIrrelevantRegistration() throws Exception { final JobGraph graph = createKvJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); try { jobMasterGateway.notifyKvStateRegistered( new JobID(), new JobVertexID(), new KeyGroupRange(0, 0), "any-name", new KvStateID(), new InetSocketAddress(InetAddress.getLocalHost(), 1233)).get(); fail("Expected to fail with FlinkJobNotFoundException."); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, FlinkJobNotFoundException.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testRegisterAndUnregisterKvState() throws Exception { final JobGraph graph = 
createKvJobGraph(); final List<JobVertex> jobVertices = graph.getVerticesSortedTopologicallyFromSources(); final JobVertex vertex1 = jobVertices.get(0); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final String registrationName = "register-me"; final KvStateID kvStateID = new KvStateID(); final KeyGroupRange keyGroupRange = new KeyGroupRange(0, 0); final InetSocketAddress address = new InetSocketAddress(InetAddress.getLocalHost(), 1029); jobMasterGateway.notifyKvStateRegistered( graph.getJobID(), vertex1.getID(), keyGroupRange, registrationName, kvStateID, address).get(); final KvStateLocation location = jobMasterGateway.requestKvStateLocation(graph.getJobID(), registrationName).get(); assertEquals(graph.getJobID(), location.getJobId()); assertEquals(vertex1.getID(), location.getJobVertexId()); assertEquals(vertex1.getMaxParallelism(), location.getNumKeyGroups()); assertEquals(1, location.getNumRegisteredKeyGroups()); assertEquals(1, keyGroupRange.getNumberOfKeyGroups()); assertEquals(kvStateID, location.getKvStateID(keyGroupRange.getStartKeyGroup())); assertEquals(address, location.getKvStateServerAddress(keyGroupRange.getStartKeyGroup())); jobMasterGateway.notifyKvStateUnregistered( graph.getJobID(), vertex1.getID(), keyGroupRange, registrationName).get(); try { jobMasterGateway.requestKvStateLocation(graph.getJobID(), registrationName).get(); fail("Expected to fail with an UnknownKvStateLocation."); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowable(e, UnknownKvStateLocation.class).isPresent()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public 
void testDuplicatedKvStateRegistrationsFailTask() throws Exception { final JobGraph graph = createKvJobGraph(); final List<JobVertex> jobVertices = graph.getVerticesSortedTopologicallyFromSources(); final JobVertex vertex1 = jobVertices.get(0); final JobVertex vertex2 = jobVertices.get(1); final JobMaster jobMaster = createJobMaster( configuration, graph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final String registrationName = "duplicate-me"; final KvStateID kvStateID = new KvStateID(); final KeyGroupRange keyGroupRange = new KeyGroupRange(0, 0); final InetSocketAddress address = new InetSocketAddress(InetAddress.getLocalHost(), 4396); jobMasterGateway.notifyKvStateRegistered( graph.getJobID(), vertex1.getID(), keyGroupRange, registrationName, kvStateID, address).get(); try { jobMasterGateway.notifyKvStateRegistered( graph.getJobID(), vertex2.getID(), keyGroupRange, registrationName, kvStateID, address).get(); fail("Expected to fail because of clashing registration message."); } catch (Exception e) { assertTrue(ExceptionUtils.findThrowableWithMessage(e, "Registration name clash").isPresent()); assertEquals(JobStatus.FAILED, jobMaster.getExecutionGraph().getState()); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests the {@link JobMaster * call for a finished result partition. 
*/ @Test public void testRequestPartitionState() throws Exception { final JobGraph producerConsumerJobGraph = producerConsumerJobGraph(); final JobMaster jobMaster = createJobMaster( configuration, producerConsumerJobGraph, haServices, new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices); CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); try { startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); final CompletableFuture<AllocationID> allocationIdFuture = new CompletableFuture<>(); testingResourceManagerGateway.setRequestSlotConsumer(slotRequest -> allocationIdFuture.complete(slotRequest.getAllocationId())); rpcService.registerGateway(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway); final CompletableFuture<TaskDeploymentDescriptor> tddFuture = new CompletableFuture<>(); final TestingTaskExecutorGateway testingTaskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setSubmitTaskConsumer((taskDeploymentDescriptor, jobMasterId) -> { tddFuture.complete(taskDeploymentDescriptor); return CompletableFuture.completedFuture(Acknowledge.get()); }) .createTestingTaskExecutorGateway(); rpcService.registerGateway(testingTaskExecutorGateway.getAddress(), testingTaskExecutorGateway); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); rmLeaderRetrievalService.notifyListener(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway.getFencingToken().toUUID()); final AllocationID allocationId = allocationIdFuture.get(); final LocalTaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); jobMasterGateway.registerTaskManager(testingTaskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout).get(); final SlotOffer slotOffer = new SlotOffer(allocationId, 0, ResourceProfile.UNKNOWN); final 
Collection<SlotOffer> slotOffers = jobMasterGateway.offerSlots(taskManagerLocation.getResourceID(), Collections.singleton(slotOffer), testingTimeout).get(); assertThat(slotOffers, hasSize(1)); assertThat(slotOffers, contains(slotOffer)); final TaskDeploymentDescriptor tdd = tddFuture.get(); assertThat(tdd.getProducedPartitions(), hasSize(1)); final ResultPartitionDeploymentDescriptor partition = tdd.getProducedPartitions().iterator().next(); final ExecutionAttemptID executionAttemptId = tdd.getExecutionAttemptId(); final ExecutionAttemptID copiedExecutionAttemptId = new ExecutionAttemptID(executionAttemptId.getLowerPart(), executionAttemptId.getUpperPart()); jobMasterGateway.updateTaskExecutionState(new TaskExecutionState(producerConsumerJobGraph.getJobID(), executionAttemptId, ExecutionState.FINISHED)).get(); final ResultPartitionID partitionId = new ResultPartitionID(partition.getPartitionId(), copiedExecutionAttemptId); CompletableFuture<ExecutionState> partitionStateFuture = jobMasterGateway.requestPartitionState(partition.getResultId(), partitionId); assertThat(partitionStateFuture.get(), equalTo(ExecutionState.FINISHED)); partitionStateFuture = jobMasterGateway.requestPartitionState(partition.getResultId(), new ResultPartitionID()); try { partitionStateFuture.get(); fail("Expected failure."); } catch (ExecutionException e) { assertThat(ExceptionUtils.findThrowable(e, IllegalArgumentException.class).isPresent(), is(true)); } partitionStateFuture = jobMasterGateway.requestPartitionState(new IntermediateDataSetID(), partitionId); try { partitionStateFuture.get(); fail("Expected failure."); } catch (ExecutionException e) { assertThat(ExceptionUtils.findThrowable(e, IllegalArgumentException.class).isPresent(), is(true)); } partitionStateFuture = jobMasterGateway.requestPartitionState(partition.getResultId(), new ResultPartitionID(partition.getPartitionId(), new ExecutionAttemptID())); try { partitionStateFuture.get(); fail("Expected failure."); } catch 
(ExecutionException e) { assertThat(ExceptionUtils.findThrowable(e, PartitionProducerDisposedException.class).isPresent(), is(true)); } } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that the timeout in {@link JobMasterGateway * is respected. */ @Test public void testTriggerSavepointTimeout() throws Exception { final JobMaster jobMaster = new JobMaster( rpcService, JobMasterConfiguration.fromConfiguration(configuration), jmResourceId, jobGraph, haServices, DefaultSlotPoolFactory.fromConfiguration(configuration, rpcService), new TestingJobManagerSharedServicesBuilder().build(), heartbeatServices, blobServer, UnregisteredJobManagerJobMetricGroupFactory.INSTANCE, new NoOpOnCompletionActions(), testingFatalErrorHandler, JobMasterTest.class.getClassLoader()) { @Override public CompletableFuture<String> triggerSavepoint( @Nullable final String targetDirectory, final boolean cancelJob, final Time timeout) { return new CompletableFuture<>(); } }; try { final CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId, testingTimeout); startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); final CompletableFuture<String> savepointFutureLowTimeout = jobMasterGateway.triggerSavepoint("/tmp", false, Time.milliseconds(1)); final CompletableFuture<String> savepointFutureHighTimeout = jobMasterGateway.triggerSavepoint("/tmp", false, RpcUtils.INF_TIMEOUT); try { savepointFutureLowTimeout.get(testingTimeout.getSize(), testingTimeout.getUnit()); fail(); } catch (final ExecutionException e) { final Throwable cause = ExceptionUtils.stripExecutionException(e); assertThat(cause, instanceOf(TimeoutException.class)); } assertThat(savepointFutureHighTimeout.isDone(), is(equalTo(false))); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** * Tests that the TaskExecutor is released if all of its slots have 
been freed. */ @Test public void testReleasingTaskExecutorIfNoMoreSlotsRegistered() throws Exception { final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobGraph jobGraph = createSingleVertexJobWithRestartStrategy(); final JobMaster jobMaster = createJobMaster( configuration, jobGraph, haServices, jobManagerSharedServices, heartbeatServices); final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); rpcService.registerGateway(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway); rmLeaderRetrievalService.notifyListener(testingResourceManagerGateway.getAddress(), testingResourceManagerGateway.getFencingToken().toUUID()); final CompletableFuture<AllocationID> allocationIdFuture = new CompletableFuture<>(); testingResourceManagerGateway.setRequestSlotConsumer( slotRequest -> allocationIdFuture.complete(slotRequest.getAllocationId())); final CompletableFuture<JobID> disconnectTaskExecutorFuture = new CompletableFuture<>(); final CompletableFuture<AllocationID> freedSlotFuture = new CompletableFuture<>(); final TestingTaskExecutorGateway testingTaskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setFreeSlotFunction( (allocationID, throwable) -> { freedSlotFuture.complete(allocationID); return CompletableFuture.completedFuture(Acknowledge.get()); }) .setDisconnectJobManagerConsumer((jobID, throwable) -> disconnectTaskExecutorFuture.complete(jobID)) .createTestingTaskExecutorGateway(); final TaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); rpcService.registerGateway(testingTaskExecutorGateway.getAddress(), testingTaskExecutorGateway); try { jobMaster.start(jobMasterId, testingTimeout).get(); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); final AllocationID allocationId = allocationIdFuture.get(); 
jobMasterGateway.registerTaskManager(testingTaskExecutorGateway.getAddress(), taskManagerLocation, testingTimeout).get(); final SlotOffer slotOffer = new SlotOffer(allocationId, 0, ResourceProfile.UNKNOWN); final CompletableFuture<Collection<SlotOffer>> acceptedSlotOffers = jobMasterGateway.offerSlots(taskManagerLocation.getResourceID(), Collections.singleton(slotOffer), testingTimeout); final Collection<SlotOffer> slotOffers = acceptedSlotOffers.get(); assertThat(slotOffers, hasSize(1)); jobMasterGateway.notifyAllocationFailure(allocationId, new FlinkException("Fail alloction test exception")); assertThat(freedSlotFuture.get(), equalTo(allocationId)); assertThat(disconnectTaskExecutorFuture.get(), equalTo(jobGraph.getJobID())); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } private JobGraph producerConsumerJobGraph() { final JobVertex producer = new JobVertex("Producer"); producer.setInvokableClass(NoOpInvokable.class); final JobVertex consumer = new JobVertex("Consumer"); consumer.setInvokableClass(NoOpInvokable.class); consumer.connectNewDataSetAsInput(producer, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); final JobGraph jobGraph = new JobGraph(producer, consumer); jobGraph.setAllowQueuedScheduling(true); return jobGraph; } private File createSavepoint(long savepointId) throws IOException { return createSavepointWithOperatorState(savepointId); } private File createSavepointWithOperatorState(long savepointId, OperatorID... 
operatorIds) throws IOException { final File savepointFile = temporaryFolder.newFile(); final Collection<OperatorState> operatorStates = createOperatorState(operatorIds); final SavepointV2 savepoint = new SavepointV2(savepointId, operatorStates, Collections.emptyList()); try (FileOutputStream fileOutputStream = new FileOutputStream(savepointFile)) { Checkpoints.storeCheckpointMetadata(savepoint, fileOutputStream); } return savepointFile; } private Collection<OperatorState> createOperatorState(OperatorID... operatorIds) { Collection<OperatorState> operatorStates = new ArrayList<>(operatorIds.length); for (OperatorID operatorId : operatorIds) { final OperatorState operatorState = new OperatorState(operatorId, 1, 42); final OperatorSubtaskState subtaskState = new OperatorSubtaskState( new OperatorStreamStateHandle( Collections.emptyMap(), new ByteStreamStateHandle("foobar", new byte[0])), null, null, null); operatorState.putState(0, subtaskState); operatorStates.add(operatorState); } return operatorStates; } @Nonnull private JobGraph createJobGraphWithCheckpointing(SavepointRestoreSettings savepointRestoreSettings) { return createJobGraphFromJobVerticesWithCheckpointing(savepointRestoreSettings); } @Nonnull private JobGraph createJobGraphFromJobVerticesWithCheckpointing(SavepointRestoreSettings savepointRestoreSettings, JobVertex... 
jobVertices) { final JobGraph jobGraph = new JobGraph(jobVertices); final CheckpointCoordinatorConfiguration checkpoinCoordinatorConfiguration = new CheckpointCoordinatorConfiguration( 1000L, 1000L, 1000L, 1, CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION, true); final JobCheckpointingSettings checkpointingSettings = new JobCheckpointingSettings( Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), checkpoinCoordinatorConfiguration, null); jobGraph.setSnapshotSettings(checkpointingSettings); jobGraph.setSavepointRestoreSettings(savepointRestoreSettings); return jobGraph; } @Nonnull private JobMaster createJobMaster( Configuration configuration, JobGraph jobGraph, HighAvailabilityServices highAvailabilityServices, JobManagerSharedServices jobManagerSharedServices) throws Exception { return createJobMaster( configuration, jobGraph, highAvailabilityServices, jobManagerSharedServices, fastHeartbeatServices); } @Nonnull private JobMaster createJobMaster( Configuration configuration, JobGraph jobGraph, HighAvailabilityServices highAvailabilityServices, JobManagerSharedServices jobManagerSharedServices, HeartbeatServices heartbeatServices) throws Exception { final JobMasterConfiguration jobMasterConfiguration = JobMasterConfiguration.fromConfiguration(configuration); return new JobMaster( rpcService, jobMasterConfiguration, jmResourceId, jobGraph, highAvailabilityServices, DefaultSlotPoolFactory.fromConfiguration(configuration, rpcService), jobManagerSharedServices, heartbeatServices, blobServer, UnregisteredJobManagerJobMetricGroupFactory.INSTANCE, new NoOpOnCompletionActions(), testingFatalErrorHandler, JobMasterTest.class.getClassLoader()); } private JobGraph createSingleVertexJobWithRestartStrategy() throws IOException { final JobVertex jobVertex = new JobVertex("Test vertex"); jobVertex.setInvokableClass(NoOpInvokable.class); final ExecutionConfig executionConfig = new ExecutionConfig(); 
executionConfig.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 0L)); final JobGraph jobGraph = new JobGraph(jobVertex); jobGraph.setAllowQueuedScheduling(true); jobGraph.setExecutionConfig(executionConfig); return jobGraph; } /** * No op implementation of {@link OnCompletionActions}. */ private static final class NoOpOnCompletionActions implements OnCompletionActions { @Override public void jobReachedGloballyTerminalState(ArchivedExecutionGraph executionGraph) { } @Override public void jobFinishedByOther() { } @Override public void jobMasterFailed(Throwable cause) { } } private static final class DummyCheckpointStorageLocation implements CompletedCheckpointStorageLocation { private static final long serialVersionUID = 164095949572620688L; @Override public String getExternalPointer() { return null; } @Override public StreamStateHandle getMetadataHandle() { return null; } @Override public void disposeStorageLocation() throws IOException { } } }
"lock" is already in the path so no need for it in the node name imho
public Lock lockVespaServerPool() { return lock(root.append("locks").append("vespaServerPoolLock"), defaultLockTimeout); }
return lock(root.append("locks").append("vespaServerPoolLock"), defaultLockTimeout);
public Lock lockVespaServerPool() { return lock(root.append("locks").append("vespaServerPoolLock"), Duration.ofSeconds(1)); }
class CuratorDb { /** Use a nonstandard zk port to avoid interfering with connection to the config server zk cluster */ private static final int zooKeeperPort = 2281; private static final Logger log = Logger.getLogger(CuratorDb.class.getName()); private static final Path root = Path.fromString("/controller/v1"); private static final Duration defaultLockTimeout = Duration.ofMinutes(5); private final StringSetSerializer stringSetSerializer = new StringSetSerializer(); private final JobQueueSerializer jobQueueSerializer = new JobQueueSerializer(); @SuppressWarnings("unused") private final ZooKeeperServer zooKeeperServer; private final Curator curator; /** * All keys, to allow reentrancy. * This will grow forever, but this should be too slow to be a problem. */ private final ConcurrentHashMap<Path, Lock> locks = new ConcurrentHashMap<>(); /** Create a curator db which also set up a ZooKeeper server (such that this instance is both client and server) */ @Inject public CuratorDb(ClusterInfoConfig clusterInfo) { this.zooKeeperServer = new ZooKeeperServer(toZookeeperServerConfig(clusterInfo)); this.curator = new Curator(toConnectionSpec(clusterInfo)); } /** Create a curator db which does not set up a server, using the given Curator instance */ protected CuratorDb(Curator curator) { this.zooKeeperServer = null; this.curator = curator; } private static ZookeeperServerConfig toZookeeperServerConfig(ClusterInfoConfig clusterInfo) { ZookeeperServerConfig.Builder b = new ZookeeperServerConfig.Builder(); b.zooKeeperConfigFile("conf/zookeeper/controller-zookeeper.cfg"); b.dataDir("var/controller-zookeeper"); b.clientPort(zooKeeperPort); b.myidFile("var/controller-zookeeper/myid"); b.myid(myIndex(clusterInfo)); for (ClusterInfoConfig.Services clusterMember : clusterInfo.services()) { ZookeeperServerConfig.Server.Builder server = new ZookeeperServerConfig.Server.Builder(); server.id(clusterMember.index()); server.hostname(clusterMember.hostname()); server.quorumPort(zooKeeperPort + 
1); server.electionPort(zooKeeperPort + 2); b.server(server); } return new ZookeeperServerConfig(b); } private static Integer myIndex(ClusterInfoConfig clusterInfo) { String hostname = HostName.getLocalhost(); return clusterInfo.services().stream() .filter(service -> service.hostname().equals(hostname)) .map(ClusterInfoConfig.Services::index) .findFirst() .orElseThrow(() -> new IllegalStateException("Unable to find index for this node by hostname '" + hostname + "'")); } private static String toConnectionSpec(ClusterInfoConfig clusterInfo) { return clusterInfo.services().stream() .map(member -> member.hostname() + ":" + zooKeeperPort) .collect(Collectors.joining(",")); } public Lock lock(TenantId id, Duration timeout) { return lock(lockPath(id), timeout); } public Lock lock(ApplicationId id, Duration timeout) { return lock(lockPath(id), timeout); } /** Create a reentrant lock */ private Lock lock(Path path, Duration timeout) { Lock lock = locks.computeIfAbsent(path, (pathArg) -> new Lock(pathArg.getAbsolute(), curator)); lock.acquire(timeout); return lock; } public Lock lockInactiveJobs() { return lock(root.append("locks").append("inactiveJobsLock"), defaultLockTimeout); } public Lock lockJobQueues() { return lock(root.append("locks").append("jobQueuesLock"), defaultLockTimeout); } public Lock lockMaintenanceJob(String jobName) { return lock(root.append("locks").append("maintenanceJobLocks").append(jobName), Duration.ofSeconds(1)); } public Lock lockProvisionState(String provisionStateId) { return lock(lockPath(provisionStateId), Duration.ofMinutes(30)); } public Lock lockOpenStackServerPool() { return lock(root.append("locks").append("openStackServerPoolLock"), defaultLockTimeout); } public Set<String> readInactiveJobs() { try { Optional<byte[]> data = curator.getData(inactiveJobsPath()); if (! 
data.isPresent() || data.get().length == 0) return new HashSet<>(); return stringSetSerializer.fromJson(data.get()); } catch (RuntimeException e) { log.log(Level.WARNING, "Error reading inactive jobs, deleting inactive state"); writeInactiveJobs(Collections.emptySet()); return new HashSet<>(); } } public void writeInactiveJobs(Set<String> inactiveJobs) { NestedTransaction transaction = new NestedTransaction(); curator.set(inactiveJobsPath(), stringSetSerializer.toJson(inactiveJobs)); transaction.commit(); } public Deque<ApplicationId> readJobQueue(DeploymentJobs.JobType jobType) { try { Optional<byte[]> data = curator.getData(jobQueuePath(jobType)); if (! data.isPresent() || data.get().length == 0) return new ArrayDeque<>(); return jobQueueSerializer.fromJson(data.get()); } catch (RuntimeException e) { log.log(Level.WARNING, "Error reading job queue, deleting inactive state"); writeInactiveJobs(Collections.emptySet()); return new ArrayDeque<>(); } } public void writeJobQueue(DeploymentJobs.JobType jobType, Deque<ApplicationId> queue) { NestedTransaction transaction = new NestedTransaction(); curator.set(jobQueuePath(jobType), jobQueueSerializer.toJson(queue)); transaction.commit(); } public double readUpgradesPerMinute() { Optional<byte[]> n = curator.getData(upgradesPerMinutePath()); if (!n.isPresent() || n.get().length == 0) { return 0.5; } return ByteBuffer.wrap(n.get()).getDouble(); } public void writeUpgradesPerMinute(double n) { if (n < 0) { throw new IllegalArgumentException("Upgrades per minute must be >= 0"); } NestedTransaction transaction = new NestedTransaction(); curator.set(upgradesPerMinutePath(), ByteBuffer.allocate(Double.BYTES).putDouble(n).array()); transaction.commit(); } public void writeVersionStatus(VersionStatus status) { VersionStatusSerializer serializer = new VersionStatusSerializer(); NestedTransaction transaction = new NestedTransaction(); try { if (curator.getData(systemVersionPath()).isPresent()) { curator.delete(systemVersionPath()); 
} curator.set(versionStatusPath(), SlimeUtils.toJsonBytes(serializer.toSlime(status))); } catch (IOException e) { throw new UncheckedIOException("Failed to serialize version status", e); } transaction.commit(); } public VersionStatus readVersionStatus() { Optional<byte[]> data = curator.getData(versionStatusPath()); if (!data.isPresent() || data.get().length == 0) { return VersionStatus.empty(); } VersionStatusSerializer serializer = new VersionStatusSerializer(); return serializer.fromSlime(SlimeUtils.jsonToSlime(data.get())); } public Optional<byte[]> readProvisionState(String provisionId) { return curator.getData(provisionStatePath(provisionId)); } public void writeProvisionState(String provisionId, byte[] data) { curator.set(provisionStatePath(provisionId), data); } public List<String> readProvisionStateIds() { return curator.getChildren(provisionStatePath()); } public Optional<byte[]> readVespaServerPool() { return curator.getData(vespaServerPoolPath()); } public void writeVespaServerPool(byte[] data) { curator.set(vespaServerPoolPath(), data); } public Optional<byte[]> readOpenStackServerPool() { return curator.getData(openStackServerPoolPath()); } public void writeOpenStackServerPool(byte[] data) { curator.set(openStackServerPoolPath(), data); } private Path systemVersionPath() { return root.append("systemVersion"); } private Path lockPath(TenantId tenant) { Path lockPath = root.append("locks") .append(tenant.id()); curator.create(lockPath); return lockPath; } private Path lockPath(ApplicationId application) { Path lockPath = root.append("locks") .append(application.tenant().value()) .append(application.application().value()) .append(application.instance().value()); curator.create(lockPath); return lockPath; } private Path lockPath(String provisionId) { Path lockPath = root.append("locks") .append(provisionStatePath()); curator.create(lockPath); return lockPath; } private Path inactiveJobsPath() { return root.append("inactiveJobs"); } private Path 
jobQueuePath(DeploymentJobs.JobType jobType) { return root.append("jobQueues").append(jobType.name()); } private Path upgradesPerMinutePath() { return root.append("upgrader").append("upgradesPerMinute"); } private Path versionStatusPath() { return root.append("versionStatus"); } private Path provisionStatePath() { return root.append("provisioning").append("states"); } private Path provisionStatePath(String provisionId) { return provisionStatePath().append(provisionId); } private Path vespaServerPoolPath() { return root.append("vespaServerPool"); } private Path openStackServerPoolPath() { return root.append("openStackServerPool"); } }
class CuratorDb { /** Use a nonstandard zk port to avoid interfering with connection to the config server zk cluster */ private static final int zooKeeperPort = 2281; private static final Logger log = Logger.getLogger(CuratorDb.class.getName()); private static final Path root = Path.fromString("/controller/v1"); private static final Duration defaultLockTimeout = Duration.ofMinutes(5); private final StringSetSerializer stringSetSerializer = new StringSetSerializer(); private final JobQueueSerializer jobQueueSerializer = new JobQueueSerializer(); @SuppressWarnings("unused") private final ZooKeeperServer zooKeeperServer; private final Curator curator; /** * All keys, to allow reentrancy. * This will grow forever, but this should be too slow to be a problem. */ private final ConcurrentHashMap<Path, Lock> locks = new ConcurrentHashMap<>(); /** Create a curator db which also set up a ZooKeeper server (such that this instance is both client and server) */ @Inject public CuratorDb(ClusterInfoConfig clusterInfo) { this.zooKeeperServer = new ZooKeeperServer(toZookeeperServerConfig(clusterInfo)); this.curator = new Curator(toConnectionSpec(clusterInfo)); } /** Create a curator db which does not set up a server, using the given Curator instance */ protected CuratorDb(Curator curator) { this.zooKeeperServer = null; this.curator = curator; } private static ZookeeperServerConfig toZookeeperServerConfig(ClusterInfoConfig clusterInfo) { ZookeeperServerConfig.Builder b = new ZookeeperServerConfig.Builder(); b.zooKeeperConfigFile("conf/zookeeper/controller-zookeeper.cfg"); b.dataDir("var/controller-zookeeper"); b.clientPort(zooKeeperPort); b.myidFile("var/controller-zookeeper/myid"); b.myid(myIndex(clusterInfo)); for (ClusterInfoConfig.Services clusterMember : clusterInfo.services()) { ZookeeperServerConfig.Server.Builder server = new ZookeeperServerConfig.Server.Builder(); server.id(clusterMember.index()); server.hostname(clusterMember.hostname()); server.quorumPort(zooKeeperPort + 
1); server.electionPort(zooKeeperPort + 2); b.server(server); } return new ZookeeperServerConfig(b); } private static Integer myIndex(ClusterInfoConfig clusterInfo) { String hostname = HostName.getLocalhost(); return clusterInfo.services().stream() .filter(service -> service.hostname().equals(hostname)) .map(ClusterInfoConfig.Services::index) .findFirst() .orElseThrow(() -> new IllegalStateException("Unable to find index for this node by hostname '" + hostname + "'")); } private static String toConnectionSpec(ClusterInfoConfig clusterInfo) { return clusterInfo.services().stream() .map(member -> member.hostname() + ":" + zooKeeperPort) .collect(Collectors.joining(",")); } public Lock lock(TenantId id, Duration timeout) { return lock(lockPath(id), timeout); } public Lock lock(ApplicationId id, Duration timeout) { return lock(lockPath(id), timeout); } /** Create a reentrant lock */ private Lock lock(Path path, Duration timeout) { Lock lock = locks.computeIfAbsent(path, (pathArg) -> new Lock(pathArg.getAbsolute(), curator)); lock.acquire(timeout); return lock; } public Lock lockInactiveJobs() { return lock(root.append("locks").append("inactiveJobsLock"), defaultLockTimeout); } public Lock lockJobQueues() { return lock(root.append("locks").append("jobQueuesLock"), defaultLockTimeout); } public Lock lockMaintenanceJob(String jobName) { return lock(root.append("locks").append("maintenanceJobLocks").append(jobName), Duration.ofSeconds(1)); } public Lock lockProvisionState(String provisionStateId) { return lock(lockPath(provisionStateId), Duration.ofSeconds(1)); } public Lock lockOpenStackServerPool() { return lock(root.append("locks").append("openStackServerPoolLock"), Duration.ofSeconds(1)); } public Set<String> readInactiveJobs() { try { Optional<byte[]> data = curator.getData(inactiveJobsPath()); if (! 
data.isPresent() || data.get().length == 0) return new HashSet<>(); return stringSetSerializer.fromJson(data.get()); } catch (RuntimeException e) { log.log(Level.WARNING, "Error reading inactive jobs, deleting inactive state"); writeInactiveJobs(Collections.emptySet()); return new HashSet<>(); } } public void writeInactiveJobs(Set<String> inactiveJobs) { NestedTransaction transaction = new NestedTransaction(); curator.set(inactiveJobsPath(), stringSetSerializer.toJson(inactiveJobs)); transaction.commit(); } public Deque<ApplicationId> readJobQueue(DeploymentJobs.JobType jobType) { try { Optional<byte[]> data = curator.getData(jobQueuePath(jobType)); if (! data.isPresent() || data.get().length == 0) return new ArrayDeque<>(); return jobQueueSerializer.fromJson(data.get()); } catch (RuntimeException e) { log.log(Level.WARNING, "Error reading job queue, deleting inactive state"); writeInactiveJobs(Collections.emptySet()); return new ArrayDeque<>(); } } public void writeJobQueue(DeploymentJobs.JobType jobType, Deque<ApplicationId> queue) { NestedTransaction transaction = new NestedTransaction(); curator.set(jobQueuePath(jobType), jobQueueSerializer.toJson(queue)); transaction.commit(); } public double readUpgradesPerMinute() { Optional<byte[]> n = curator.getData(upgradesPerMinutePath()); if (!n.isPresent() || n.get().length == 0) { return 0.5; } return ByteBuffer.wrap(n.get()).getDouble(); } public void writeUpgradesPerMinute(double n) { if (n < 0) { throw new IllegalArgumentException("Upgrades per minute must be >= 0"); } NestedTransaction transaction = new NestedTransaction(); curator.set(upgradesPerMinutePath(), ByteBuffer.allocate(Double.BYTES).putDouble(n).array()); transaction.commit(); } public void writeVersionStatus(VersionStatus status) { VersionStatusSerializer serializer = new VersionStatusSerializer(); NestedTransaction transaction = new NestedTransaction(); try { if (curator.getData(systemVersionPath()).isPresent()) { curator.delete(systemVersionPath()); 
} curator.set(versionStatusPath(), SlimeUtils.toJsonBytes(serializer.toSlime(status))); } catch (IOException e) { throw new UncheckedIOException("Failed to serialize version status", e); } transaction.commit(); } public VersionStatus readVersionStatus() { Optional<byte[]> data = curator.getData(versionStatusPath()); if (!data.isPresent() || data.get().length == 0) { return VersionStatus.empty(); } VersionStatusSerializer serializer = new VersionStatusSerializer(); return serializer.fromSlime(SlimeUtils.jsonToSlime(data.get())); } public Optional<byte[]> readProvisionState(String provisionId) { return curator.getData(provisionStatePath(provisionId)); } public void writeProvisionState(String provisionId, byte[] data) { curator.set(provisionStatePath(provisionId), data); } public List<String> readProvisionStateIds() { return curator.getChildren(provisionStatePath()); } public Optional<byte[]> readVespaServerPool() { return curator.getData(vespaServerPoolPath()); } public void writeVespaServerPool(byte[] data) { curator.set(vespaServerPoolPath(), data); } public Optional<byte[]> readOpenStackServerPool() { return curator.getData(openStackServerPoolPath()); } public void writeOpenStackServerPool(byte[] data) { curator.set(openStackServerPoolPath(), data); } private Path systemVersionPath() { return root.append("systemVersion"); } private Path lockPath(TenantId tenant) { Path lockPath = root.append("locks") .append(tenant.id()); curator.create(lockPath); return lockPath; } private Path lockPath(ApplicationId application) { Path lockPath = root.append("locks") .append(application.tenant().value()) .append(application.application().value()) .append(application.instance().value()); curator.create(lockPath); return lockPath; } private Path lockPath(String provisionId) { Path lockPath = root.append("locks") .append(provisionStatePath()); curator.create(lockPath); return lockPath; } private Path inactiveJobsPath() { return root.append("inactiveJobs"); } private Path 
jobQueuePath(DeploymentJobs.JobType jobType) { return root.append("jobQueues").append(jobType.name()); } private Path upgradesPerMinutePath() { return root.append("upgrader").append("upgradesPerMinute"); } private Path versionStatusPath() { return root.append("versionStatus"); } private Path provisionStatePath() { return root.append("provisioning").append("states"); } private Path provisionStatePath(String provisionId) { return provisionStatePath().append(provisionId); } private Path vespaServerPoolPath() { return root.append("vespaServerPool"); } private Path openStackServerPoolPath() { return root.append("openStackServerPool"); } }
@gastaldi How does it look now ? (FYI, I've dropped a `dynamicTenantsConfig` map check as it is initialized in a `PostConstruct` method)
private TenantConfigContext getTenantConfigFromTenantResolver(RoutingContext context) { Assert.assertNotNull(staticTenantsConfig); String tenantId = null; if (tenantResolver.isResolvable()) { tenantId = tenantResolver.get().resolve(context); } TenantConfigContext configContext = tenantId != null ? staticTenantsConfig.get(tenantId) : null; if (configContext == null) { if (tenantId != null && !tenantId.isEmpty()) { LOG.debugf("No configuration with a tenant id '%s' has been found, using the default configuration"); } configContext = defaultTenant; } return configContext; }
Assert.assertNotNull(staticTenantsConfig);
private TenantConfigContext getTenantConfigFromTenantResolver(RoutingContext context) { if (staticTenantsConfig == null) { throw new IllegalStateException("staticTenantsConfig is null"); } String tenantId = null; if (tenantResolver.isResolvable()) { tenantId = tenantResolver.get().resolve(context); } TenantConfigContext configContext = tenantId != null ? staticTenantsConfig.get(tenantId) : null; if (configContext == null) { if (tenantId != null && !tenantId.isEmpty()) { LOG.debugf("No configuration with a tenant id '%s' has been found, using the default configuration"); } configContext = defaultTenant; } return configContext; }
class + " beans registered"); } } /** * Resolve {@linkplain TenantConfigContext}
class + " beans registered"); } } /** * Resolve {@linkplain TenantConfigContext}