comment stringlengths 22 3.02k | method_body stringlengths 46 368k | target_code stringlengths 0 181 | method_body_after stringlengths 12 368k | context_before stringlengths 11 634k | context_after stringlengths 11 632k |
|---|---|---|---|---|---|
Do we need this short timeout? We already have a TIMEOUT constant defined that can be reused. You can bump the seconds up if you need. https://github.com/Azure/azure-sdk-for-java/blob/0902c492de42ed25164e22fc55ec388041ca12df/sdk/servicebus/azure-messaging-servicebus/src/test/java/com/azure/messaging/servicebus/ServiceBusSessionManagerTest.java#L69 | void singleUnnamedSessionCleanupAfterTimeout() {
Duration shortTimeout = Duration.ofSeconds(15);
ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, false, null,
2);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final String sessionId = "session-1";
final String lockToken = "a-lock-token";
final String linkName = "my-link-name";
final OffsetDateTime sessionLockedUntil = OffsetDateTime.now().plus(Duration.ofSeconds(30));
final Message message = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage = mock(ServiceBusReceivedMessage.class);
when(messageSerializer.deserialize(message, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage);
when(receivedMessage.getSessionId()).thenReturn(sessionId);
when(receivedMessage.getLockToken()).thenReturn(lockToken);
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil())
.thenAnswer(invocation -> Mono.just(sessionLockedUntil));
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenReturn(Mono.just(amqpReceiveLink));
StepVerifier.create(sessionManager.receive())
.then(() -> {
messageSink.next(message);
})
.assertNext(context -> {
assertMessageEquals(sessionId, receivedMessage, context);
})
.then(() -> {
try {
assertNotNull(sessionManager.getLinkName(sessionId));
TimeUnit.SECONDS.sleep(TIMEOUT.getSeconds());
assertNull(sessionManager.getLinkName(sessionId));
} catch (InterruptedException e) { }
})
.thenCancel()
.verify(shortTimeout);
} | Duration shortTimeout = Duration.ofSeconds(15); | void singleUnnamedSessionCleanupAfterTimeout() {
ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, false, null,
2);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final String sessionId = "session-1";
final String lockToken = "a-lock-token";
final String linkName = "my-link-name";
final OffsetDateTime sessionLockedUntil = OffsetDateTime.now().plus(Duration.ofSeconds(30));
final Message message = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage = mock(ServiceBusReceivedMessage.class);
when(messageSerializer.deserialize(message, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage);
when(receivedMessage.getSessionId()).thenReturn(sessionId);
when(receivedMessage.getLockToken()).thenReturn(lockToken);
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil())
.thenAnswer(invocation -> Mono.just(sessionLockedUntil));
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenReturn(Mono.just(amqpReceiveLink));
StepVerifier.create(sessionManager.receive())
.then(() -> {
messageSink.next(message);
})
.assertNext(context -> {
assertMessageEquals(sessionId, receivedMessage, context);
})
.then(() -> {
try {
assertNotNull(sessionManager.getLinkName(sessionId));
TimeUnit.SECONDS.sleep(TIMEOUT.getSeconds());
assertNull(sessionManager.getLinkName(sessionId));
} catch (InterruptedException e) { }
})
.thenCancel()
.verify(TIMEOUT);
} | class ServiceBusSessionManagerTest {
private static final ClientOptions CLIENT_OPTIONS = new ClientOptions();
private static final Duration TIMEOUT = Duration.ofSeconds(10);
private static final Duration MAX_LOCK_RENEWAL = Duration.ofSeconds(5);
private static final String NAMESPACE = "my-namespace-foo.net";
private static final String ENTITY_PATH = "queue-name";
private static final MessagingEntityType ENTITY_TYPE = MessagingEntityType.QUEUE;
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClientTest.class);
private final ReplayProcessor<AmqpEndpointState> endpointProcessor = ReplayProcessor.cacheLast();
private final FluxSink<AmqpEndpointState> endpointSink = endpointProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
private final EmitterProcessor<Message> messageProcessor = EmitterProcessor.create();
private final FluxSink<Message> messageSink = messageProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
private final TracerProvider tracerProvider = new TracerProvider(Collections.emptyList());
private ServiceBusConnectionProcessor connectionProcessor;
private ServiceBusSessionManager sessionManager;
@Mock
private ServiceBusReceiveLink amqpReceiveLink;
@Mock
private ServiceBusAmqpConnection connection;
@Mock
private TokenCredential tokenCredential;
@Mock
private MessageSerializer messageSerializer;
@Mock
private ServiceBusManagementNode managementNode;
@Captor
private ArgumentCaptor<String> linkNameCaptor;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(60));
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@BeforeEach
void beforeEach(TestInfo testInfo) {
logger.info("===== [{}] Setting up. =====", testInfo.getDisplayName());
MockitoAnnotations.initMocks(this);
when(amqpReceiveLink.receive()).thenReturn(messageProcessor.publishOn(Schedulers.single()));
when(amqpReceiveLink.getHostname()).thenReturn(NAMESPACE);
when(amqpReceiveLink.getEntityPath()).thenReturn(ENTITY_PATH);
when(amqpReceiveLink.getEndpointStates()).thenReturn(endpointProcessor);
ConnectionOptions connectionOptions = new ConnectionOptions(NAMESPACE, tokenCredential,
CbsAuthorizationType.SHARED_ACCESS_SIGNATURE, AmqpTransportType.AMQP,
new AmqpRetryOptions().setTryTimeout(TIMEOUT), ProxyOptions.SYSTEM_DEFAULTS, Schedulers.boundedElastic(),
CLIENT_OPTIONS, SslDomain.VerifyMode.VERIFY_PEER_NAME);
when(connection.getEndpointStates()).thenReturn(endpointProcessor);
endpointSink.next(AmqpEndpointState.ACTIVE);
when(connection.getManagementNode(ENTITY_PATH, ENTITY_TYPE))
.thenReturn(Mono.just(managementNode));
connectionProcessor =
Flux.<ServiceBusAmqpConnection>create(sink -> sink.next(connection))
.subscribeWith(new ServiceBusConnectionProcessor(connectionOptions.getFullyQualifiedNamespace(),
connectionOptions.getRetry()));
}
@AfterEach
void afterEach(TestInfo testInfo) {
logger.info("===== [{}] Tearing down. =====", testInfo.getDisplayName());
if (sessionManager != null) {
sessionManager.close();
}
if (connectionProcessor != null) {
connectionProcessor.dispose();
}
Mockito.framework().clearInlineMocks();
}
@Test
void receiveNull() {
ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, false, null, 5);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
StepVerifier.create(sessionManager.receive())
.expectError(NullPointerException.class)
.verify();
}
/**
* Verify that when we receive for a single, unnamed session, when no more items are emitted, it completes.
*/
@Test
void singleUnnamedSession() {
ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, false, null,
5);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final String sessionId = "session-1";
final String lockToken = "a-lock-token";
final String linkName = "my-link-name";
final OffsetDateTime sessionLockedUntil = OffsetDateTime.now().plus(Duration.ofSeconds(30));
final Message message = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage = mock(ServiceBusReceivedMessage.class);
when(messageSerializer.deserialize(message, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage);
when(receivedMessage.getSessionId()).thenReturn(sessionId);
when(receivedMessage.getLockToken()).thenReturn(lockToken);
final int numberOfMessages = 5;
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil())
.thenAnswer(invocation -> Mono.just(sessionLockedUntil));
when(amqpReceiveLink.updateDisposition(lockToken, Accepted.getInstance())).thenReturn(Mono.empty());
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenReturn(Mono.just(amqpReceiveLink));
when(managementNode.renewSessionLock(sessionId, linkName)).thenReturn(
Mono.fromCallable(() -> OffsetDateTime.now().plus(Duration.ofSeconds(5))));
StepVerifier.create(sessionManager.receive())
.then(() -> {
for (int i = 0; i < numberOfMessages; i++) {
messageSink.next(message);
}
})
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.thenCancel()
.verify(Duration.ofSeconds(45));
}
/**
* Verify that when we receive multiple sessions, it'll change to the next session when one is complete.
*/
@Test
void multipleSessions() {
final ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, true,
null, 5);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final int numberOfMessages = 5;
final Callable<OffsetDateTime> onRenewal = () -> OffsetDateTime.now().plus(Duration.ofSeconds(5));
final String sessionId = "session-1";
final String lockToken = "a-lock-token";
final String linkName = "my-link-name";
final Message message = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage = mock(ServiceBusReceivedMessage.class);
when(receivedMessage.getSessionId()).thenReturn(sessionId);
when(receivedMessage.getLockToken()).thenReturn(lockToken);
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
when(amqpReceiveLink.updateDisposition(lockToken, Accepted.getInstance())).thenReturn(Mono.empty());
final ServiceBusReceiveLink amqpReceiveLink2 = mock(ServiceBusReceiveLink.class);
final Message message2 = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage2 = mock(ServiceBusReceivedMessage.class);
final String sessionId2 = "session-2";
final String lockToken2 = "a-lock-token-2";
final String linkName2 = "my-link-name-2";
final TestPublisher<Message> messagePublisher2 = TestPublisher.create();
final Flux<Message> messageFlux2 = messagePublisher2.flux();
when(receivedMessage2.getSessionId()).thenReturn(sessionId2);
when(receivedMessage2.getLockToken()).thenReturn(lockToken2);
when(amqpReceiveLink2.receive()).thenReturn(messageFlux2);
when(amqpReceiveLink2.getHostname()).thenReturn(NAMESPACE);
when(amqpReceiveLink2.getEntityPath()).thenReturn(ENTITY_PATH);
when(amqpReceiveLink2.getEndpointStates()).thenReturn(endpointProcessor);
when(amqpReceiveLink2.getLinkName()).thenReturn(linkName2);
when(amqpReceiveLink2.getSessionId()).thenReturn(Mono.just(sessionId2));
when(amqpReceiveLink2.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
when(amqpReceiveLink2.updateDisposition(lockToken2, Accepted.getInstance())).thenReturn(Mono.empty());
final AtomicInteger count = new AtomicInteger();
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenAnswer(invocation -> {
final int number = count.getAndIncrement();
switch (number) {
case 0:
return Mono.just(amqpReceiveLink);
case 1:
return Mono.just(amqpReceiveLink2);
default:
return Mono.empty();
}
});
when(messageSerializer.deserialize(message, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage);
when(messageSerializer.deserialize(message2, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage2);
when(managementNode.renewSessionLock(sessionId, linkName)).thenReturn(Mono.fromCallable(onRenewal));
when(managementNode.renewSessionLock(sessionId2, linkName2)).thenReturn(Mono.fromCallable(onRenewal));
StepVerifier.create(sessionManager.receive())
.then(() -> {
for (int i = 0; i < numberOfMessages; i++) {
messageSink.next(message);
}
})
.assertNext(context -> {
System.out.println("1");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("2");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("3");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("4");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("5");
assertMessageEquals(sessionId, receivedMessage, context);
})
.thenAwait(Duration.ofSeconds(13))
.then(() -> {
for (int i = 0; i < 3; i++) {
messagePublisher2.next(message2);
}
})
.assertNext(context -> {
System.out.println("6");
assertMessageEquals(sessionId2, receivedMessage2, context);
})
.assertNext(context -> {
System.out.println("7");
assertMessageEquals(sessionId2, receivedMessage2, context);
})
.assertNext(context -> {
System.out.println("8");
assertMessageEquals(sessionId2, receivedMessage2, context);
})
.thenAwait(Duration.ofSeconds(15))
.thenCancel()
.verify();
}
/**
* Verify that when we can call multiple receive, it'll create a new link.
*/
@Test
void multipleReceiveUnnamedSession() {
final int expectedLinksCreated = 2;
final Callable<OffsetDateTime> onRenewal = () -> OffsetDateTime.now().plus(Duration.ofSeconds(5));
final ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, Duration.ZERO, false,
null, 1);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final String sessionId = "session-1";
final String linkName = "my-link-name";
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
final ServiceBusReceiveLink amqpReceiveLink2 = mock(ServiceBusReceiveLink.class);
final String sessionId2 = "session-2";
final String linkName2 = "my-link-name-2";
final TestPublisher<Message> messagePublisher2 = TestPublisher.create();
final Flux<Message> messageFlux2 = messagePublisher2.flux();
when(amqpReceiveLink2.receive()).thenReturn(messageFlux2);
when(amqpReceiveLink2.getHostname()).thenReturn(NAMESPACE);
when(amqpReceiveLink2.getEntityPath()).thenReturn(ENTITY_PATH);
when(amqpReceiveLink2.getEndpointStates()).thenReturn(endpointProcessor);
when(amqpReceiveLink2.getLinkName()).thenReturn(linkName2);
when(amqpReceiveLink2.getSessionId()).thenReturn(Mono.just(sessionId2));
when(amqpReceiveLink2.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
final AtomicInteger count = new AtomicInteger();
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenAnswer(invocation -> {
final int number = count.getAndIncrement();
switch (number) {
case 0:
return Mono.just(amqpReceiveLink);
case 1:
return Mono.just(amqpReceiveLink2);
default:
return Mono.empty();
}
});
StepVerifier.create(sessionManager.receive())
.thenAwait(Duration.ofSeconds(5))
.thenCancel()
.verify();
StepVerifier.create(sessionManager.receive())
.thenAwait(Duration.ofSeconds(5))
.thenCancel()
.verify();
verify(connection, times(2)).createReceiveLink(linkNameCaptor.capture(), eq(ENTITY_PATH), any(
ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull());
final List<String> actualLinksCreated = linkNameCaptor.getAllValues();
assertNotNull(actualLinksCreated);
assertEquals(expectedLinksCreated, actualLinksCreated.size());
assertFalse(actualLinksCreated.get(0).equalsIgnoreCase(actualLinksCreated.get(1)));
}
/**
* Validate that session-id specific session receiver is removed after {@link AmqpRetryOptions
*/
@Test
private static void assertMessageEquals(String sessionId, ServiceBusReceivedMessage expected,
ServiceBusMessageContext actual) {
assertEquals(sessionId, actual.getSessionId());
assertNull(actual.getThrowable());
assertEquals(expected, actual.getMessage());
}
} | class ServiceBusSessionManagerTest {
private static final ClientOptions CLIENT_OPTIONS = new ClientOptions();
private static final Duration TIMEOUT = Duration.ofSeconds(10);
private static final Duration MAX_LOCK_RENEWAL = Duration.ofSeconds(5);
private static final String NAMESPACE = "my-namespace-foo.net";
private static final String ENTITY_PATH = "queue-name";
private static final MessagingEntityType ENTITY_TYPE = MessagingEntityType.QUEUE;
private final ClientLogger logger = new ClientLogger(ServiceBusReceiverAsyncClientTest.class);
private final ReplayProcessor<AmqpEndpointState> endpointProcessor = ReplayProcessor.cacheLast();
private final FluxSink<AmqpEndpointState> endpointSink = endpointProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
private final EmitterProcessor<Message> messageProcessor = EmitterProcessor.create();
private final FluxSink<Message> messageSink = messageProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
private final TracerProvider tracerProvider = new TracerProvider(Collections.emptyList());
private ServiceBusConnectionProcessor connectionProcessor;
private ServiceBusSessionManager sessionManager;
@Mock
private ServiceBusReceiveLink amqpReceiveLink;
@Mock
private ServiceBusAmqpConnection connection;
@Mock
private TokenCredential tokenCredential;
@Mock
private MessageSerializer messageSerializer;
@Mock
private ServiceBusManagementNode managementNode;
@Captor
private ArgumentCaptor<String> linkNameCaptor;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(60));
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@BeforeEach
void beforeEach(TestInfo testInfo) {
logger.info("===== [{}] Setting up. =====", testInfo.getDisplayName());
MockitoAnnotations.initMocks(this);
when(amqpReceiveLink.receive()).thenReturn(messageProcessor.publishOn(Schedulers.single()));
when(amqpReceiveLink.getHostname()).thenReturn(NAMESPACE);
when(amqpReceiveLink.getEntityPath()).thenReturn(ENTITY_PATH);
when(amqpReceiveLink.getEndpointStates()).thenReturn(endpointProcessor);
ConnectionOptions connectionOptions = new ConnectionOptions(NAMESPACE, tokenCredential,
CbsAuthorizationType.SHARED_ACCESS_SIGNATURE, AmqpTransportType.AMQP,
new AmqpRetryOptions().setTryTimeout(TIMEOUT), ProxyOptions.SYSTEM_DEFAULTS, Schedulers.boundedElastic(),
CLIENT_OPTIONS, SslDomain.VerifyMode.VERIFY_PEER_NAME);
when(connection.getEndpointStates()).thenReturn(endpointProcessor);
endpointSink.next(AmqpEndpointState.ACTIVE);
when(connection.getManagementNode(ENTITY_PATH, ENTITY_TYPE))
.thenReturn(Mono.just(managementNode));
connectionProcessor =
Flux.<ServiceBusAmqpConnection>create(sink -> sink.next(connection))
.subscribeWith(new ServiceBusConnectionProcessor(connectionOptions.getFullyQualifiedNamespace(),
connectionOptions.getRetry()));
}
@AfterEach
void afterEach(TestInfo testInfo) {
logger.info("===== [{}] Tearing down. =====", testInfo.getDisplayName());
if (sessionManager != null) {
sessionManager.close();
}
if (connectionProcessor != null) {
connectionProcessor.dispose();
}
Mockito.framework().clearInlineMocks();
}
@Test
void receiveNull() {
ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, false, null, 5);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
StepVerifier.create(sessionManager.receive())
.expectError(NullPointerException.class)
.verify();
}
/**
* Verify that when we receive for a single, unnamed session, when no more items are emitted, it completes.
*/
@Test
void singleUnnamedSession() {
ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, false, null,
5);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final String sessionId = "session-1";
final String lockToken = "a-lock-token";
final String linkName = "my-link-name";
final OffsetDateTime sessionLockedUntil = OffsetDateTime.now().plus(Duration.ofSeconds(30));
final Message message = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage = mock(ServiceBusReceivedMessage.class);
when(messageSerializer.deserialize(message, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage);
when(receivedMessage.getSessionId()).thenReturn(sessionId);
when(receivedMessage.getLockToken()).thenReturn(lockToken);
final int numberOfMessages = 5;
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil())
.thenAnswer(invocation -> Mono.just(sessionLockedUntil));
when(amqpReceiveLink.updateDisposition(lockToken, Accepted.getInstance())).thenReturn(Mono.empty());
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenReturn(Mono.just(amqpReceiveLink));
when(managementNode.renewSessionLock(sessionId, linkName)).thenReturn(
Mono.fromCallable(() -> OffsetDateTime.now().plus(Duration.ofSeconds(5))));
StepVerifier.create(sessionManager.receive())
.then(() -> {
for (int i = 0; i < numberOfMessages; i++) {
messageSink.next(message);
}
})
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.assertNext(context -> assertMessageEquals(sessionId, receivedMessage, context))
.thenCancel()
.verify(Duration.ofSeconds(45));
}
/**
* Verify that when we receive multiple sessions, it'll change to the next session when one is complete.
*/
@Test
void multipleSessions() {
final ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, MAX_LOCK_RENEWAL, true,
null, 5);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final int numberOfMessages = 5;
final Callable<OffsetDateTime> onRenewal = () -> OffsetDateTime.now().plus(Duration.ofSeconds(5));
final String sessionId = "session-1";
final String lockToken = "a-lock-token";
final String linkName = "my-link-name";
final Message message = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage = mock(ServiceBusReceivedMessage.class);
when(receivedMessage.getSessionId()).thenReturn(sessionId);
when(receivedMessage.getLockToken()).thenReturn(lockToken);
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
when(amqpReceiveLink.updateDisposition(lockToken, Accepted.getInstance())).thenReturn(Mono.empty());
final ServiceBusReceiveLink amqpReceiveLink2 = mock(ServiceBusReceiveLink.class);
final Message message2 = mock(Message.class);
final ServiceBusReceivedMessage receivedMessage2 = mock(ServiceBusReceivedMessage.class);
final String sessionId2 = "session-2";
final String lockToken2 = "a-lock-token-2";
final String linkName2 = "my-link-name-2";
final TestPublisher<Message> messagePublisher2 = TestPublisher.create();
final Flux<Message> messageFlux2 = messagePublisher2.flux();
when(receivedMessage2.getSessionId()).thenReturn(sessionId2);
when(receivedMessage2.getLockToken()).thenReturn(lockToken2);
when(amqpReceiveLink2.receive()).thenReturn(messageFlux2);
when(amqpReceiveLink2.getHostname()).thenReturn(NAMESPACE);
when(amqpReceiveLink2.getEntityPath()).thenReturn(ENTITY_PATH);
when(amqpReceiveLink2.getEndpointStates()).thenReturn(endpointProcessor);
when(amqpReceiveLink2.getLinkName()).thenReturn(linkName2);
when(amqpReceiveLink2.getSessionId()).thenReturn(Mono.just(sessionId2));
when(amqpReceiveLink2.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
when(amqpReceiveLink2.updateDisposition(lockToken2, Accepted.getInstance())).thenReturn(Mono.empty());
final AtomicInteger count = new AtomicInteger();
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenAnswer(invocation -> {
final int number = count.getAndIncrement();
switch (number) {
case 0:
return Mono.just(amqpReceiveLink);
case 1:
return Mono.just(amqpReceiveLink2);
default:
return Mono.empty();
}
});
when(messageSerializer.deserialize(message, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage);
when(messageSerializer.deserialize(message2, ServiceBusReceivedMessage.class)).thenReturn(receivedMessage2);
when(managementNode.renewSessionLock(sessionId, linkName)).thenReturn(Mono.fromCallable(onRenewal));
when(managementNode.renewSessionLock(sessionId2, linkName2)).thenReturn(Mono.fromCallable(onRenewal));
StepVerifier.create(sessionManager.receive())
.then(() -> {
for (int i = 0; i < numberOfMessages; i++) {
messageSink.next(message);
}
})
.assertNext(context -> {
System.out.println("1");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("2");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("3");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("4");
assertMessageEquals(sessionId, receivedMessage, context);
})
.assertNext(context -> {
System.out.println("5");
assertMessageEquals(sessionId, receivedMessage, context);
})
.thenAwait(Duration.ofSeconds(13))
.then(() -> {
for (int i = 0; i < 3; i++) {
messagePublisher2.next(message2);
}
})
.assertNext(context -> {
System.out.println("6");
assertMessageEquals(sessionId2, receivedMessage2, context);
})
.assertNext(context -> {
System.out.println("7");
assertMessageEquals(sessionId2, receivedMessage2, context);
})
.assertNext(context -> {
System.out.println("8");
assertMessageEquals(sessionId2, receivedMessage2, context);
})
.thenAwait(Duration.ofSeconds(15))
.thenCancel()
.verify();
}
/**
* Verify that when we can call multiple receive, it'll create a new link.
*/
@Test
void multipleReceiveUnnamedSession() {
final int expectedLinksCreated = 2;
final Callable<OffsetDateTime> onRenewal = () -> OffsetDateTime.now().plus(Duration.ofSeconds(5));
final ReceiverOptions receiverOptions = new ReceiverOptions(ServiceBusReceiveMode.PEEK_LOCK, 1, Duration.ZERO, false,
null, 1);
sessionManager = new ServiceBusSessionManager(ENTITY_PATH, ENTITY_TYPE, connectionProcessor,
tracerProvider, messageSerializer, receiverOptions);
final String sessionId = "session-1";
final String linkName = "my-link-name";
when(amqpReceiveLink.getLinkName()).thenReturn(linkName);
when(amqpReceiveLink.getSessionId()).thenReturn(Mono.just(sessionId));
when(amqpReceiveLink.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
final ServiceBusReceiveLink amqpReceiveLink2 = mock(ServiceBusReceiveLink.class);
final String sessionId2 = "session-2";
final String linkName2 = "my-link-name-2";
final TestPublisher<Message> messagePublisher2 = TestPublisher.create();
final Flux<Message> messageFlux2 = messagePublisher2.flux();
when(amqpReceiveLink2.receive()).thenReturn(messageFlux2);
when(amqpReceiveLink2.getHostname()).thenReturn(NAMESPACE);
when(amqpReceiveLink2.getEntityPath()).thenReturn(ENTITY_PATH);
when(amqpReceiveLink2.getEndpointStates()).thenReturn(endpointProcessor);
when(amqpReceiveLink2.getLinkName()).thenReturn(linkName2);
when(amqpReceiveLink2.getSessionId()).thenReturn(Mono.just(sessionId2));
when(amqpReceiveLink2.getSessionLockedUntil()).thenReturn(Mono.fromCallable(onRenewal));
final AtomicInteger count = new AtomicInteger();
when(connection.createReceiveLink(anyString(), eq(ENTITY_PATH), any(ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull())).thenAnswer(invocation -> {
final int number = count.getAndIncrement();
switch (number) {
case 0:
return Mono.just(amqpReceiveLink);
case 1:
return Mono.just(amqpReceiveLink2);
default:
return Mono.empty();
}
});
StepVerifier.create(sessionManager.receive())
.thenAwait(Duration.ofSeconds(5))
.thenCancel()
.verify();
StepVerifier.create(sessionManager.receive())
.thenAwait(Duration.ofSeconds(5))
.thenCancel()
.verify();
verify(connection, times(2)).createReceiveLink(linkNameCaptor.capture(), eq(ENTITY_PATH), any(
ServiceBusReceiveMode.class), isNull(),
any(MessagingEntityType.class), isNull());
final List<String> actualLinksCreated = linkNameCaptor.getAllValues();
assertNotNull(actualLinksCreated);
assertEquals(expectedLinksCreated, actualLinksCreated.size());
assertFalse(actualLinksCreated.get(0).equalsIgnoreCase(actualLinksCreated.get(1)));
}
/**
* Validate that session-id specific session receiver is removed after {@link AmqpRetryOptions
*/
@Test
private static void assertMessageEquals(String sessionId, ServiceBusReceivedMessage expected,
ServiceBusMessageContext actual) {
assertEquals(sessionId, actual.getSessionId());
assertNull(actual.getThrowable());
assertEquals(expected, actual.getMessage());
}
} |
Yeah, exactly! It's meant to be used by Quarkus developers (as it's part of the runtime package, so not meant to be part of the public API) | public QuarkusBindException(List<Integer> ports) {
if (ports.isEmpty()) {
throw new IllegalStateException("ports must not be empty");
}
this.ports = ports;
} | throw new IllegalStateException("ports must not be empty"); | public QuarkusBindException(List<Integer> ports) {
if (ports.isEmpty()) {
throw new IllegalStateException("ports must not be empty");
}
this.ports = ports;
} | class QuarkusBindException extends BindException {
private final List<Integer> ports;
public QuarkusBindException(int port) {
this(Collections.singletonList(port));
}
public List<Integer> getPorts() {
return ports;
}
} | class QuarkusBindException extends BindException {
private final List<Integer> ports;
public QuarkusBindException(int port) {
this(Collections.singletonList(port));
}
/**
 * Returns the ports that could not be bound.
 *
 * <p>Returned as an unmodifiable view so callers cannot mutate the
 * exception's internal state.
 *
 * @return the offending ports; the constructor guarantees this is non-empty
 */
public List<Integer> getPorts() {
    return Collections.unmodifiableList(ports);
}
} |
Maybe we could always `pin` as before, but migrate the checking logic from `ResultPartitionManager` to `ResultPartition` as I mentioned before. I mean `ResultPartitionManager` is not aware of the external issue, the tag in `ResultPartitionManager#isReleaseExternallyManagedPartitionsOnConsumption` could be merged with `ResultPartition#isManagedExternally` to generate a final tag in `ResultPartition`. Then while `ResultPartition#onConsumedSubpartition`, it could check the final tag to decide whether to call `ResultPartitionManager#onConsumedPartition`. And the reference counter only works when the final tag is false. | void onConsumedSubpartition(int subpartitionIndex) {
if (isReleased.get()) {
return;
}
if (isManagedExternally) {
partitionManager.onConsumedPartition(this);
} else {
int refCnt = pendingReferences.decrementAndGet();
if (refCnt == 0) {
partitionManager.onConsumedPartition(this);
} else if (refCnt < 0) {
throw new IllegalStateException("All references released.");
}
LOG.debug("{}: Received release notification for subpartition {} (reference count now at: {}).",
this, subpartitionIndex, pendingReferences);
}
} | partitionManager.onConsumedPartition(this); | void onConsumedSubpartition(int subpartitionIndex) {
if (isReleased.get()) {
return;
}
LOG.debug("{}: Received release notification for subpartition {}.",
this, subpartitionIndex);
} | class ResultPartition implements ResultPartitionWriter, BufferPoolOwner {
private static final Logger LOG = LoggerFactory.getLogger(ResultPartition.class);
private final String owningTaskName;
private final ResultPartitionID partitionId;
/** Type of this partition. Defines the concrete subpartition implementation to use. */
private final ResultPartitionType partitionType;
private final boolean isManagedExternally;
/** The subpartitions of this partition. At least one. */
private final ResultSubpartition[] subpartitions;
private final ResultPartitionManager partitionManager;
public final int numTargetKeyGroups;
private final AtomicBoolean isReleased = new AtomicBoolean();
/**
* The total number of references to subpartitions of this result. The result partition can be
* safely released, iff the reference count is zero. A reference count of -1 denotes that the
* result partition has been released.
*/
private final AtomicInteger pendingReferences = new AtomicInteger();
private BufferPool bufferPool;
private boolean isFinished;
private volatile Throwable cause;
private final FunctionWithException<BufferPoolOwner, BufferPool, IOException> bufferPoolFactory;
/**
 * Creates a result partition.
 *
 * @param owningTaskName name of the producing task (used for logging)
 * @param partitionId id of this partition
 * @param partitionType defines the concrete subpartition behaviour
 * @param subpartitions the subpartitions of this partition, at least one
 * @param numTargetKeyGroups number of key groups on the consuming side
 * @param isManagedExternally whether release is driven externally; when true the
 *        reference-counting checks in this class are skipped
 * @param partitionManager registry this partition registers with in {@code setup()}
 * @param bufferPoolFactory creates the buffer pool lazily in {@code setup()}
 */
public ResultPartition(
String owningTaskName,
ResultPartitionID partitionId,
ResultPartitionType partitionType,
ResultSubpartition[] subpartitions,
int numTargetKeyGroups,
boolean isManagedExternally,
ResultPartitionManager partitionManager,
FunctionWithException<BufferPoolOwner, BufferPool, IOException> bufferPoolFactory) {
this.owningTaskName = checkNotNull(owningTaskName);
this.partitionId = checkNotNull(partitionId);
this.partitionType = checkNotNull(partitionType);
this.subpartitions = checkNotNull(subpartitions);
this.numTargetKeyGroups = numTargetKeyGroups;
this.isManagedExternally = isManagedExternally;
this.partitionManager = checkNotNull(partitionManager);
this.bufferPoolFactory = bufferPoolFactory;
}
/**
 * Registers a buffer pool with this result partition.
 *
 * <p>There is one pool for each result partition, which is shared by all its sub partitions.
 *
 * <p>The pool is registered with the partition *after* it has been constructed in order to conform
 * to the life-cycle of task registrations in the {@link TaskExecutor}.
 *
 * @throws IOException if creating the buffer pool fails
 */
@Override
public void setup() throws IOException {
checkState(this.bufferPool == null, "Bug in result partition setup logic: Already registered buffer pool.");
BufferPool bufferPool = checkNotNull(bufferPoolFactory.apply(this));
// The pool must guarantee at least one buffer per subpartition.
checkArgument(bufferPool.getNumberOfRequiredMemorySegments() >= getNumberOfSubpartitions(),
"Bug in result partition setup logic: Buffer pool has not enough guaranteed buffers for this result partition.");
this.bufferPool = bufferPool;
// Make the partition discoverable by consumers.
partitionManager.registerResultPartition(this);
}
/** Returns the name of the task that owns this partition. */
public String getOwningTaskName() {
return owningTaskName;
}
/** Returns the id of this result partition. */
public ResultPartitionID getPartitionId() {
return partitionId;
}
@Override
public int getNumberOfSubpartitions() {
return subpartitions.length;
}
/** Returns the buffer pool registered in {@link #setup()}; {@code null} before setup. */
public BufferPool getBufferPool() {
return bufferPool;
}
/** Sums the queued-buffer counts over all subpartitions (unsynchronized snapshot). */
public int getNumberOfQueuedBuffers() {
    int queued = 0;
    for (int i = 0; i < subpartitions.length; i++) {
        queued += subpartitions[i].unsynchronizedGetNumberOfQueuedBuffers();
    }
    return queued;
}
/**
* Returns the type of this result partition.
*
* @return result partition type
*/
public ResultPartitionType getPartitionType() {
return partitionType;
}
boolean isManagedExternally() {
return isManagedExternally;
}
@Override
public BufferBuilder getBufferBuilder() throws IOException, InterruptedException {
checkInProduceState();
return bufferPool.requestBufferBuilderBlocking();
}
/**
 * Adds the given buffer consumer to the addressed subpartition.
 *
 * <p>If the partition is already finished or the index is invalid, the consumer
 * is closed before the exception is rethrown, so the buffer is not leaked.
 */
@Override
public boolean addBufferConsumer(BufferConsumer bufferConsumer, int subpartitionIndex) throws IOException {
checkNotNull(bufferConsumer);
ResultSubpartition subpartition;
try {
checkInProduceState();
subpartition = subpartitions[subpartitionIndex];
}
catch (Exception ex) {
// We took ownership of the consumer; release it before propagating.
bufferConsumer.close();
throw ex;
}
return subpartition.add(bufferConsumer);
}
@Override
public void flushAll() {
for (ResultSubpartition subpartition : subpartitions) {
subpartition.flush();
}
}
@Override
public void flush(int subpartitionIndex) {
subpartitions[subpartitionIndex].flush();
}
/**
* Finishes the result partition.
*
* <p>After this operation, it is not possible to add further data to the result partition.
*
* <p>For BLOCKING results, this will trigger the deployment of consuming tasks.
*/
@Override
public void finish() throws IOException {
checkInProduceState();
for (ResultSubpartition subpartition : subpartitions) {
subpartition.finish();
}
isFinished = true;
}
/** Releases the result partition without recording a failure cause. */
public void release() {
release(null);
}
/**
 * Releases the result partition and all of its subpartitions.
 *
 * <p>Idempotent: the {@code isReleased} compare-and-set lets only the first
 * call perform the release; subsequent calls are no-ops.
 *
 * @param cause optional failure cause, recorded only when non-null
 */
public void release(Throwable cause) {
if (isReleased.compareAndSet(false, true)) {
LOG.debug("{}: Releasing {}.", owningTaskName, this);
if (cause != null) {
this.cause = cause;
}
// Release every subpartition; a failure in one must not stop the others.
for (ResultSubpartition subpartition : subpartitions) {
try {
subpartition.release();
}
catch (Throwable t) {
LOG.error("Error during release of result subpartition: " + t.getMessage(), t);
}
}
}
}
@Override
public void close() {
if (bufferPool != null) {
bufferPool.lazyDestroy();
}
}
@Override
public void fail(@Nullable Throwable throwable) {
partitionManager.releasePartition(partitionId, throwable);
}
/**
 * Returns a read view for the requested subpartition.
 *
 * <p>For internally managed partitions, the partition must be pinned
 * (reference count &gt; 0) and not released (count of -1); externally managed
 * partitions skip these reference-count checks.
 */
public ResultSubpartitionView createSubpartitionView(int index, BufferAvailabilityListener availabilityListener) throws IOException {
if (!isManagedExternally) {
int refCnt = pendingReferences.get();
checkState(refCnt != -1, "Partition released.");
checkState(refCnt > 0, "Partition not pinned.");
}
checkElementIndex(index, subpartitions.length, "Subpartition not found.");
ResultSubpartitionView readView = subpartitions[index].createReadView(availabilityListener);
LOG.debug("Created {}", readView);
return readView;
}
public Throwable getFailureCause() {
return cause;
}
@Override
public int getNumTargetKeyGroups() {
return numTargetKeyGroups;
}
/**
* Releases buffers held by this result partition.
*
* <p>This is a callback from the buffer pool, which is registered for result partitions, which
* are back pressure-free.
*/
@Override
public void releaseMemory(int toRelease) throws IOException {
checkArgument(toRelease > 0);
for (ResultSubpartition subpartition : subpartitions) {
toRelease -= subpartition.releaseMemory();
if (toRelease <= 0) {
break;
}
}
}
/**
* Whether this partition is released.
*
* <p>A partition is released when each subpartition is either consumed and communication is closed by consumer
* or failed. A partition is also released if task is cancelled.
*/
public boolean isReleased() {
return isReleased.get();
}
@Override
public String toString() {
    // Same rendering as before, assembled with an explicit StringBuilder.
    StringBuilder description = new StringBuilder();
    description.append("ResultPartition ").append(partitionId.toString());
    description.append(" [").append(partitionType).append(", ");
    description.append(subpartitions.length).append(" subpartitions, ");
    description.append(pendingReferences).append(" pending references]");
    return description.toString();
}
/**
 * Pins the result partition.
 *
 * <p>The partition can only be released after each subpartition has been consumed once per pin
 * operation. Each pin adds one reference per subpartition through a CAS retry loop.
 *
 * @throws IllegalStateException if the partition was already released (count is negative)
 */
void pin() {
while (true) {
int refCnt = pendingReferences.get();
if (refCnt >= 0) {
// Add one reference per subpartition; retry if another thread raced us.
if (pendingReferences.compareAndSet(refCnt, refCnt + subpartitions.length)) {
break;
}
}
else {
// A negative count marks a released partition.
throw new IllegalStateException("Released.");
}
}
}
/**
* Notification when a subpartition is released.
*/
public ResultSubpartition[] getAllPartitions() {
return subpartitions;
}
private void checkInProduceState() throws IllegalStateException {
checkState(!isFinished, "Partition already finished.");
}
} | class ResultPartition implements ResultPartitionWriter, BufferPoolOwner {
protected static final Logger LOG = LoggerFactory.getLogger(ResultPartition.class);
private final String owningTaskName;
protected final ResultPartitionID partitionId;
/** Type of this partition. Defines the concrete subpartition implementation to use. */
protected final ResultPartitionType partitionType;
/** The subpartitions of this partition. At least one. */
protected final ResultSubpartition[] subpartitions;
protected final ResultPartitionManager partitionManager;
public final int numTargetKeyGroups;
private final AtomicBoolean isReleased = new AtomicBoolean();
private BufferPool bufferPool;
private boolean isFinished;
private volatile Throwable cause;
private final FunctionWithException<BufferPoolOwner, BufferPool, IOException> bufferPoolFactory;
public ResultPartition(
String owningTaskName,
ResultPartitionID partitionId,
ResultPartitionType partitionType,
ResultSubpartition[] subpartitions,
int numTargetKeyGroups,
ResultPartitionManager partitionManager,
FunctionWithException<BufferPoolOwner, BufferPool, IOException> bufferPoolFactory) {
this.owningTaskName = checkNotNull(owningTaskName);
this.partitionId = checkNotNull(partitionId);
this.partitionType = checkNotNull(partitionType);
this.subpartitions = checkNotNull(subpartitions);
this.numTargetKeyGroups = numTargetKeyGroups;
this.partitionManager = checkNotNull(partitionManager);
this.bufferPoolFactory = bufferPoolFactory;
}
/**
* Registers a buffer pool with this result partition.
*
* <p>There is one pool for each result partition, which is shared by all its sub partitions.
*
* <p>The pool is registered with the partition *after* it as been constructed in order to conform
* to the life-cycle of task registrations in the {@link TaskExecutor}.
*/
@Override
public void setup() throws IOException {
checkState(this.bufferPool == null, "Bug in result partition setup logic: Already registered buffer pool.");
BufferPool bufferPool = checkNotNull(bufferPoolFactory.apply(this));
checkArgument(bufferPool.getNumberOfRequiredMemorySegments() >= getNumberOfSubpartitions(),
"Bug in result partition setup logic: Buffer pool has not enough guaranteed buffers for this result partition.");
this.bufferPool = bufferPool;
partitionManager.registerResultPartition(this);
}
public String getOwningTaskName() {
return owningTaskName;
}
public ResultPartitionID getPartitionId() {
return partitionId;
}
@Override
public int getNumberOfSubpartitions() {
return subpartitions.length;
}
public BufferPool getBufferPool() {
return bufferPool;
}
public int getNumberOfQueuedBuffers() {
int totalBuffers = 0;
for (ResultSubpartition subpartition : subpartitions) {
totalBuffers += subpartition.unsynchronizedGetNumberOfQueuedBuffers();
}
return totalBuffers;
}
/**
* Returns the type of this result partition.
*
* @return result partition type
*/
public ResultPartitionType getPartitionType() {
return partitionType;
}
@Override
public BufferBuilder getBufferBuilder() throws IOException, InterruptedException {
checkInProduceState();
return bufferPool.requestBufferBuilderBlocking();
}
@Override
public boolean addBufferConsumer(BufferConsumer bufferConsumer, int subpartitionIndex) throws IOException {
checkNotNull(bufferConsumer);
ResultSubpartition subpartition;
try {
checkInProduceState();
subpartition = subpartitions[subpartitionIndex];
}
catch (Exception ex) {
bufferConsumer.close();
throw ex;
}
return subpartition.add(bufferConsumer);
}
@Override
public void flushAll() {
for (ResultSubpartition subpartition : subpartitions) {
subpartition.flush();
}
}
@Override
public void flush(int subpartitionIndex) {
subpartitions[subpartitionIndex].flush();
}
/**
* Finishes the result partition.
*
* <p>After this operation, it is not possible to add further data to the result partition.
*
* <p>For BLOCKING results, this will trigger the deployment of consuming tasks.
*/
@Override
public void finish() throws IOException {
checkInProduceState();
for (ResultSubpartition subpartition : subpartitions) {
subpartition.finish();
}
isFinished = true;
}
public void release() {
release(null);
}
/**
* Releases the result partition.
*/
public void release(Throwable cause) {
if (isReleased.compareAndSet(false, true)) {
LOG.debug("{}: Releasing {}.", owningTaskName, this);
if (cause != null) {
this.cause = cause;
}
for (ResultSubpartition subpartition : subpartitions) {
try {
subpartition.release();
}
catch (Throwable t) {
LOG.error("Error during release of result subpartition: " + t.getMessage(), t);
}
}
}
}
@Override
public void close() {
if (bufferPool != null) {
bufferPool.lazyDestroy();
}
}
@Override
public void fail(@Nullable Throwable throwable) {
partitionManager.releasePartition(partitionId, throwable);
}
/**
* Returns the requested subpartition.
*/
public ResultSubpartitionView createSubpartitionView(int index, BufferAvailabilityListener availabilityListener) throws IOException {
checkElementIndex(index, subpartitions.length, "Subpartition not found.");
ResultSubpartitionView readView = subpartitions[index].createReadView(availabilityListener);
LOG.debug("Created {}", readView);
return readView;
}
public Throwable getFailureCause() {
return cause;
}
@Override
public int getNumTargetKeyGroups() {
return numTargetKeyGroups;
}
/**
* Releases buffers held by this result partition.
*
* <p>This is a callback from the buffer pool, which is registered for result partitions, which
* are back pressure-free.
*/
@Override
public void releaseMemory(int toRelease) throws IOException {
checkArgument(toRelease > 0);
for (ResultSubpartition subpartition : subpartitions) {
toRelease -= subpartition.releaseMemory();
if (toRelease <= 0) {
break;
}
}
}
/**
* Whether this partition is released.
*
* <p>A partition is released when each subpartition is either consumed and communication is closed by consumer
* or failed. A partition is also released if task is cancelled.
*/
public boolean isReleased() {
return isReleased.get();
}
@Override
public String toString() {
    // Same rendering as before, assembled with an explicit StringBuilder.
    StringBuilder description = new StringBuilder();
    description.append("ResultPartition ").append(partitionId.toString());
    description.append(" [").append(partitionType).append(", ");
    description.append(subpartitions.length).append(" subpartitions]");
    return description.toString();
}
/**
* Pins the result partition.
*/
void pin() {
}
/**
* Notification when a subpartition is released.
*/
public ResultSubpartition[] getAllPartitions() {
return subpartitions;
}
private void checkInProduceState() throws IllegalStateException {
checkState(!isFinished, "Partition already finished.");
}
} |
we need to validate whether there are any whitespaces/trivia between the tokens. Can log an error and continue. | private STNode parseTrippleGTToken() {
STNode openGTToken = parseGTToken();
STNode middleLGToken = parseGTToken();
STNode endLGToken = parseGTToken();
return STNodeFactory.createTrippleGTTokenNode(openGTToken, middleLGToken, endLGToken);
} | STNode endLGToken = parseGTToken(); | private STNode parseTrippleGTToken() {
STNode openGTToken = parseGTToken();
reportInvalidShiftOperator(openGTToken);
STNode middleGTToken = parseGTToken();
reportInvalidShiftOperator(middleGTToken);
STNode endLGToken = parseGTToken();
return STNodeFactory.createTrippleGTTokenNode(openGTToken, middleGTToken, endLGToken);
} | class BallerinaParser extends AbstractParser {
private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION;
protected BallerinaParser(AbstractTokenReader tokenReader) {
super(tokenReader, new BallerinaParserErrorHandler(tokenReader));
}
/**
 * Start parsing the given input.
 *
 * <p>Parsing always begins at the top of a compilation unit (a whole file).
 *
 * @return Parsed node
 */
@Override
public STNode parse() {
return parseCompUnit();
}
/**
 * Start parsing the input from a given context. Supported starting points are:
 * <ul>
 * <li>Module part (a file)</li>
 * <li>Top level node</li>
 * <li>Statement</li>
 * <li>Expression</li>
 * </ul>
 *
 * <p>For the nested starting points, the enclosing parser contexts
 * (comp-unit, function body block, statement) are pushed first.
 *
 * @param context Context to start parsing
 * @return Parsed node
 * @throws UnsupportedOperationException if {@code context} is not a supported starting point
 */
public STNode parse(ParserRuleContext context) {
switch (context) {
case COMP_UNIT:
return parseCompUnit();
case TOP_LEVEL_NODE:
// Parse the node as if it appeared inside a compilation unit.
startContext(ParserRuleContext.COMP_UNIT);
return parseTopLevelNode();
case STATEMENT:
// Parse the statement as if it appeared inside a function body block.
startContext(ParserRuleContext.COMP_UNIT);
startContext(ParserRuleContext.FUNC_BODY_BLOCK);
return parseStatement();
case EXPRESSION:
// Parse the expression as if it appeared inside a statement.
startContext(ParserRuleContext.COMP_UNIT);
startContext(ParserRuleContext.FUNC_BODY_BLOCK);
startContext(ParserRuleContext.STATEMENT);
return parseExpression();
default:
throw new UnsupportedOperationException("Cannot start parsing from: " + context);
}
}
/**
* Resume the parsing from the given context.
*
* @param context Context to resume parsing
* @param args Arguments that requires to continue parsing from the given parser context
* @return Parsed node
*/
@Override
public STNode resumeParsing(ParserRuleContext context, Object... args) {
switch (context) {
case COMP_UNIT:
return parseCompUnit();
case EXTERNAL_FUNC_BODY:
return parseExternalFunctionBody();
case FUNC_BODY:
return parseFunctionBody();
case OPEN_BRACE:
return parseOpenBrace();
case CLOSE_BRACE:
return parseCloseBrace();
case FUNC_NAME:
return parseFunctionName();
case OPEN_PARENTHESIS:
return parseOpenParenthesis();
case SIMPLE_TYPE_DESCRIPTOR:
return parseSimpleTypeDescriptor();
case ASSIGN_OP:
return parseAssignOp();
case EXTERNAL_KEYWORD:
return parseExternalKeyword();
case FUNC_BODY_BLOCK:
return parseFunctionBodyBlock();
case SEMICOLON:
return parseSemicolon();
case CLOSE_PARENTHESIS:
return parseCloseParenthesis();
case VARIABLE_NAME:
return parseVariableName();
case TERMINAL_EXPRESSION:
return parseTerminalExpression((boolean) args[0], (boolean) args[1]);
case STATEMENT:
return parseStatement();
case STATEMENT_WITHOUT_ANNOTS:
return parseStatement((STNode) args[0]);
case EXPRESSION_RHS:
return parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2],
(boolean) args[3]);
case PARAMETER_START:
return parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]);
case PARAMETER_WITHOUT_ANNOTS:
return parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (int) args[3],
(boolean) args[4]);
case AFTER_PARAMETER_TYPE:
return parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(STNode) args[4], (boolean) args[5]);
case PARAMETER_NAME_RHS:
return parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(STNode) args[4], (STNode) args[5]);
case TOP_LEVEL_NODE:
return parseTopLevelNode();
case TOP_LEVEL_NODE_WITHOUT_METADATA:
return parseTopLevelNode((STNode) args[0]);
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
return parseTopLevelNode((STNode) args[0], (STNode) args[1]);
case STATEMENT_START_IDENTIFIER:
return parseStatementStartIdentifier();
case VAR_DECL_STMT_RHS:
return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(boolean) args[4]);
case TYPE_REFERENCE:
return parseTypeReference();
case FIELD_DESCRIPTOR_RHS:
return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]);
case NAMED_OR_POSITIONAL_ARG_RHS:
return parseNamedOrPositionalArg((STNode) args[0]);
case RECORD_BODY_START:
return parseRecordBodyStartDelimiter();
case TYPE_DESCRIPTOR:
return parseTypeDescriptorInternal((ParserRuleContext) args[0]);
case OBJECT_MEMBER:
return parseObjectMember();
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);
case OBJECT_FIELD_RHS:
return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);
case OBJECT_TYPE_FIRST_QUALIFIER:
return parseObjectTypeQualifiers();
case OBJECT_TYPE_SECOND_QUALIFIER:
return parseObjectTypeSecondQualifier((STNode) args[0]);
case OBJECT_KEYWORD:
return parseObjectKeyword();
case TYPE_NAME:
return parseTypeName();
case IF_KEYWORD:
return parseIfKeyword();
case ELSE_KEYWORD:
return parseElseKeyword();
case ELSE_BODY:
return parseElseBody();
case WHILE_KEYWORD:
return parseWhileKeyword();
case PANIC_KEYWORD:
return parsePanicKeyword();
case MAJOR_VERSION:
return parseMajorVersion();
case IMPORT_DECL_RHS:
return parseImportDecl((STNode) args[0], (STNode) args[1]);
case IMPORT_PREFIX:
return parseImportPrefix();
case IMPORT_MODULE_NAME:
case IMPORT_ORG_OR_MODULE_NAME:
case VARIABLE_REF:
case FIELD_OR_FUNC_NAME:
case SERVICE_NAME:
return parseIdentifier(context);
case IMPORT_KEYWORD:
return parseImportKeyword();
case SLASH:
return parseSlashToken();
case DOT:
return parseDotToken();
case IMPORT_VERSION_DECL:
return parseVersion();
case VERSION_KEYWORD:
return parseVersionKeywrod();
case VERSION_NUMBER:
return parseVersionNumber();
case DECIMAL_INTEGER_LITERAL:
return parseDecimalIntLiteral(context);
case IMPORT_SUB_VERSION:
return parseSubVersion(context);
case IMPORT_PREFIX_DECL:
return parseImportPrefixDecl();
case AS_KEYWORD:
return parseAsKeyword();
case CONTINUE_KEYWORD:
return parseContinueKeyword();
case BREAK_KEYWORD:
return parseBreakKeyword();
case RETURN_KEYWORD:
return parseReturnKeyword();
case MAPPING_FIELD:
return parseMappingField((STNode) args[0]);
case SPECIFIC_FIELD_RHS:
return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]);
case STRING_LITERAL:
return parseStringLiteral();
case COLON:
return parseColon();
case OPEN_BRACKET:
return parseOpenBracket();
case RESOURCE_DEF:
return parseResource();
case OPTIONAL_SERVICE_NAME:
return parseServiceName();
case SERVICE_KEYWORD:
return parseServiceKeyword();
case ON_KEYWORD:
return parseOnKeyword();
case RESOURCE_KEYWORD:
return parseResourceKeyword();
case LISTENER_KEYWORD:
return parseListenerKeyword();
case NIL_TYPE_DESCRIPTOR:
return parseNilTypeDescriptor();
case COMPOUND_ASSIGNMENT_STMT:
return parseCompoundAssignmentStmt();
case TYPEOF_KEYWORD:
return parseTypeofKeyword();
case ARRAY_TYPE_DESCRIPTOR:
return parseArrayTypeDescriptor((STNode) args[0]);
case ARRAY_LENGTH:
return parseArrayLength();
case FUNC_DEF_OR_FUNC_TYPE:
case REQUIRED_PARAM:
case ANNOT_REFERENCE:
return parseIdentifier(context);
case IS_KEYWORD:
return parseIsKeyword();
case STMT_START_WITH_EXPR_RHS:
return parseStamentStartWithExpr((STNode) args[0], (STNode) args[1]);
case COMMA:
return parseComma();
case CONST_DECL_TYPE:
return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);
case STMT_START_WITH_IDENTIFIER:
return parseStatementStartsWithIdentifier((STNode) args[0], (STNode) args[1]);
case LT:
return parseLTToken();
case GT:
return parseGTToken();
case NIL_LITERAL:
return parseNilLiteral();
case RECORD_FIELD_OR_RECORD_END:
return parseFieldOrRestDescriptor((boolean) args[0]);
case ANNOTATION_KEYWORD:
return parseAnnotationKeyword();
case ANNOT_DECL_OPTIONAL_TYPE:
return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2],
(STNode) args[3]);
case ANNOT_DECL_RHS:
return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(STNode) args[4]);
case ANNOT_OPTIONAL_ATTACH_POINTS:
return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2],
(STNode) args[3], (STNode) args[4], (STNode) args[5]);
case SOURCE_KEYWORD:
return parseSourceKeyword();
case ATTACH_POINT_IDENT:
return parseAttachPointIdent((STNode) args[0]);
case IDENT_AFTER_OBJECT_IDENT:
return parseIdentAfterObjectIdent();
case FUNCTION_IDENT:
return parseFunctionIdent();
case FIELD_IDENT:
return parseFieldIdent();
case ATTACH_POINT_END:
return parseAttachPointEnd();
case XMLNS_KEYWORD:
return parseXMLNSKeyword();
case XML_NAMESPACE_PREFIX_DECL:
return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]);
case NAMESPACE_PREFIX:
return parseNamespacePrefix();
case WORKER_KEYWORD:
return parseWorkerKeyword();
case WORKER_NAME:
return parseWorkerName();
case FORK_KEYWORD:
return parseForkKeyword();
case DECIMAL_FLOATING_POINT_LITERAL:
return parseDecimalFloatingPointLiteral();
case HEX_FLOATING_POINT_LITERAL:
return parseHexFloatingPointLiteral();
case TRAP_KEYWORD:
return parseTrapKeyword();
case IN_KEYWORD:
return parseInKeyword();
case FOREACH_KEYWORD:
return parseForEachKeyword();
case TABLE_KEYWORD:
return parseTableKeyword();
case KEY_KEYWORD:
return parseKeyKeyword();
case TABLE_KEYWORD_RHS:
return parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]);
case ERROR_KEYWORD:
return parseErrorKeyWord();
case LET_KEYWORD:
return parseLetKeyword();
case STREAM_KEYWORD:
return parseStreamKeyword();
case STREAM_TYPE_FIRST_PARAM_RHS:
return parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]);
case TEMPLATE_START:
case TEMPLATE_END:
return parseBacktickToken(context);
case KEY_CONSTRAINTS_RHS:
return parseKeyConstraint((STNode) args[0]);
case FUNCTION_KEYWORD_RHS:
return parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3]);
case FUNC_OPTIONAL_RETURNS:
return parseFuncReturnTypeDescriptor((boolean) args[0]);
case RETURNS_KEYWORD:
return parseReturnsKeyword();
case NEW_KEYWORD_RHS:
return parseNewKeywordRhs((STNode) args[0]);
case NEW_KEYWORD:
return parseNewKeyword();
case IMPLICIT_NEW:
return parseImplicitNewRhs((STNode) args[0]);
case READONLY_KEYWORD:
return parseReadonlyKeyword();
case FROM_KEYWORD:
return parseFromKeyword();
case WHERE_KEYWORD:
return parseWhereKeyword();
case SELECT_KEYWORD:
return parseSelectKeyword();
case TABLE_CONSTRUCTOR_OR_QUERY_START:
return parseTableConstructorOrQuery((boolean) args[0]);
case TABLE_CONSTRUCTOR_OR_QUERY_RHS:
return parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);
case QUERY_EXPRESSION_RHS:
return parseIntermediateClause((boolean) args[0]);
default:
throw new IllegalStateException("cannot resume parsing the rule: " + context);
}
}
/*
* Private methods.
*/
/**
 * Parse a given input and returns the AST. Starts parsing from the top of a compilation unit.
 *
 * <p>Top-level nodes are consumed until EOF. Import declarations that appear after the first
 * non-import declaration are reported as invalid, but are still kept in the tree (among the
 * other declarations) so parsing can continue.
 *
 * @return Parsed node
 */
private STNode parseCompUnit() {
startContext(ParserRuleContext.COMP_UNIT);
STToken token = peek();
List<STNode> otherDecls = new ArrayList<>();
List<STNode> importDecls = new ArrayList<>();
// Imports must precede all other declarations; flips to false at the first non-import.
boolean processImports = true;
while (token.kind != SyntaxKind.EOF_TOKEN) {
STNode decl = parseTopLevelNode(token.kind);
if (decl.kind == SyntaxKind.IMPORT_DECLARATION) {
if (processImports) {
importDecls.add(decl);
} else {
// Late import: keep the node so the tree stays complete, but flag the error.
otherDecls.add(decl);
this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations");
}
} else {
if (processImports) {
processImports = false;
}
otherDecls.add(decl);
}
token = peek();
}
STToken eof = consume();
endContext();
return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls),
STNodeFactory.createNodeList(otherDecls), eof);
}
/**
 * Parse top level node having an optional modifier preceding it.
 *
 * <p>Convenience overload that dispatches on the kind of the next token.
 *
 * @return Parsed node
 */
private STNode parseTopLevelNode() {
STToken token = peek();
return parseTopLevelNode(token.kind);
}
protected STNode parseTopLevelNode(SyntaxKind tokenKind) {
STNode metadata;
switch (tokenKind) {
case EOF_TOKEN:
return consume();
case DOCUMENTATION_LINE:
case AT_TOKEN:
metadata = parseMetaData(tokenKind);
return parseTopLevelNode(metadata);
case IMPORT_KEYWORD:
case FINAL_KEYWORD:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case CONST_KEYWORD:
case ANNOTATION_KEYWORD:
case XMLNS_KEYWORD:
case SERVICE_KEYWORD:
metadata = createEmptyMetadata();
break;
case IDENTIFIER_TOKEN:
if (isModuleVarDeclStart(1)) {
return parseModuleVarDecl(createEmptyMetadata(), null);
}
default:
if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
metadata = createEmptyMetadata();
break;
}
STToken token = peek();
Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);
if (solution.action == Action.KEEP) {
metadata = STNodeFactory.createNodeList(new ArrayList<>());
break;
}
if (solution.action == Action.REMOVE) {
return solution.recoveredNode;
}
return parseTopLevelNode(solution.tokenKind);
}
return parseTopLevelNode(tokenKind, metadata);
}
/**
 * Parse top level node, given the already-parsed metadata that precedes it.
 *
 * @param metadata Metadata node preceding the top-level node
 * @return Parsed node
 */
private STNode parseTopLevelNode(STNode metadata) {
STToken nextToken = peek();
return parseTopLevelNode(nextToken.kind, metadata);
}
private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {
STNode qualifier = null;
switch (tokenKind) {
case EOF_TOKEN:
if (metadata != null) {
this.errorHandler.reportInvalidNode(null, "invalid metadata");
}
return consume();
case PUBLIC_KEYWORD:
qualifier = parseQualifier();
tokenKind = peek().kind;
break;
case FUNCTION_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case CONST_KEYWORD:
case FINAL_KEYWORD:
case IMPORT_KEYWORD:
case ANNOTATION_KEYWORD:
case XMLNS_KEYWORD:
break;
case IDENTIFIER_TOKEN:
if (isModuleVarDeclStart(1)) {
return parseModuleVarDecl(metadata, null);
}
default:
if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
break;
}
STToken token = peek();
Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata);
if (solution.action == Action.REMOVE) {
return solution.recoveredNode;
}
if (solution.action == Action.KEEP) {
qualifier = STNodeFactory.createEmptyNode();
break;
}
return parseTopLevelNode(solution.tokenKind, metadata);
}
return parseTopLevelNode(tokenKind, metadata, qualifier);
}
/**
 * Check whether the cursor is at the start of a module level var-decl.
 *
 * @param lookahead Offset of the token to check
 * @return <code>true</code> if the cursor is at the start of a module level var-decl.
 * <code>false</code> otherwise.
 */
private boolean isModuleVarDeclStart(int lookahead) {
// Inspect the token that follows the assumed type reference.
STToken nextToken = peek(lookahead + 1);
switch (nextToken.kind) {
case EQUAL_TOKEN:
case OPEN_BRACKET_TOKEN:
case QUESTION_MARK_TOKEN:
case PIPE_TOKEN:
// "T =", "T [", "T ?", "T |" can all continue a var-decl.
return true;
case IDENTIFIER_TOKEN:
// "T name =" or "T name ;" is a var-decl; anything else is not.
switch (peek(lookahead + 2).kind) {
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
return true;
default:
return false;
}
case COLON_TOKEN:
// Qualified reference "mod:T ..." — only recurse from the initial position.
if (lookahead > 1) {
return false;
}
if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) {
return false;
}
return isModuleVarDeclStart(lookahead + 2);
default:
return false;
}
}
/**
 * Parse import declaration.
 * <p>
 * <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code>
 *
 * @return Parsed node
 */
private STNode parseImportDecl() {
startContext(ParserRuleContext.IMPORT_DECL);
// Switch the token reader to IMPORT mode for the duration of the declaration.
this.tokenReader.startMode(ParserMode.IMPORT);
STNode importKeyword = parseImportKeyword();
// The first identifier is either the org-name or the module name;
// the distinction is resolved by the RHS parser below.
STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME);
STToken token = peek();
STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier);
this.tokenReader.endMode();
endContext();
return importDecl;
}
/**
 * Parse the {@code import} keyword, recovering through the error handler when
 * the next token is not the keyword.
 *
 * @return Parsed node
 */
private STNode parseImportKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IMPORT_KEYWORD) {
        // Unexpected token: let the error handler decide how to recover.
        Solution solution = recover(nextToken, ParserRuleContext.IMPORT_KEYWORD);
        return solution.recoveredNode;
    }
    return consume();
}
/**
 * Parse an identifier token, recovering through the error handler when the
 * next token is not an identifier.
 *
 * @param currentCtx context used for error recovery
 * @return Parsed node
 */
private STNode parseIdentifier(ParserRuleContext currentCtx) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        // Unexpected token: let the error handler decide how to recover.
        Solution solution = recover(nextToken, currentCtx);
        return solution.recoveredNode;
    }
    return consume();
}
/**
* Parse RHS of the import declaration. This includes the components after the
* starting identifier (org-name/module-name) of the import decl.
*
* @param importKeyword Import keyword
* @param identifier Org-name or the module name
* @return Parsed node
*/
private STNode parseImportDecl(STNode importKeyword, STNode identifier) {
STToken nextToken = peek();
return parseImportDecl(nextToken.kind, importKeyword, identifier);
}
    /**
     * Parse the RHS of an import declaration, given the kind of the token that follows
     * the first identifier. Decides which optional components (org-name, version, alias)
     * are present based on that token.
     *
     * @param tokenKind Kind of the token after the first identifier
     * @param importKeyword Already-parsed <code>import</code> keyword
     * @param identifier Org-name or module-name start identifier
     * @return Parsed import-declaration node
     */
    private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) {
        STNode orgName;
        STNode moduleName;
        STNode version;
        STNode alias;
        switch (tokenKind) {
            case SLASH_TOKEN:
                // "import org/module ...": the identifier parsed so far is the org-name.
                STNode slash = parseSlashToken();
                orgName = STNodeFactory.createImportOrgNameNode(identifier, slash);
                moduleName = parseModuleName();
                version = parseVersion();
                alias = parseImportPrefixDecl();
                break;
            case DOT_TOKEN:
            case VERSION_KEYWORD:
                // No org-name; the identifier starts the (possibly dotted) module name.
                orgName = STNodeFactory.createEmptyNode();
                moduleName = parseModuleName(tokenKind, identifier);
                version = parseVersion();
                alias = parseImportPrefixDecl();
                break;
            case AS_KEYWORD:
                // "import module as prefix;" — no org-name, no version.
                orgName = STNodeFactory.createEmptyNode();
                moduleName = parseModuleName(tokenKind, identifier);
                version = STNodeFactory.createEmptyNode();
                alias = parseImportPrefixDecl();
                break;
            case SEMICOLON_TOKEN:
                // "import module;" — only the module name is present.
                orgName = STNodeFactory.createEmptyNode();
                moduleName = parseModuleName(tokenKind, identifier);
                version = STNodeFactory.createEmptyNode();
                alias = STNodeFactory.createEmptyNode();
                break;
            default:
                Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Retry with the token kind the recovery produced.
                return parseImportDecl(solution.tokenKind, importKeyword, identifier);
        }
        STNode semicolon = parseSemicolon();
        return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon);
    }
/**
* parse slash token.
*
* @return Parsed node
*/
private STNode parseSlashToken() {
STToken token = peek();
if (token.kind == SyntaxKind.SLASH_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.SLASH);
return sol.recoveredNode;
}
}
/**
* Parse dot token.
*
* @return Parsed node
*/
private STNode parseDotToken() {
STToken nextToken = peek();
return parseDotToken(nextToken.kind);
}
private STNode parseDotToken(SyntaxKind tokenKind) {
if (tokenKind == SyntaxKind.DOT_TOKEN) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.DOT);
return sol.recoveredNode;
}
}
/**
* Parse module name of a import declaration.
*
* @return Parsed node
*/
private STNode parseModuleName() {
STNode moduleNameStart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME);
return parseModuleName(peek().kind, moduleNameStart);
}
/**
* Parse import module name of a import declaration, given the module name start identifier.
*
* @param moduleNameStart Starting identifier of the module name
* @return Parsed node
*/
private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) {
List<STNode> moduleNameParts = new ArrayList<>();
moduleNameParts.add(moduleNameStart);
while (!isEndOfImportModuleName(nextTokenKind)) {
moduleNameParts.add(parseDotToken());
moduleNameParts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME));
nextTokenKind = peek().kind;
}
return STNodeFactory.createNodeList(moduleNameParts);
}
private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) {
return nextTokenKind != SyntaxKind.DOT_TOKEN && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN;
}
private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case SEMICOLON_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case TYPE_KEYWORD:
case ABSTRACT_KEYWORD:
case CONST_KEYWORD:
case EOF_TOKEN:
case SERVICE_KEYWORD:
case IMPORT_KEYWORD:
case FINAL_KEYWORD:
return true;
default:
return false;
}
}
/**
* Parse version component of a import declaration.
* <p>
* <code>version-decl := version sem-ver</code>
*
* @return Parsed node
*/
private STNode parseVersion() {
STToken nextToken = peek();
return parseVersion(nextToken.kind);
}
    /**
     * Parse the optional version component, given the next token kind.
     * Returns an empty node when no version clause is present.
     *
     * @param nextTokenKind Kind of the next token
     * @return Parsed version node, or an empty node
     */
    private STNode parseVersion(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case VERSION_KEYWORD:
                STNode versionKeyword = parseVersionKeywrod();
                STNode versionNumber = parseVersionNumber();
                return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);
            case AS_KEYWORD:
            case SEMICOLON_TOKEN:
                // Version clause is optional; these tokens mean it was omitted.
                return STNodeFactory.createEmptyNode();
            default:
                if (isEndOfImportDecl(nextTokenKind)) {
                    return STNodeFactory.createEmptyNode();
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Retry with the token kind produced by recovery.
                return parseVersion(solution.tokenKind);
        }
    }
/**
* Parse version keywrod.
*
* @return Parsed node
*/
private STNode parseVersionKeywrod() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.VERSION_KEYWORD) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.VERSION_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse version number.
* <p>
* <code>sem-ver := major-num [. minor-num [. patch-num]]
* <br/>
* major-num := DecimalNumber
* <br/>
* minor-num := DecimalNumber
* <br/>
* patch-num := DecimalNumber
* </code>
*
* @return Parsed node
*/
private STNode parseVersionNumber() {
STToken nextToken = peek();
return parseVersionNumber(nextToken.kind);
}
    /**
     * Parse the sem-ver number, given the next token kind. Minor and patch parts
     * are optional, and the patch part can only appear when a minor part exists.
     *
     * @param nextTokenKind Kind of the next token
     * @return Node list holding the version parts
     */
    private STNode parseVersionNumber(SyntaxKind nextTokenKind) {
        STNode majorVersion;
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL:
                majorVersion = parseMajorVersion();
                break;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseVersionNumber(solution.tokenKind);
        }
        List<STNode> versionParts = new ArrayList<>();
        versionParts.add(majorVersion);
        // Sub-version parsers return null when the part is absent.
        STNode minorVersion = parseMinorVersion();
        if (minorVersion != null) {
            versionParts.add(minorVersion);
            STNode patchVersion = parsePatchVersion();
            if (patchVersion != null) {
                versionParts.add(patchVersion);
            }
        }
        return STNodeFactory.createNodeList(versionParts);
    }
    // Parse the mandatory major-version decimal literal.
    private STNode parseMajorVersion() {
        return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);
    }
    // Parse the optional minor-version part; returns null when absent.
    private STNode parseMinorVersion() {
        return parseSubVersion(ParserRuleContext.MINOR_VERSION);
    }
    // Parse the optional patch-version part; returns null when absent.
    private STNode parsePatchVersion() {
        return parseSubVersion(ParserRuleContext.PATCH_VERSION);
    }
/**
* Parse decimal literal.
*
* @param context Context in which the decimal literal is used.
* @return Parsed node
*/
private STNode parseDecimalIntLiteral(ParserRuleContext context) {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {
return consume();
} else {
Solution sol = recover(peek(), context);
return sol.recoveredNode;
}
}
/**
* Parse sub version. i.e: minor-version/patch-version.
*
* @param context Context indicating what kind of sub-version is being parsed.
* @return Parsed node
*/
private STNode parseSubVersion(ParserRuleContext context) {
STToken nextToken = peek();
return parseSubVersion(nextToken.kind, context);
}
    /**
     * Parse a sub-version part (<code>.number</code>), given the next token kind.
     *
     * @param nextTokenKind Kind of the next token
     * @param context Minor- or patch-version context
     * @return Parsed sub-version node, or <code>null</code> when the part is absent
     */
    private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {
        switch (nextTokenKind) {
            case AS_KEYWORD:
            case SEMICOLON_TOKEN:
                // No further sub-version; null signals "absent" to the caller.
                return null;
            case DOT_TOKEN:
                STNode leadingDot = parseDotToken();
                STNode versionNumber = parseDecimalIntLiteral(context);
                return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseSubVersion(solution.tokenKind, context);
        }
    }
/**
* Parse import prefix declaration.
* <p>
* <code>import-prefix-decl := as import-prefix
* <br/>
* import-prefix := a identifier | _
* </code>
*
* @return Parsed node
*/
private STNode parseImportPrefixDecl() {
STToken token = peek();
return parseImportPrefixDecl(token.kind);
}
    /**
     * Parse the optional <code>as import-prefix</code> clause, given the next token kind.
     * Returns an empty node when the clause was omitted.
     *
     * @param nextTokenKind Kind of the next token
     * @return Parsed import-prefix node, or an empty node
     */
    private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case AS_KEYWORD:
                STNode asKeyword = parseAsKeyword();
                STNode prefix = parseImportPrefix();
                return STNodeFactory.createImportPrefixNode(asKeyword, prefix);
            case SEMICOLON_TOKEN:
                return STNodeFactory.createEmptyNode();
            default:
                if (isEndOfImportDecl(nextTokenKind)) {
                    return STNodeFactory.createEmptyNode();
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseImportPrefixDecl(solution.tokenKind);
        }
    }
/**
* Parse <code>as</code> keyword.
*
* @return Parsed node
*/
private STNode parseAsKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.AS_KEYWORD) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.AS_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse import prefix.
*
* @return Parsed node
*/
private STNode parseImportPrefix() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.IMPORT_PREFIX);
return sol.recoveredNode;
}
}
/**
* Parse top level node, given the modifier that precedes it.
*
* @param qualifier Qualifier that precedes the top level node
* @return Parsed node
*/
private STNode parseTopLevelNode(STNode metadata, STNode qualifier) {
STToken token = peek();
return parseTopLevelNode(token.kind, metadata, qualifier);
}
    /**
     * Parse top level node given the next token kind and the modifier that precedes it.
     *
     * @param tokenKind Next token kind
     * @param metadata Metadata that precedes the top level node
     * @param qualifier Qualifier that precedes the top level node
     * @return Parsed top-level node
     */
    private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {
        switch (tokenKind) {
            case FUNCTION_KEYWORD:
                return parseFuncDefOrFuncTypeDesc(metadata, getQualifier(qualifier));
            case TYPE_KEYWORD:
                return parseModuleTypeDefinition(metadata, getQualifier(qualifier));
            case LISTENER_KEYWORD:
                return parseListenerDeclaration(metadata, getQualifier(qualifier));
            case CONST_KEYWORD:
                return parseConstantDeclaration(metadata, getQualifier(qualifier));
            case ANNOTATION_KEYWORD:
                // Annotation decl without a preceding 'const' keyword.
                STNode constKeyword = STNodeFactory.createEmptyNode();
                return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);
            case IMPORT_KEYWORD:
                reportInvalidQualifier(qualifier);
                return parseImportDecl();
            case XMLNS_KEYWORD:
                reportInvalidQualifier(qualifier);
                return parseXMLNamepsaceDeclaration();
            case FINAL_KEYWORD:
                reportInvalidQualifier(qualifier);
                STNode finalKeyword = parseFinalKeyword();
                return parseVariableDecl(metadata, finalKeyword, true);
            case SERVICE_KEYWORD:
                // 'service' may start either a service decl or a service-typed var-decl.
                if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {
                    reportInvalidQualifier(qualifier);
                    return parseServiceDecl(metadata);
                }
                return parseModuleVarDecl(metadata, qualifier);
            case IDENTIFIER_TOKEN:
                if (isModuleVarDeclStart(1)) {
                    return parseModuleVarDecl(metadata, qualifier);
                }
                // Deliberate fall-through: an identifier that does not start a
                // var-decl is handled by the default (recovery) branch.
            default:
                if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseModuleVarDecl(metadata, qualifier);
                }
                STToken token = peek();
                Solution solution =
                        recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                if (solution.action == Action.KEEP) {
                    return parseModuleVarDecl(metadata, qualifier);
                }
                return parseTopLevelNode(solution.tokenKind, metadata, qualifier);
        }
    }
    // Parse a module-level variable declaration. Any qualifier seen so far is
    // invalid in this position and is reported, not attached to the node.
    private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {
        reportInvalidQualifier(qualifier);
        STNode finalKeyword = STNodeFactory.createEmptyNode();
        return parseVariableDecl(metadata, finalKeyword, true);
    }
private STNode getQualifier(STNode qualifier) {
return qualifier == null ? STNodeFactory.createEmptyNode() : qualifier;
}
private void reportInvalidQualifier(STNode qualifier) {
if (qualifier != null && qualifier.kind != SyntaxKind.NONE) {
this.errorHandler.reportInvalidNode((STToken) qualifier,
"invalid qualifier '" + qualifier.toString().trim() + "'");
}
}
/**
* Parse access modifiers.
*
* @return Parsed node
*/
private STNode parseQualifier() {
STToken token = peek();
if (token.kind == SyntaxKind.PUBLIC_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);
return sol.recoveredNode;
}
}
    // Parse a function definition (name is mandatory). The FUNC_DEF context opened
    // here is ended further down the call chain, after the body is parsed.
    private STNode parseFuncDefinition(STNode metadata, STNode visibilityQualifier) {
        startContext(ParserRuleContext.FUNC_DEF);
        STNode functionKeyword = parseFunctionKeyword();
        return parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, true);
    }
    /**
     * Parse function definition for the function type descriptor.
     * <p>
     * <code>
     * function-defn := FUNCTION identifier function-signature function-body
     * <br/>
     * function-type-descriptor := function function-signature
     * </code>
     * The ambiguity between the two is resolved after the <code>function</code>
     * keyword, based on whether a name follows.
     *
     * @param metadata Metadata
     * @param visibilityQualifier Visibility qualifier
     * @return Parsed node
     */
    private STNode parseFuncDefOrFuncTypeDesc(STNode metadata, STNode visibilityQualifier) {
        startContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE);
        STNode functionKeyword = parseFunctionKeyword();
        return parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, false);
    }
private STNode parseFunctionKeywordRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword,
boolean isFuncDef) {
return parseFunctionKeywordRhs(peek().kind, metadata, visibilityQualifier, functionKeyword, isFuncDef);
}
    /**
     * Parse what follows the <code>function</code> keyword: either a named function
     * definition or an anonymous function-type descriptor.
     *
     * @param nextTokenKind Kind of the next token
     * @param metadata Metadata
     * @param visibilityQualifier Visibility qualifier
     * @param functionKeyword Already-parsed <code>function</code> keyword
     * @param isFuncDef Whether a function definition (with name) is expected
     * @return Parsed node
     */
    private STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,
                                           STNode functionKeyword, boolean isFuncDef) {
        STNode name;
        switch (nextTokenKind) {
            case IDENTIFIER_TOKEN:
                // A name after 'function' forces the func-def interpretation.
                name = parseFunctionName();
                isFuncDef = true;
                break;
            case OPEN_PAREN_TOKEN:
                name = STNodeFactory.createEmptyNode();
                break;
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata,
                        visibilityQualifier, functionKeyword, isFuncDef);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseFunctionKeywordRhs(solution.tokenKind, metadata, visibilityQualifier, functionKeyword,
                        isFuncDef);
        }
        if (isFuncDef) {
            // We may have started in the ambiguous FUNC_DEF_OR_FUNC_TYPE context.
            switchContext(ParserRuleContext.FUNC_DEF);
            STNode funcSignature = parseFuncSignature(false, false);
            STNode body = parseFunctionBody();
            return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name,
                    funcSignature, body);
        }
        // No name: parse as a function-type descriptor and decide later whether this
        // is a type-desc or a var-decl with a function-typed variable.
        STNode funcSignature = parseFuncSignature(true, false);
        return parseReturnTypeDescRhs(metadata, visibilityQualifier, functionKeyword, funcSignature);
    }
    /**
     * Parse function signature.
     * <p>
     * <code>
     * function-signature := ( param-list ) return-type-descriptor
     * <br/>
     * return-type-descriptor := [ returns [annots] type-descriptor ]
     * </code>
     *
     * @param isParamNameOptional Whether the parameter names are optional
     * @param isInExprContext Whether this function signature is occurred within an expression context
     * @return Function signature node
     */
    private STNode parseFuncSignature(boolean isParamNameOptional, boolean isInExprContext) {
        STNode openParenthesis = parseOpenParenthesis();
        STNode parameters = parseParamList(isParamNameOptional);
        STNode closeParenthesis = parseCloseParenthesis();
        // Ends the PARAM_LIST context started inside parseParamList.
        endContext();
        STNode returnTypeDesc = parseFuncReturnTypeDescriptor(isInExprContext);
        // Ends the enclosing FUNC_DEF / FUNC_DEF_OR_FUNC_TYPE context.
        endContext();
        return STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc);
    }
    /**
     * Decide what an anonymous function signature actually is, based on the token
     * that follows it: a var-decl with a function-type (identifier/semicolon follows),
     * or a function definition with a missing name (anything else).
     *
     * @param metadata Metadata
     * @param visibilityQualifier Visibility qualifier
     * @param functionKeyword <code>function</code> keyword
     * @param funcSignature Parsed signature
     * @return Parsed node
     */
    private STNode parseReturnTypeDescRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword,
                                          STNode funcSignature) {
        switch (peek().kind) {
            case SEMICOLON_TOKEN:
            case IDENTIFIER_TOKEN:
                // "function (...) varName ..." — a function-typed variable declaration.
                startContext(ParserRuleContext.VAR_DECL_STMT);
                STNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);
                STNode varName = parseVariableName();
                STNode varDecl = parseVarDeclRhs(metadata, visibilityQualifier, typeDesc, varName, true);
                endContext();
                return varDecl;
            case OPEN_PAREN_TOKEN:
            case EQUAL_TOKEN:
                break;
            default:
                break;
        }
        // Otherwise treat as a function definition whose name is missing.
        this.errorHandler.reportMissingTokenError("missing " + ParserRuleContext.FUNC_NAME);
        STNode name = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        // The signature was parsed with optional param names; fill in the missing ones.
        funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
        STNode body = parseFunctionBody();
        return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name,
                funcSignature, body);
    }
    /**
     * Validate the param list and return. If there are params without param-name,
     * then this method will create a new set of params with missing param-name
     * and return.
     *
     * @param signature Function signature
     * @return The original signature if all params are named; otherwise a new
     *         signature whose unnamed params carry missing-identifier names
     */
    private STNode validateAndGetFuncParams(STFunctionSignatureNode signature) {
        STNode parameters = signature.parameters;
        int paramCount = parameters.bucketCount();
        int index = 0;
        // Scan for the first param with a missing name. 'break' inside the switch
        // exits the switch; the trailing 'break' then exits the loop, so 'index'
        // ends on the first offending param (or paramCount if none).
        for (; index < paramCount; index++) {
            STNode param = parameters.childInBucket(index);
            switch (param.kind) {
                case REQUIRED_PARAM:
                    STRequiredParameterNode requiredParam = (STRequiredParameterNode) param;
                    if (isEmpty(requiredParam.paramName)) {
                        break;
                    }
                    continue;
                case DEFAULTABLE_PARAM:
                    STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;
                    if (isEmpty(defaultableParam.paramName)) {
                        break;
                    }
                    continue;
                case REST_PARAM:
                    STRestParameterNode restParam = (STRestParameterNode) param;
                    if (isEmpty(restParam.paramName)) {
                        break;
                    }
                    continue;
                default:
                    continue;
            }
            break;
        }
        if (index == paramCount) {
            // Every param is named; reuse the signature as-is.
            return signature;
        }
        STNode updatedParams = getUpdatedParamList(parameters, index);
        return STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams,
                signature.closeParenToken, signature.returnTypeDesc);
    }
private STNode getUpdatedParamList(STNode parameters, int index) {
int paramCount = parameters.bucketCount();
int newIndex = 0;
ArrayList<STNode> newParams = new ArrayList<>();
for (; newIndex < index; newIndex++) {
newParams.add(parameters.childInBucket(index));
}
for (; newIndex < paramCount; newIndex++) {
STNode param = parameters.childInBucket(newIndex);
STNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
switch (param.kind) {
case REQUIRED_PARAM:
STRequiredParameterNode requiredParam = (STRequiredParameterNode) param;
if (isEmpty(requiredParam.paramName)) {
param = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma,
requiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName,
paramName);
}
break;
case DEFAULTABLE_PARAM:
STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;
if (isEmpty(defaultableParam.paramName)) {
param = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma,
defaultableParam.annotations, defaultableParam.visibilityQualifier,
defaultableParam.typeName, paramName, defaultableParam.equalsToken,
defaultableParam.expression);
}
break;
case REST_PARAM:
STRestParameterNode restParam = (STRestParameterNode) param;
if (isEmpty(restParam.paramName)) {
param = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations,
restParam.typeName, restParam.ellipsisToken, paramName);
}
break;
default:
break;
}
newParams.add(param);
}
return STNodeFactory.createNodeList(newParams);
}
    // An absent child node is represented as null in the syntax tree.
    private boolean isEmpty(STNode node) {
        return node == null;
    }
/**
* Parse function keyword. Need to validate the token before consuming,
* since we can reach here while recovering.
*
* @return Parsed node
*/
private STNode parseFunctionKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.FUNCTION_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse function name.
*
* @return Parsed node
*/
private STNode parseFunctionName() {
STToken token = peek();
if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.FUNC_NAME);
return sol.recoveredNode;
}
}
/**
* Parse open parenthesis.
*
* @return Parsed node
*/
private STNode parseOpenParenthesis() {
STToken token = peek();
if (token.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.OPEN_PARENTHESIS);
return sol.recoveredNode;
}
}
/**
* Parse close parenthesis.
*
* @return Parsed node
*/
private STNode parseCloseParenthesis() {
STToken token = peek();
if (token.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CLOSE_PARENTHESIS);
return sol.recoveredNode;
}
}
    /**
     * <p>
     * Parse parameter list.
     * </p>
     * <code>
     * param-list := required-params [, defaultable-params] [, rest-param]
     * <br/>&nbsp;&nbsp;| defaultable-params [, rest-param]
     * <br/>&nbsp;&nbsp;| [rest-param]
     * <br/><br/>
     * required-params := required-param (, required-param)*
     * <br/><br/>
     * required-param := [annots] [public] type-descriptor [param-name]
     * <br/><br/>
     * defaultable-params := defaultable-param (, defaultable-param)*
     * <br/><br/>
     * defaultable-param := [annots] [public] type-descriptor [param-name] default-value
     * <br/><br/>
     * rest-param := [annots] type-descriptor ... [param-name]
     * <br/><br/>
     * param-name := identifier
     * </code>
     * <p>
     * NOTE(review): the PARAM_LIST context started here appears to be ended by the
     * caller (parseFuncSignature); the per-param contexts started in the loop are
     * ended inside parseParamGivenAnnotsAndQualifier — confirm before refactoring.
     *
     * @param isParamNameOptional Whether the param names in the signature is optional or not.
     * @return Parsed node
     */
    private STNode parseParamList(boolean isParamNameOptional) {
        startContext(ParserRuleContext.PARAM_LIST);
        ArrayList<STNode> paramsList = new ArrayList<>();
        STToken token = peek();
        if (isEndOfParametersList(token.kind)) {
            // Empty param list.
            STNode params = STNodeFactory.createNodeList(paramsList);
            return params;
        }
        // The first param has no leading comma.
        STNode startingComma = STNodeFactory.createEmptyNode();
        startContext(ParserRuleContext.REQUIRED_PARAM);
        STNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);
        SyntaxKind prevParamKind = firstParam.kind;
        paramsList.add(firstParam);
        token = peek();
        while (!isEndOfParametersList(token.kind)) {
            // The previous param's kind constrains what may follow: required params
            // cannot come after defaultable ones, and nothing after a rest param.
            switch (prevParamKind) {
                case REST_PARAM:
                    this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter");
                    startContext(ParserRuleContext.REQUIRED_PARAM);
                    break;
                case DEFAULTABLE_PARAM:
                    startContext(ParserRuleContext.DEFAULTABLE_PARAM);
                    break;
                case REQUIRED_PARAM:
                default:
                    startContext(ParserRuleContext.REQUIRED_PARAM);
                    break;
            }
            STNode leadingComma = parseComma();
            STNode param = parseParameter(leadingComma, prevParamKind, isParamNameOptional);
            prevParamKind = param.kind;
            paramsList.add(param);
            token = peek();
        }
        STNode params = STNodeFactory.createNodeList(paramsList);
        return params;
    }
    /**
     * Parse a single parameter. Parameter can be a required parameter, a defaultable
     * parameter, or a rest parameter.
     *
     * @param leadingComma Comma that occurs before the param
     * @param prevParamKind Kind of the parameter that precedes current parameter
     * @param isParamNameOptional Whether the param names in the signature is optional or not.
     * @return Parsed node
     */
    private STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) {
        STToken token = peek();
        // Offset 1: the lookahead used by recovery when annotations are present.
        return parseParameter(token.kind, prevParamKind, leadingComma, 1, isParamNameOptional);
    }
private STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset,
boolean isParamNameOptional) {
return parseParameter(peek().kind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional);
}
    /**
     * Parse a parameter, starting with its optional annotations.
     *
     * @param nextTokenKind Kind of the next token
     * @param prevParamKind Kind of the preceding parameter
     * @param leadingComma Comma before this parameter
     * @param nextTokenOffset Lookahead offset used during recovery
     * @param isParamNameOptional Whether the param name may be omitted
     * @return Parsed node
     */
    private STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                  int nextTokenOffset, boolean isParamNameOptional) {
        STNode annots;
        switch (nextTokenKind) {
            case AT_TOKEN:
                annots = parseAnnotations(nextTokenKind);
                nextTokenKind = peek().kind;
                break;
            case PUBLIC_KEYWORD:
            case IDENTIFIER_TOKEN:
                // No annotations present.
                annots = STNodeFactory.createNodeList(new ArrayList<>());
                break;
            default:
                if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN && isTypeStartingToken(nextTokenKind)) {
                    annots = STNodeFactory.createNodeList(new ArrayList<>());
                    break;
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma,
                        nextTokenOffset, isParamNameOptional);
                if (solution.action == Action.KEEP) {
                    annots = STNodeFactory.createNodeList(new ArrayList<>());
                    break;
                }
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Offset 0: recovery already consumed/inserted the problem token.
                return parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional);
        }
        return parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional);
    }
private STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
int nextNextTokenOffset, boolean isFuncDef) {
return parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, nextNextTokenOffset, isFuncDef);
}
    /**
     * Parse the rest of a parameter after its annotations: optional visibility
     * qualifier, then type and name.
     *
     * @param nextTokenKind Kind of the next token
     * @param prevParamKind Kind of the preceding parameter
     * @param leadingComma Comma before this parameter
     * @param annots Already-parsed annotations
     * @param nextTokenOffset Lookahead offset used during recovery
     * @param isParamNameOptional Whether the param name may be omitted
     * @return Parsed node
     */
    private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                         STNode annots, int nextTokenOffset, boolean isParamNameOptional) {
        STNode qualifier;
        switch (nextTokenKind) {
            case PUBLIC_KEYWORD:
                qualifier = parseQualifier();
                break;
            case IDENTIFIER_TOKEN:
                qualifier = STNodeFactory.createEmptyNode();
                break;
            case AT_TOKEN:
            default:
                if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind,
                        leadingComma, annots, nextTokenOffset, isParamNameOptional);
                if (solution.action == Action.KEEP) {
                    qualifier = STNodeFactory.createEmptyNode();
                    break;
                }
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0,
                        isParamNameOptional);
        }
        return parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional);
    }
    // Parse the type and remainder of a parameter, then end the per-param context
    // that was started by parseParamList.
    private STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
                                                     STNode qualifier, boolean isParamNameOptional) {
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode param = parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, type, isParamNameOptional);
        endContext();
        return param;
    }
private STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,
STNode type, boolean isParamNameOptional) {
STToken token = peek();
return parseAfterParamType(token.kind, prevParamKind, leadingComma, annots, qualifier, type,
isParamNameOptional);
}
    /**
     * Parse what follows a parameter's type: ellipsis (rest param), name, or —
     * when names are optional — directly a default value or the end of the param.
     *
     * @param tokenKind Kind of the next token
     * @param prevParamKind Kind of the preceding parameter
     * @param leadingComma Comma before this parameter
     * @param annots Annotations
     * @param qualifier Visibility qualifier
     * @param type Parsed type descriptor
     * @param isParamNameOptional Whether the param name may be omitted
     * @return Parsed node
     */
    private STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                       STNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) {
        STNode paramName;
        switch (tokenKind) {
            case ELLIPSIS_TOKEN:
                // "type ... name" — a rest parameter; qualifiers are not allowed on it.
                switchContext(ParserRuleContext.REST_PARAM);
                reportInvalidQualifier(qualifier);
                STNode ellipsis = parseEllipsis();
                if (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
                    paramName = STNodeFactory.createEmptyNode();
                } else {
                    paramName = parseVariableName();
                }
                return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName);
            case IDENTIFIER_TOKEN:
                paramName = parseVariableName();
                return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
            case EQUAL_TOKEN:
                if (!isParamNameOptional) {
                    break;
                }
                // Name omitted; "= default-value" follows directly.
                paramName = STNodeFactory.createEmptyNode();
                return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
            default:
                if (!isParamNameOptional) {
                    break;
                }
                paramName = STNodeFactory.createEmptyNode();
                return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
        }
        // Reaching here means a param name was required but not found: recover.
        STToken token = peek();
        Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots,
                qualifier, type, isParamNameOptional);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        return parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,
                isParamNameOptional);
    }
/**
* Parse ellipsis.
*
* @return Parsed node
*/
private STNode parseEllipsis() {
STToken token = peek();
if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.ELLIPSIS);
return sol.recoveredNode;
}
}
/**
* <p>
* Parse the right hand side of a required/defaultable parameter.
* </p>
* <code>parameter-rhs := [= expression]</code>
*
* @param leadingComma Comma that precedes this parameter
* @param prevParamKind Kind of the parameter that precedes current parameter
* @param annots Annotations attached to the parameter
* @param qualifier Visibility qualifier
* @param type Type descriptor
* @param paramName Name of the parameter
* @return Parsed parameter node
*/
private STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,
STNode type, STNode paramName) {
STToken token = peek();
return parseParameterRhs(token.kind, prevParamKind, leadingComma, annots, qualifier, type, paramName);
}
    /**
     * Parse the RHS of a param: nothing (required param) or <code>= expr</code>
     * (defaultable param), given the next token kind.
     *
     * @param tokenKind Kind of the next token
     * @param prevParamKind Kind of the preceding parameter
     * @param leadingComma Comma before this parameter
     * @param annots Annotations
     * @param qualifier Visibility qualifier
     * @param type Type descriptor
     * @param paramName Parameter name
     * @return Parsed parameter node
     */
    private STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
                                     STNode qualifier, STNode type, STNode paramName) {
        if (isEndOfParameter(tokenKind)) {
            // Required param. Ordering rule: required params cannot follow defaultable ones.
            if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) {
                this.errorHandler.reportInvalidNode(peek(),
                        "cannot have a required parameter after a defaultable parameter");
            }
            return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName);
        } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) {
            // This is the first defaultable param; switch the context accordingly.
            if (prevParamKind == SyntaxKind.REQUIRED_PARAM) {
                switchContext(ParserRuleContext.DEFAULTABLE_PARAM);
            }
            STNode equal = parseAssignOp();
            STNode expr = parseExpression();
            return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal,
                    expr);
        } else {
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma,
                    annots, qualifier, type, paramName);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,
                    paramName);
        }
    }
/**
* Parse comma.
*
* @return Parsed node
*/
private STNode parseComma() {
STToken token = peek();
if (token.kind == SyntaxKind.COMMA_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.COMMA);
return sol.recoveredNode;
}
}
/**
* Check whether the given token is an end of a parameter.
*
* @param tokenKind Next token kind
* @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise
*/
private boolean isEndOfParameter(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case COMMA_TOKEN:
case PUBLIC_KEYWORD:
case RETURNS_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case AT_TOKEN:
return true;
default:
return false;
}
}
/**
* Check whether the given token is an end of a parameter-list.
*
* @param tokenKind Next token kind
* @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise
*/
private boolean isEndOfParametersList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case RETURNS_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case OPEN_BRACE_TOKEN:
return true;
default:
return false;
}
}
/**
 * Parse return type descriptor of a function. A return type descriptor has the following structure.
 *
 * <code>return-type-descriptor := [ returns annots type-descriptor ]</code>
 *
 * @param isInExprContext Whether the function is being parsed inside an expression context
 * @return Parsed node
 */
private STNode parseFuncReturnTypeDescriptor(boolean isInExprContext) {
    STToken nextToken = peek();
    return parseFuncReturnTypeDescriptor(nextToken.kind, isInExprContext);
}
/**
 * Parse an optional return type descriptor, given the next token kind.
 * Returns an empty node when the lookahead indicates no return type is present.
 * <p>
 * NOTE(review): {@code isInExprContext} is not referenced anywhere in this
 * method body — confirm whether it is still needed.
 *
 * @param nextTokenKind Kind of the next token
 * @param isInExprContext Whether the function is being parsed inside an expression context
 * @return Return type descriptor node, or an empty node if absent
 */
private STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind, boolean isInExprContext) {
    boolean hasReturnType;
    switch (nextTokenKind) {
        case RETURNS_KEYWORD:
            hasReturnType = true;
            break;
        case OPEN_BRACE_TOKEN:
        case EQUAL_TOKEN:
            // Function body (or an assignment) starts here: no return type.
            hasReturnType = false;
            break;
        case IDENTIFIER_TOKEN:
            // An identifier followed by '=' or ';' cannot be a return type.
            SyntaxKind lookahead = getNextNextToken(nextTokenKind).kind;
            hasReturnType = lookahead != SyntaxKind.EQUAL_TOKEN && lookahead != SyntaxKind.SEMICOLON_TOKEN;
            break;
        default:
            // Tolerate one stray token if 'returns' follows it.
            hasReturnType = getNextNextToken(nextTokenKind).kind == SyntaxKind.RETURNS_KEYWORD;
            break;
    }
    if (!hasReturnType) {
        return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = parseReturnsKeyword();
    STNode annot = parseAnnotations();
    STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
}
/**
 * Parse the <code>returns</code> keyword.
 *
 * @return <code>returns</code> keyword token, or the node produced by error recovery
 */
private STNode parseReturnsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RETURNS_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.RETURNS_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse a type descriptor. A type descriptor has the following structure.
 * </p>
 * <code>type-descriptor :=
 * simple-type-descriptor<br/>
 * | structured-type-descriptor<br/>
 * | behavioral-type-descriptor<br/>
 * | singleton-type-descriptor<br/>
 * | union-type-descriptor<br/>
 * | optional-type-descriptor<br/>
 * | any-type-descriptor<br/>
 * | anydata-type-descriptor<br/>
 * | byte-type-descriptor<br/>
 * | json-type-descriptor<br/>
 * | type-descriptor-reference<br/>
 * | ( type-descriptor )
 * <br/>
 * type-descriptor-reference := qualified-identifier</code>
 *
 * @param context Parser context under which the type descriptor is parsed
 * @return Parsed node
 */
private STNode parseTypeDescriptor(ParserRuleContext context) {
    // Push the context so recovery knows where it is, then pop once done.
    startContext(context);
    STNode result = parseTypeDescriptorInternal(context);
    endContext();
    return result;
}
private STNode parseTypeDescriptorInternal(ParserRuleContext context) {
    // Parse the base type first, then fold any trailing ?, [], | parts on top of it.
    STNode baseType = parseTypeDescriptorInternal(peek().kind, context);
    return parseComplexTypeDescriptor(baseType, context);
}
/**
 * Handle the parsing of optional, array, and union type descriptors,
 * stacked to arbitrary depth (e.g. <code>T?[]|U</code>).
 *
 * @param typeDesc The type descriptor parsed so far
 * @param context Current parser context
 * @return Parsed type descriptor node
 */
private STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.QUESTION_MARK_TOKEN) {
        return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context);
    }
    if (nextTokenKind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context);
    }
    if (nextTokenKind == SyntaxKind.PIPE_TOKEN) {
        return parseComplexTypeDescriptor(parseUnionTypeDescriptor(typeDesc, context), context);
    }
    return typeDesc;
}
/**
 * <p>
 * Parse a type descriptor, given the next token kind.
 * </p>
 * If the preceding token is <code>?</code> then it is an optional type descriptor
 *
 * @param tokenKind Next token kind
 * @param context Current context
 * @return Parsed node
 */
private STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context) {
    switch (tokenKind) {
        case IDENTIFIER_TOKEN:
            // Reference to a user-defined type (possibly module-qualified).
            return parseTypeReference();
        case RECORD_KEYWORD:
            return parseRecordTypeDescriptor();
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
            // 'abstract'/'client' are object qualifiers, so they also begin
            // an object type descriptor.
            return parseObjectTypeDescriptor();
        case OPEN_PAREN_TOKEN:
            // "()" is the nil type; anything else in parens is a
            // parenthesised type descriptor.
            if (getNextNextToken(tokenKind).kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
                return parseNilTypeDescriptor();
            }
            return parseParenthesisedTypeDesc();
        case MAP_KEYWORD:
        case FUTURE_KEYWORD:
        case TYPEDESC_KEYWORD:
            // Types of the form keyword<T>.
            return parseParameterizedTypeDescriptor();
        case ERROR_KEYWORD:
            return parseErrorTypeDescriptor();
        case STREAM_KEYWORD:
            return parseStreamTypeDescriptor();
        case TABLE_KEYWORD:
            return parseTableTypeDescriptor();
        case FUNCTION_KEYWORD:
            return parseFunctionTypeDesc();
        case OPEN_BRACKET_TOKEN:
            return parseTupleTypeDesc();
        case READONLY_KEYWORD:
            return parseReadOnlyTypeDesc();
        default:
            // Built-in simple types (int, string, boolean, ...).
            if (isSimpleType(tokenKind)) {
                return parseSimpleTypeDescriptor();
            }
            // Unexpected token: run error recovery and retry with whatever
            // token kind the recovery decided on (unless it removed a token).
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR, context);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTypeDescriptorInternal(solution.tokenKind, context);
    }
}
/**
 * Parse a simple (built-in) type descriptor such as <code>int</code> or <code>string</code>.
 *
 * @return Parsed node
 */
private STNode parseSimpleTypeDescriptor() {
    STToken nextToken = peek();
    if (!isSimpleType(nextToken.kind)) {
        Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);
        return sol.recoveredNode;
    }
    STToken token = consume();
    SyntaxKind typeKind = getTypeSyntaxKind(token.kind);
    return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);
}
/**
 * <p>
 * Parse function body. A function body has the following structure.
 * </p>
 * <code>
 * function-body := function-body-block | external-function-body
 * external-function-body := = annots external ;
 * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }
 * </code>
 *
 * @return Parsed node
 */
private STNode parseFunctionBody() {
    return parseFunctionBody(peek().kind);
}
/**
 * Parse function body, given the next token kind.
 *
 * @param tokenKind Next token kind
 * @return Parsed node
 */
protected STNode parseFunctionBody(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.EQUAL_TOKEN) {
        // '=' starts an external function body: = annots external ;
        return parseExternalFunctionBody();
    }
    if (tokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseFunctionBodyBlock();
    }
    STToken token = peek();
    Solution solution = recover(token, ParserRuleContext.FUNC_BODY);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    if (solution.tokenKind == SyntaxKind.NONE) {
        // Recovery could not decide on a token kind; emit a missing token.
        return STNodeFactory.createMissingToken(solution.tokenKind);
    }
    return parseFunctionBody(solution.tokenKind);
}
/**
 * <p>
 * Parse function body block. A function body block has the following structure.
 * </p>
 *
 * <code>
 * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/>
 * default-worker-init := sequence-stmt<br/>
 * default-worker := sequence-stmt<br/>
 * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/>
 * worker-name := identifier<br/>
 * </code>
 *
 * @return Parsed node
 */
private STNode parseFunctionBodyBlock() {
    startContext(ParserRuleContext.FUNC_BODY_BLOCK);
    STNode openBrace = parseOpenBrace();
    STToken token = peek();
    // Statements before the first named worker, the named workers
    // themselves, and the statements after them, respectively.
    ArrayList<STNode> firstStmtList = new ArrayList<>();
    ArrayList<STNode> workers = new ArrayList<>();
    ArrayList<STNode> secondStmtList = new ArrayList<>();
    ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;
    boolean hasNamedWorkers = false;
    while (!isEndOfStatements(token.kind)) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        // Three-phase state machine: DEFAULT_WORKER_INIT -> NAMED_WORKERS
        // -> DEFAULT_WORKER. The missing 'break's are intentional: when a
        // phase transition is detected, the statement that triggered it
        // falls through and is handled by the next phase's case.
        switch (currentCtx) {
            case DEFAULT_WORKER_INIT:
                if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {
                    firstStmtList.add(stmt);
                    break;
                }
                // First worker declaration seen; fall through to add it.
                currentCtx = ParserRuleContext.NAMED_WORKERS;
                hasNamedWorkers = true;
            case NAMED_WORKERS:
                if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                    workers.add(stmt);
                    break;
                }
                // Non-worker statement ends the workers phase; fall through.
                currentCtx = ParserRuleContext.DEFAULT_WORKER;
            case DEFAULT_WORKER:
            default:
                // Workers are only allowed in a contiguous run; a late one
                // is reported and dropped.
                if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                    this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
                    break;
                }
                secondStmtList.add(stmt);
                break;
        }
        token = peek();
    }
    STNode namedWorkersList;
    STNode statements;
    if (hasNamedWorkers) {
        STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);
        STNode namedWorkers = STNodeFactory.createNodeList(workers);
        namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);
        statements = STNodeFactory.createNodeList(secondStmtList);
    } else {
        // No workers: everything collected so far is the plain statement list.
        namedWorkersList = STNodeFactory.createEmptyNode();
        statements = STNodeFactory.createNodeList(firstStmtList);
    }
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);
}
// Checks whether the next token terminates a record type descriptor body.
// A 'service' keyword only terminates it when it actually starts a
// service declaration.
private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.SERVICE_KEYWORD) {
        return isServiceDeclStart(ParserRuleContext.RECORD_FIELD, 1);
    }
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN || nextTokenKind == SyntaxKind.TYPE_KEYWORD
            || nextTokenKind == SyntaxKind.PUBLIC_KEYWORD || nextTokenKind == SyntaxKind.LISTENER_KEYWORD
            || nextTokenKind == SyntaxKind.IMPORT_KEYWORD;
}
// Checks whether the next token terminates an object type descriptor body.
private boolean isEndOfObjectTypeNode(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.SERVICE_KEYWORD) {
        return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);
    }
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || tokenKind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN || tokenKind == SyntaxKind.IMPORT_KEYWORD;
}
/**
 * Parse a type reference or variable reference at the start of a statement.
 *
 * @return Parsed node
 */
private STNode parseStatementStartIdentifier() {
    STNode identifier = parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER);
    return identifier;
}
/**
 * Parse variable name.
 *
 * @return Parsed node
 */
private STNode parseVariableName() {
    return parseVariableName(peek().kind);
}
/**
 * Parse variable name, given the next token kind.
 *
 * @param tokenKind Next token kind
 * @return Identifier token, or the node produced by error recovery
 */
private STNode parseVariableName(SyntaxKind tokenKind) {
    if (tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
        Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse an open brace token.
 *
 * @return Open brace token, or the node produced by error recovery
 */
private STNode parseOpenBrace() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OPEN_BRACE_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.OPEN_BRACE);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a close brace token.
 *
 * @return Close brace token, or the node produced by error recovery
 */
private STNode parseCloseBrace() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.CLOSE_BRACE);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse external function body. An external function body has the following structure.
 * </p>
 * <code>
 * external-function-body := = annots external ;
 * </code>
 *
 * @return Parsed node
 */
private STNode parseExternalFunctionBody() {
    startContext(ParserRuleContext.EXTERNAL_FUNC_BODY);
    STNode assignToken = parseAssignOp();
    STNode annots = parseAnnotations();
    STNode externalKeyword = parseExternalKeyword();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExternalFunctionBodyNode(assignToken, annots, externalKeyword, semicolonToken);
}
/**
 * Parse a semicolon token.
 *
 * @return Semicolon token, or the node produced by error recovery
 */
private STNode parseSemicolon() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SEMICOLON_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.SEMICOLON);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>external</code> keyword.
 *
 * @return <code>external</code> keyword token, or the node produced by error recovery
 */
private STNode parseExternalKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.EXTERNAL_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.EXTERNAL_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/*
* Operators
*/
/**
 * Parse the assign (<code>=</code>) operator.
 *
 * @return Assign operator token, or the node produced by error recovery
 */
private STNode parseAssignOp() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.EQUAL_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.ASSIGN_OP);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a binary operator token.
 *
 * @return Binary operator token, or the node produced by error recovery
 */
private STNode parseBinaryOperator() {
    STToken nextToken = peek();
    if (!isBinaryOperator(nextToken.kind)) {
        Solution sol = recover(nextToken, ParserRuleContext.BINARY_OPERATOR);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Check whether the given token kind is a binary operator.
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
 */
private boolean isBinaryOperator(SyntaxKind kind) {
    // Arithmetic.
    return kind == SyntaxKind.PLUS_TOKEN || kind == SyntaxKind.MINUS_TOKEN
            || kind == SyntaxKind.SLASH_TOKEN || kind == SyntaxKind.ASTERISK_TOKEN
            // Comparison and equality.
            || kind == SyntaxKind.GT_TOKEN || kind == SyntaxKind.LT_TOKEN
            || kind == SyntaxKind.DOUBLE_EQUAL_TOKEN || kind == SyntaxKind.TRIPPLE_EQUAL_TOKEN
            || kind == SyntaxKind.LT_EQUAL_TOKEN || kind == SyntaxKind.GT_EQUAL_TOKEN
            || kind == SyntaxKind.NOT_EQUAL_TOKEN || kind == SyntaxKind.NOT_DOUBLE_EQUAL_TOKEN
            // Bitwise and logical.
            || kind == SyntaxKind.BITWISE_AND_TOKEN || kind == SyntaxKind.BITWISE_XOR_TOKEN
            || kind == SyntaxKind.PIPE_TOKEN || kind == SyntaxKind.LOGICAL_AND_TOKEN
            || kind == SyntaxKind.LOGICAL_OR_TOKEN
            // Shifts.
            || kind == SyntaxKind.DOUBLE_LT_TOKEN || kind == SyntaxKind.DOUBLE_GT_TOKEN
            || kind == SyntaxKind.TRIPPLE_GT_TOKEN
            // Ranges.
            || kind == SyntaxKind.ELLIPSIS_TOKEN || kind == SyntaxKind.DOUBLE_DOT_LT_TOKEN;
}
/**
 * Get the precedence of a given operator.
 *
 * @param binaryOpKind Operator kind
 * @return Precedence of the given operator
 * @throws UnsupportedOperationException if the token kind is not an operator handled here
 */
private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {
    switch (binaryOpKind) {
        case ASTERISK_TOKEN: // *
        case SLASH_TOKEN: // /
            return OperatorPrecedence.MULTIPLICATIVE;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            return OperatorPrecedence.ADDITIVE;
        case GT_TOKEN:
        case LT_TOKEN:
        case GT_EQUAL_TOKEN:
        case LT_EQUAL_TOKEN:
        case IS_KEYWORD:
            return OperatorPrecedence.BINARY_COMPARE;
        // Member access covers field access, index access, and call syntax.
        case DOT_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_PAREN_TOKEN:
            return OperatorPrecedence.MEMBER_ACCESS;
        case DOUBLE_EQUAL_TOKEN:
        case TRIPPLE_EQUAL_TOKEN:
        case NOT_EQUAL_TOKEN:
        case NOT_DOUBLE_EQUAL_TOKEN:
            return OperatorPrecedence.EQUALITY;
        case BITWISE_AND_TOKEN:
            return OperatorPrecedence.BITWISE_AND;
        case BITWISE_XOR_TOKEN:
            return OperatorPrecedence.BITWISE_XOR;
        case PIPE_TOKEN:
            return OperatorPrecedence.BITWISE_OR;
        case LOGICAL_AND_TOKEN:
            return OperatorPrecedence.LOGICAL_AND;
        case LOGICAL_OR_TOKEN:
            return OperatorPrecedence.LOGICAL_OR;
        // '->' is a (remote) action invocation, not a plain expression.
        case RIGHT_ARROW_TOKEN:
            return OperatorPrecedence.ACTION;
        case DOUBLE_LT_TOKEN:
        case DOUBLE_GT_TOKEN:
        case TRIPPLE_GT_TOKEN:
            return OperatorPrecedence.SHIFT;
        // '...' and '..<' are inclusive/exclusive range operators.
        case ELLIPSIS_TOKEN:
        case DOUBLE_DOT_LT_TOKEN:
            return OperatorPrecedence.RANGE;
        default:
            throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'");
    }
}
/**
 * <p>
 * Get the operator kind to insert during recovery, given the precedence level.
 * </p>
 * Each precedence level is mapped to one representative operator of that
 * level, which the error handler can insert as a missing token.
 *
 * @param opPrecedenceLevel Precedence of the given operator
 * @return Kind of the operator to insert
 * @throws UnsupportedOperationException if no operator is mapped for the level
 */
private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {
    switch (opPrecedenceLevel) {
        // UNARY and ACTION have no dedicated binary operator; fall back to
        // the multiplicative representative.
        case UNARY:
        case ACTION:
        case MULTIPLICATIVE:
            return SyntaxKind.ASTERISK_TOKEN;
        case ADDITIVE:
            return SyntaxKind.PLUS_TOKEN;
        case SHIFT:
            return SyntaxKind.DOUBLE_LT_TOKEN;
        case RANGE:
            return SyntaxKind.ELLIPSIS_TOKEN;
        case BINARY_COMPARE:
            return SyntaxKind.LT_TOKEN;
        case EQUALITY:
            return SyntaxKind.DOUBLE_EQUAL_TOKEN;
        case BITWISE_AND:
            return SyntaxKind.BITWISE_AND_TOKEN;
        case BITWISE_XOR:
            return SyntaxKind.BITWISE_XOR_TOKEN;
        case BITWISE_OR:
            return SyntaxKind.PIPE_TOKEN;
        case LOGICAL_AND:
            return SyntaxKind.LOGICAL_AND_TOKEN;
        case LOGICAL_OR:
            return SyntaxKind.LOGICAL_OR_TOKEN;
        default:
            throw new UnsupportedOperationException(
                    "Unsupported operator precedence level'" + opPrecedenceLevel + "'");
    }
}
/**
 * <p>
 * Parse a module type definition.
 * </p>
 * <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code>
 *
 * @param metadata Metadata
 * @param qualifier Visibility qualifier
 * @return Parsed node
 */
private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.MODULE_TYPE_DEFINITION);
    STNode typeKeyword = parseTypeKeyword();
    STNode name = parseTypeName();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, name, typeDesc,
            semicolonToken);
}
/**
 * Parse the <code>type</code> keyword.
 *
 * @return <code>type</code> keyword token, or the node produced by error recovery
 */
private STNode parseTypeKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TYPE_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.TYPE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a type name (identifier).
 *
 * @return Identifier token, or the node produced by error recovery
 */
private STNode parseTypeName() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.TYPE_NAME);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse record type descriptor. A record type descriptor body has the following structure.
 * </p>
 *
 * <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor
 * <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* }
 * <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}
 * </code>
 *
 * @return Parsed node
 */
private STNode parseRecordTypeDescriptor() {
    startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);
    STNode recordKeyword = parseRecordKeyword();
    STNode startDelimiter = parseRecordBodyStartDelimiter();
    // "{" opens an inclusive record; "{|" opens an exclusive one.
    boolean isInclusive = startDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;
    STNode fields = parseFieldDescriptors(isInclusive);
    STNode endDelimiter = parseRecordBodyCloseDelimiter(startDelimiter.kind);
    endContext();
    return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, startDelimiter, fields,
            endDelimiter);
}
/**
 * Parse record body start delimiter (<code>{</code> or <code>{|</code>).
 *
 * @return Parsed node
 */
private STNode parseRecordBodyStartDelimiter() {
    return parseRecordBodyStartDelimiter(peek().kind);
}
private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {
    if (kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {
        return parseClosedRecordBodyStart();
    }
    if (kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseOpenBrace();
    }
    // Neither delimiter found: recover and retry with the suggested token.
    STToken token = peek();
    Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseRecordBodyStartDelimiter(solution.tokenKind);
}
/**
 * Parse closed-record body start delimiter (<code>{|</code>).
 *
 * @return Parsed node
 */
private STNode parseClosedRecordBodyStart() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.CLOSED_RECORD_BODY_START);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse record body close delimiter, matching the delimiter that opened the body.
 *
 * @param startingDelimiter Token kind that opened the record body
 * @return Parsed node
 */
private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimiter) {
    if (startingDelimiter == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {
        return parseClosedRecordBodyEnd();
    }
    if (startingDelimiter == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseCloseBrace();
    }
    // Unknown opening delimiter: recover and retry with the suggested token.
    STToken token = peek();
    Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseRecordBodyCloseDelimiter(solution.tokenKind);
}
/**
 * Parse closed-record body end delimiter (<code>|}</code>).
 *
 * @return Parsed node
 */
private STNode parseClosedRecordBodyEnd() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.CLOSED_RECORD_BODY_END);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>record</code> keyword.
 *
 * @return <code>record</code> keyword token, or the node produced by error recovery
 */
private STNode parseRecordKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RECORD_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.RECORD_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse field descriptors.
 * </p>
 *
 * @param isInclusive Whether the record body is the inclusive ({ }) form
 * @return Node list of parsed record fields
 */
private STNode parseFieldDescriptors(boolean isInclusive) {
    ArrayList<STNode> recordFields = new ArrayList<>();
    STToken token = peek();
    boolean endOfFields = false;
    // First phase: collect fields until the body ends, or until a rest
    // descriptor (type ...) is seen — the rest descriptor must be last.
    while (!isEndOfRecordTypeNode(token.kind)) {
        STNode field = parseFieldOrRestDescriptor(isInclusive);
        if (field == null) {
            endOfFields = true;
            break;
        }
        recordFields.add(field);
        token = peek();
        if (field.kind == SyntaxKind.RECORD_REST_TYPE) {
            break;
        }
    }
    // Second phase: anything remaining after a rest descriptor is invalid.
    // Parse (and discard) each extra field so the parser keeps advancing,
    // reporting an error for each one.
    while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {
        parseFieldOrRestDescriptor(isInclusive);
        this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor");
        token = peek();
    }
    return STNodeFactory.createNodeList(recordFields);
}
/**
 * <p>
 * Parse field descriptor or rest descriptor.
 * </p>
 *
 * <code>
 * <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference
 * <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;
 * <br/><br/>field-name := identifier
 * <br/><br/>default-value := = expression
 * <br/><br/>record-type-reference := * type-reference ;
 * <br/><br/>record-rest-descriptor := type-descriptor ... ;
 * </code>
 *
 * @param isInclusive Whether the record body is the inclusive ({ }) form
 * @return Parsed node
 */
private STNode parseFieldOrRestDescriptor(boolean isInclusive) {
    STToken nextToken = peek();
    return parseFieldOrRestDescriptor(nextToken.kind, isInclusive);
}
// Parses a single record field, a record type reference (*T;), or a rest
// descriptor, dispatching on the next token kind. Returns null when the
// record body has ended.
private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {
    switch (nextTokenKind) {
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACE_PIPE_TOKEN:
            // End of the record body: signal the caller to stop.
            return null;
        case ASTERISK_TOKEN:
            // record-type-reference := * type-reference ;
            startContext(ParserRuleContext.RECORD_FIELD);
            STNode asterisk = consume();
            STNode type = parseTypeReference();
            STNode semicolonToken = parseSemicolon();
            endContext();
            return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
        case AT_TOKEN:
            // Field preceded by annotations/metadata.
            // NOTE: 'type' and 'fieldOrRestDesc' below deliberately reuse the
            // locals declared in the ASTERISK_TOKEN case (switch-level scope).
            startContext(ParserRuleContext.RECORD_FIELD);
            STNode metadata = parseMetaData(nextTokenKind);
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
            STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
            endContext();
            return fieldOrRestDesc;
        default:
            // Plain field starting with a type descriptor, no metadata.
            if (isTypeStartingToken(nextTokenKind)) {
                startContext(ParserRuleContext.RECORD_FIELD);
                metadata = createEmptyMetadata();
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
                fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
                endContext();
                return fieldOrRestDesc;
            }
            // Unexpected token: recover and retry with the suggested kind.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);
    }
}
// Inclusive records only allow named fields; exclusive records may also
// end with a rest descriptor, so the RHS parsing differs.
private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) {
    if (!isInclusive) {
        return parseFieldOrRestDescriptorRhs(metadata, type);
    }
    STNode fieldName = parseVariableName();
    return parseFieldDescriptorRhs(metadata, type, fieldName);
}
/**
 * Parse type reference.
 * <code>type-reference := identifier | qualified-identifier</code>
 *
 * @return Type reference node
 */
private STNode parseTypeReference() {
    STNode typeReference = parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE);
    return typeReference;
}
/**
 * Parse an identifier or a qualified identifier.
 *
 * @param currentCtx Parser context used for error recovery
 * @return Identifier node
 */
private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        Solution sol = recover(nextToken, currentCtx);
        return sol.recoveredNode;
    }
    STNode typeRefOrPkgRef = consume();
    return parseQualifiedIdentifier(typeRefOrPkgRef);
}
/**
 * Parse identifier or qualified identifier, given the starting identifier.
 *
 * @param identifier Starting identifier
 * @return Parse node
 */
private STNode parseQualifiedIdentifier(STNode identifier) {
    STToken nextToken = peek(1);
    // No ':' following: this is a simple (unqualified) name reference.
    if (nextToken.kind != SyntaxKind.COLON_TOKEN) {
        return STNodeFactory.createSimpleNameReferenceNode(identifier);
    }
    STToken nextNextToken = peek(2);
    if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STToken colon = consume();
        STToken varOrFuncName = consume();
        return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);
    } else {
        // ':' not followed by an identifier: drop the invalid token and
        // retry. Each retry removes one token, so this terminates.
        this.errorHandler.removeInvalidToken();
        return parseQualifiedIdentifier(identifier);
    }
}
/**
 * Parse RHS of a field or rest type descriptor.
 *
 * @param metadata Metadata
 * @param type Type descriptor
 * @return Parsed node
 */
private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {
    return parseFieldOrRestDescriptorRhs(peek().kind, metadata, type);
}
// Parses what follows a type descriptor in an exclusive record body:
// "..." makes it a rest descriptor, an identifier makes it a named field.
private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {
    switch (kind) {
        case ELLIPSIS_TOKEN:
            // record-rest-descriptor := type-descriptor ... ;
            STNode ellipsis = parseEllipsis();
            STNode semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);
        case IDENTIFIER_TOKEN:
            // Named field: continue with the field-descriptor RHS.
            STNode fieldName = parseVariableName();
            return parseFieldDescriptorRhs(metadata, type, fieldName);
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);
    }
}
/**
 * <p>
 * Parse field descriptor rhs.
 * </p>
 *
 * @param metadata Metadata
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed node
 */
private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) {
    return parseFieldDescriptorRhs(peek().kind, metadata, type, fieldName);
}
/**
 * <p>
 * Parse field descriptor rhs.
 * </p>
 *
 * <code>
 * field-descriptor := [? | default-value] ;
 * <br/>default-value := = expression
 * </code>
 *
 * @param kind Kind of the next token
 * @param metadata Metadata
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed node
 */
private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) {
    switch (kind) {
        case SEMICOLON_TOKEN:
            // Plain required field: "T name;".
            STNode questionMarkToken = STNodeFactory.createEmptyNode();
            STNode semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                    semicolonToken);
        case QUESTION_MARK_TOKEN:
            // Optional field: "T name?;". Reuses the locals declared above
            // (switch-level scope).
            questionMarkToken = parseQuestionMark();
            semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                    semicolonToken);
        case EQUAL_TOKEN:
            // Field with default value: "T name = expr;".
            STNode equalsToken = parseAssignOp();
            STNode expression = parseExpression();
            semicolonToken = parseSemicolon();
            return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken,
                    expression, semicolonToken);
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName);
    }
}
/**
 * Parse a question mark token.
 *
 * @return Question mark token, or the node produced by error recovery
 */
private STNode parseQuestionMark() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.QUESTION_MARK_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.QUESTION_MARK);
        return sol.recoveredNode;
    }
    return consume();
}
/*
* Statements
*/
/**
 * Parse statements, until an end of a block is reached.
 *
 * @return Node list of parsed statements
 */
private STNode parseStatements() {
    STToken token = peek();
    ArrayList<STNode> stmts = new ArrayList<>();
    while (!isEndOfStatements(token.kind)) {
        STNode stmt = parseStatement();
        // A null statement means the statement parser hit the end of the block.
        if (stmt == null) {
            break;
        }
        // Named workers are only valid inside a function body block, not in
        // a general statement sequence; report and stop.
        if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
            this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
            break;
        }
        stmts.add(stmt);
        token = peek();
    }
    return STNodeFactory.createNodeList(stmts);
}
// Checks whether the next token terminates a statement sequence. A
// 'service' keyword only does so when it actually starts a service
// declaration.
private boolean isEndOfStatements(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.SERVICE_KEYWORD) {
        return isServiceDeclStart(ParserRuleContext.STATEMENT, 1);
    }
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
/**
 * Parse a single statement.
 *
 * @return Parsed node
 */
protected STNode parseStatement() {
    return parseStatement(peek().kind);
}
// Parses a single statement, first handling any leading annotations, then
// delegating to parseStatement(tokenKind, annots).
private STNode parseStatement(SyntaxKind tokenKind) {
    STNode annots = null;
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
            // End of the enclosing block.
            return null;
        case SEMICOLON_TOKEN:
            // Stray semicolon: drop it and re-peek for the next statement.
            this.errorHandler.removeInvalidToken();
            return parseStatement();
        case AT_TOKEN:
            // Leading annotations; re-peek after consuming them, then fall
            // through to the common path below.
            annots = parseAnnotations(tokenKind);
            tokenKind = peek().kind;
            break;
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case PANIC_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case TYPE_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case WORKER_KEYWORD:
            // Known statement-starting keywords: handled by the delegate.
            break;
        default:
            // A type (var-decl) or an LHS expression can also start a statement.
            if (isTypeStartingToken(tokenKind)) {
                break;
            }
            if (isValidLHSExpression(tokenKind)) {
                break;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseStatement(solution.tokenKind);
    }
    return parseStatement(tokenKind, annots);
}
// Returns the given annotations node, or an empty node-list when null.
private STNode getAnnotations(STNode nullableAnnot) {
    return nullableAnnot != null ? nullableAnnot : STNodeFactory.createNodeList(new ArrayList<>());
}
private STNode parseStatement(STNode annots) {
    STToken nextToken = peek();
    return parseStatement(nextToken.kind, annots);
}
/**
 * Parse a single statement, given the next token kind and any annotations
 * already parsed.
 *
 * @param tokenKind Next token kind
 * @param annots Already-parsed annotations attached to this statement (may be null)
 * @return Parsed node, or null at the end of the enclosing block
 */
private STNode parseStatement(SyntaxKind tokenKind, STNode annots) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
            // Annotations with no statement following them are invalid.
            this.errorHandler.reportInvalidNode(null, "invalid annotations");
            return null;
        case SEMICOLON_TOKEN:
            this.errorHandler.removeInvalidToken();
            // BUGFIX: re-peek after removing the token. Recursing with the
            // stale SEMICOLON_TOKEN kind would re-enter this case and remove
            // the following (possibly valid) token as well, repeatedly.
            return parseStatement(peek().kind, annots);
        case FINAL_KEYWORD:
            STNode finalKeyword = parseFinalKeyword();
            return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
        case IF_KEYWORD:
            return parseIfElseBlock();
        case WHILE_KEYWORD:
            return parseWhileStatement();
        case PANIC_KEYWORD:
            return parsePanicStatement();
        case CONTINUE_KEYWORD:
            return parseContinueStatement();
        case BREAK_KEYWORD:
            return parseBreakStatement();
        case RETURN_KEYWORD:
            return parseReturnStatement();
        case TYPE_KEYWORD:
            return parseLocalTypeDefinitionStatement(getAnnotations(annots));
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
            // Statement that starts with a checking expression.
            return parseStamentStartsWithExpr(tokenKind, getAnnotations(annots));
        case IDENTIFIER_TOKEN:
            // Could be a var-decl, assignment, or call statement.
            return parseStatementStartsWithIdentifier(getAnnotations(annots));
        case LOCK_KEYWORD:
            return parseLockStatement();
        case OPEN_BRACE_TOKEN:
            return parseBlockNode();
        case WORKER_KEYWORD:
            return parseNamedWorkerDeclaration(getAnnotations(annots));
        case FORK_KEYWORD:
            return parseForkStatement();
        case FOREACH_KEYWORD:
            return parseForEachStatement();
        default:
            // A type-starting token begins a variable declaration without 'final'.
            if (isTypeStartingToken(tokenKind)) {
                finalKeyword = STNodeFactory.createEmptyNode();
                return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseStatement(solution.tokenKind, annots);
    }
}
/**
 * <p>
 * Parse variable declaration. Variable declaration can be a local or module level.
 * </p>
 *
 * <code>
 * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt
 * <br/><br/>
 * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;
 * <br/><br/>
 * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;
 * </code>
 *
 * @param annots Annotations or metadata
 * @param finalKeyword Final keyword
 * @param isModuleVar Whether this is a module-level variable declaration
 * @return Parsed node
 */
private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {
    startContext(ParserRuleContext.VAR_DECL_STMT);
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    STNode variableName = parseVariableName();
    STNode result = parseVarDeclRhs(annots, finalKeyword, typeDesc, variableName, isModuleVar);
    endContext();
    return result;
}
/**
 * Parse the <code>final</code> keyword.
 *
 * @return <code>final</code> keyword token, or the node produced by error recovery
 */
private STNode parseFinalKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FINAL_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.FINAL_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse the right hand side of a variable declaration statement.
 * </p>
 * <code>
 * var-decl-rhs := ; | = action-or-expr ;
 * </code>
 *
 * @param metadata metadata
 * @param finalKeyword Final keyword
 * @param type Type descriptor
 * @param varName Variable name
 * @param isModuleVar flag indicating whether the var is module level
 * @return Parsed node
 */
private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName,
                               boolean isModuleVar) {
    return parseVarDeclRhs(peek().kind, metadata, finalKeyword, type, varName, isModuleVar);
}
/**
* Parse the right hand side of a variable declaration statement, given the
* next token kind.
*
* @param tokenKind Next token kind
* @param metadata Metadata
* @param finalKeyword Final keyword
* @param type Type descriptor
* @param varName Variable name
* @param isModuleVar flag indicating whether the var is module level
* @return Parsed node
*/
    private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type,
                                   STNode varName, boolean isModuleVar) {
        STNode assign;
        STNode expr;
        STNode semicolon;
        switch (tokenKind) {
            case EQUAL_TOKEN:
                assign = parseAssignOp();
                // Initializer: module-level vars take a plain expression; locals may also
                // contain actions (e.g. remote method calls).
                if (isModuleVar) {
                    expr = parseExpression();
                } else {
                    expr = parseActionOrExpression();
                }
                semicolon = parseSemicolon();
                break;
            case SEMICOLON_TOKEN:
                // No initializer. Module-level variables require one, so report it.
                if (isModuleVar) {
                    this.errorHandler.reportMissingTokenError("assignment required");
                }
                assign = STNodeFactory.createEmptyNode();
                expr = STNodeFactory.createEmptyNode();
                semicolon = parseSemicolon();
                break;
            default:
                // Unexpected token: run error recovery, then retry with the recovered kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type,
                        varName, isModuleVar);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar);
        }
        if (isModuleVar) {
            return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, type, varName, assign,
                    expr, semicolon);
        }
        return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr,
                semicolon);
    }
/**
* <p>
* Parse the RHS portion of the assignment.
* </p>
* <code>assignment-stmt-rhs := = action-or-expr ;</code>
*
* @param lvExpr LHS expression
* @return Parsed node
*/
private STNode parseAssignmentStmtRhs(STNode lvExpr) {
validateLVExpr(lvExpr);
STNode assign = parseAssignOp();
STNode expr = parseActionOrExpression();
STNode semicolon = parseSemicolon();
return STNodeFactory.createAssignmentStatementNode(lvExpr, assign, expr, semicolon);
}
/*
* Expressions
*/
/**
* Parse expression. This will start parsing expressions from the lowest level of precedence.
*
* @return Parsed node
*/
    protected STNode parseExpression() {
        // Entry point: lowest precedence, rhs context, actions not allowed.
        return parseExpression(DEFAULT_OP_PRECEDENCE, true, false);
    }
/**
* Parse action or expression. This will start parsing actions or expressions from the lowest level of precedence.
*
* @return Parsed node
*/
    private STNode parseActionOrExpression() {
        // Entry point: lowest precedence, rhs context, actions allowed.
        return parseExpression(DEFAULT_OP_PRECEDENCE, true, true);
    }
    /**
     * Parse an action or expression, given the next token kind.
     *
     * @param tokenKind Kind of the next token
     * @return Parsed node
     */
    private STNode parseActionOrExpression(SyntaxKind tokenKind) {
        return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, true, true);
    }
    /**
     * Parse an action or expression, allowing actions.
     *
     * @param isRhsExpr Flag indicating whether this is a rhs expression
     * @return Parsed node
     */
    private STNode parseActionOrExpression(boolean isRhsExpr) {
        return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, true);
    }
/**
* Parse expression.
*
* @param isRhsExpr Flag indicating whether this is a rhs expression
* @return Parsed node
*/
    private STNode parseExpression(boolean isRhsExpr) {
        // Plain expression: actions are not allowed in this entry point.
        return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, false);
    }
private void validateLVExpr(STNode expression) {
if (isValidLVExpr(expression)) {
return;
}
this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs");
}
    /**
     * Check whether an expression is a valid lvalue (assignment LHS): a name reference,
     * or a chain of field/member accesses rooted at one.
     *
     * @param expression Expression to check
     * @return <code>true</code> if the expression can be assigned to
     */
    private boolean isValidLVExpr(STNode expression) {
        switch (expression.kind) {
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                return true;
            case FIELD_ACCESS:
                // Recurse into the accessed expression: a.b.c is valid iff a.b is.
                return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression);
            case INDEXED_EXPRESSION:
                return isValidLVExpr(((STIndexedExpressionNode) expression).containerExpression);
            default:
                // A missing token was already reported by recovery; don't double-report.
                return (expression instanceof STMissingToken);
        }
    }
/**
* Parse an expression that has an equal or higher precedence than a given level.
*
* @param precedenceLevel Precedence level of expression to be parsed
* @param isRhsExpr Flag indicating whether this is a rhs expression
* @param allowActions Flag indicating whether the current context support actions
* @return Parsed node
*/
    private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) {
        // Dispatch on the kind of the lookahead token.
        STToken token = peek();
        return parseExpression(token.kind, precedenceLevel, isRhsExpr, allowActions);
    }
    private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr,
                                   boolean allowActions) {
        // Parse the leaf (highest-precedence) expression, then fold any trailing operators.
        STNode expr = parseTerminalExpression(kind, isRhsExpr, allowActions);
        return parseExpressionRhs(precedenceLevel, expr, isRhsExpr, allowActions);
    }
/**
* Parse terminal expressions. A terminal expression has the highest precedence level
* out of all expressions, and will be at the leaves of an expression tree.
*
* @param isRhsExpr Is a rhs expression
* @param allowActions Allow actions
* @return Parsed node
*/
    private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions) {
        // Dispatch on the kind of the lookahead token.
        return parseTerminalExpression(peek().kind, isRhsExpr, allowActions);
    }
    /**
     * Parse a terminal expression, given the next token kind. Terminals are the leaves of
     * the expression tree: literals, name references, constructors, unary/braced/template
     * expressions, etc. Falls back to error recovery when the token cannot start one.
     *
     * @param kind Kind of the next token
     * @param isRhsExpr Is a rhs expression
     * @param allowActions Allow actions
     * @return Parsed node
     */
    private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) {
        switch (kind) {
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case STRING_LITERAL:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL:
            case HEX_FLOATING_POINT_LITERAL:
                return parseBasicLiteral();
            case IDENTIFIER_TOKEN:
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            case OPEN_PAREN_TOKEN:
                STToken nextNextToken = getNextNextToken(kind);
                // An empty pair of parentheses is the nil literal, not a braced expression.
                if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
                    return parseNilLiteral();
                }
                return parseBracedExpression(isRhsExpr, allowActions);
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
                return parseCheckExpression(isRhsExpr, allowActions);
            case OPEN_BRACE_TOKEN:
                return parseMappingConstructorExpr();
            case TYPEOF_KEYWORD:
                return parseTypeofExpression(isRhsExpr);
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
                return parseUnaryExpression(isRhsExpr);
            case TRAP_KEYWORD:
                return parseTrapExpression(isRhsExpr);
            case OPEN_BRACKET_TOKEN:
                return parseListConstructorExpr();
            case LT_TOKEN:
                return parseTypeCastExpr(isRhsExpr);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
            case FROM_KEYWORD:
                return parseTableConstructorOrQuery(isRhsExpr);
            case ERROR_KEYWORD:
                return parseErrorConstructorExpr();
            case LET_KEYWORD:
                return parseLetExpression(isRhsExpr);
            case BACKTICK_TOKEN:
                return parseTemplateExpression();
            case XML_KEYWORD:
                // `xml` starts a template expression only when followed by a backtick;
                // otherwise fall through to the recovery path below.
                nextNextToken = getNextNextToken(kind);
                if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseXMLTemplateExpression();
                }
                break;
            case STRING_KEYWORD:
                // Same treatment for `string` followed by a backtick.
                nextNextToken = getNextNextToken(kind);
                if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseStringTemplateExpression();
                }
                break;
            case FUNCTION_KEYWORD:
                return parseFunctionExpression(null);
            case AT_TOKEN:
                break;
            case NEW_KEYWORD:
                return parseNewExpression();
            default:
                break;
        }
        // Recovery path: the token cannot start a terminal expression.
        Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, isRhsExpr, allowActions);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        if (solution.action == Action.KEEP) {
            // KEEP: the xml/string keyword itself is kept; continue as a template expression.
            if (kind == SyntaxKind.XML_KEYWORD) {
                return parseXMLTemplateExpression();
            }
            return parseStringTemplateExpression();
        }
        switch (solution.tokenKind) {
            case IDENTIFIER_TOKEN:
                this.errorHandler.reportMissingTokenError("missing " + solution.recoveredNode);
                return parseQualifiedIdentifier(solution.recoveredNode);
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case STRING_LITERAL:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL:
            case HEX_FLOATING_POINT_LITERAL:
                this.errorHandler.reportMissingTokenError("missing " + solution.recoveredNode);
                return solution.recoveredNode;
            default:
                // Retry with the token kind suggested by the recovery.
                return parseTerminalExpression(solution.tokenKind, isRhsExpr, allowActions);
        }
    }
    /**
     * Continue parsing an action or expression whose leading part was already parsed as a
     * statement LHS.
     *
     * @param nextTokenKind Kind of the next token
     * @param lhsExpr Already-parsed leading expression
     * @return Parsed node
     */
    private STNode parseActionOrExpressionInLhs(SyntaxKind nextTokenKind, STNode lhsExpr) {
        return parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, lhsExpr, false, true);
    }
/**
* <p>
* Parse a new expression.
* </p>
* <code>
* new-expr := explicit-new-expr | implicit-new-expr
* explicit-new-expr := new type-descriptor ( arg-list )
* implicit-new-expr := new [( arg-list )]
* </code>
*
* @return Parsed NewExpression node.
*/
    private STNode parseNewExpression() {
        // new-expr := explicit-new-expr | implicit-new-expr
        STNode newKeyword = parseNewKeyword();
        return parseNewKeywordRhs(newKeyword);
    }
/**
* <p>
* Parse `new` keyword.
* </p>
*
* @return Parsed NEW_KEYWORD Token.
*/
private STNode parseNewKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.NEW_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.NEW_KEYWORD);
return sol.recoveredNode;
}
}
private STNode parseNewKeywordRhs(STNode newKeyword) {
STNode token = peek();
return parseNewKeywordRhs(token.kind, newKeyword);
}
/**
* <p>
* Parse an implicit or explicit expression.
* </p>
* @param kind next token kind.
* @param newKeyword parsed node for `new` keyword.
* @return Parsed new-expression node.
*/
    /**
     * <p>
     * Parse an implicit or explicit expression.
     * </p>
     * @param kind next token kind.
     * @param newKeyword parsed node for `new` keyword.
     * @return Parsed new-expression node.
     */
    private STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {
        switch (kind) {
            case OPEN_PAREN_TOKEN:
                // new ( arg-list )  --> implicit-new with arguments
                return parseImplicitNewRhs(newKeyword);
            case SEMICOLON_TOKEN:
                // Bare `new;` --> implicit-new without arguments (handled below).
                break;
            case IDENTIFIER_TOKEN:
            case OBJECT_KEYWORD:
                // new T(...)  --> explicit-new with a type descriptor
                return parseTypeDescriptorInNewExpr(newKeyword);
            default:
                break;
        }
        // Default: implicit-new with no argument list.
        return STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());
    }
/**
* <p>
* Parse an Explicit New expression.
* </p>
* <code>
* explicit-new-expr := new type-descriptor ( arg-list )
* </code>
*
* @param newKeyword Parsed `new` keyword.
* @return the Parsed Explicit New Expression.
*/
private STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {
STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);
STNode parenthesizedArgsList = parseParenthesizedArgList();
return STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDescriptor, parenthesizedArgsList);
}
/**
* <p>
* Parse an <code>implicit-new-expr</code> with arguments.
* </p>
*
* @param newKeyword Parsed `new` keyword.
* @return Parsed implicit-new-expr.
*/
private STNode parseImplicitNewRhs(STNode newKeyword) {
STNode implicitNewArgList = parseParenthesizedArgList();
return STNodeFactory.createImplicitNewExpressionNode(newKeyword, implicitNewArgList);
}
/**
* <p>
* Parse the parenthesized argument list for a <code>new-expr</code>.
* </p>
*
* @return Parsed parenthesized rhs of <code>new-expr</code>.
*/
private STNode parseParenthesizedArgList() {
STNode openParan = parseOpenParenthesis();
STNode arguments = parseArgsList();
STNode closeParan = parseCloseParenthesis();
return STNodeFactory.createParenthesizedArgList(openParan, arguments, closeParan);
}
/**
* <p>
* Parse the right-hand-side of an expression.
* </p>
* <code>expr-rhs := (binary-op expression
* | dot identifier
* | open-bracket expression close-bracket
* )*</code>
*
* @param precedenceLevel Precedence level of the expression that is being parsed currently
* @param lhsExpr LHS expression of the expression
* @param isLVExpr Flag indicating whether this is on a lhsExpr of a statement
* @param allowActions Flag indicating whether the current context support actions
* @return Parsed node
*/
    private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isLVExpr,
                                      boolean allowActions) {
        // Dispatch on the kind of the lookahead token.
        STToken token = peek();
        return parseExpressionRhs(token.kind, precedenceLevel, lhsExpr, isLVExpr, allowActions);
    }
/**
* Parse the right hand side of an expression given the next token kind.
*
* @param tokenKind Next token kind
* @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently
* @param lhsExpr LHS expression
* @param isRhsExpr Flag indicating whether this is a rhs expr or not
* @param allowActions Flag indicating whether to allow actions or not
* @return Parsed node
*/
    /**
     * Parse the right hand side of an expression given the next token kind, using
     * precedence climbing: operators at or above the current precedence level are folded
     * into the LHS; a lower-precedence operator terminates this invocation.
     *
     * @param tokenKind Next token kind
     * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently
     * @param lhsExpr LHS expression
     * @param isRhsExpr Flag indicating whether this is a rhs expr or not
     * @param allowActions Flag indicating whether to allow actions or not
     * @return Parsed node
     */
    private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr,
                                      boolean isRhsExpr, boolean allowActions) {
        if (isEndOfExpression(tokenKind, isRhsExpr)) {
            return lhsExpr;
        }
        if (!isValidExprRhsStart(tokenKind)) {
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr,
                    isRhsExpr, allowActions);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {
                // Recovery inserted a binary operator: choose an operator kind that matches
                // the current precedence level so the climbing logic stays consistent.
                SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);
                return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions);
            } else {
                return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions);
            }
        }
        // Combine consecutive angle-bracket tokens into shift operators (<<, >>, >>>).
        // NOTE(review): presumably the lexer emits them separately to disambiguate from
        // type parameters — confirm against the lexer.
        if (tokenKind == SyntaxKind.LT_TOKEN && peek(2).kind == SyntaxKind.LT_TOKEN) {
            tokenKind = SyntaxKind.DOUBLE_LT_TOKEN;
        } else if (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) {
            if (peek(3).kind == SyntaxKind.GT_TOKEN) {
                tokenKind = SyntaxKind.TRIPPLE_GT_TOKEN;
            } else {
                tokenKind = SyntaxKind.DOUBLE_GT_TOKEN;
            }
        }
        OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind);
        if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence)) {
            // Lower-precedence operator ahead: let an outer invocation consume it.
            return lhsExpr;
        }
        STNode newLhsExpr;
        STNode operator;
        switch (tokenKind) {
            case OPEN_PAREN_TOKEN:
                newLhsExpr = parseFuncCall(lhsExpr);
                break;
            case OPEN_BRACKET_TOKEN:
                newLhsExpr = parseMemberAccessExpr(lhsExpr);
                break;
            case DOT_TOKEN:
                newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr);
                break;
            case IS_KEYWORD:
                newLhsExpr = parseTypeTestExpression(lhsExpr);
                break;
            case RIGHT_ARROW_TOKEN:
                // Parse the action first so the tree stays well-formed, then report if
                // actions are not permitted in this context.
                newLhsExpr = parseAction(tokenKind, lhsExpr);
                if (!allowActions) {
                    this.errorHandler.reportInvalidNode(null, "actions are not allowed here");
                }
                break;
            default:
                if (tokenKind == SyntaxKind.DOUBLE_LT_TOKEN) {
                    operator = parseDoubleLTToken();
                } else if (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {
                    operator = parseDoubleGTToken();
                } else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {
                    operator = parseTrippleGTToken();
                } else {
                    operator = parseBinaryOperator();
                }
                // Parse the RHS at the operator's own precedence; actions never appear
                // inside a binary expression, hence allowActions = false here.
                STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false);
                newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,
                        rhsExpr);
                break;
        }
        // Keep folding further operators at the current precedence level.
        return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions);
    }
private boolean isValidExprRhsStart(SyntaxKind tokenKind) {
switch (tokenKind) {
case OPEN_PAREN_TOKEN:
case DOT_TOKEN:
case OPEN_BRACKET_TOKEN:
case IS_KEYWORD:
case RIGHT_ARROW_TOKEN:
return true;
default:
return isBinaryOperator(tokenKind);
}
}
/**
* Parse member access expression.
*
* @param lhsExpr Container expression
* @return Member access expression
*/
    /**
     * Parse member access expression.
     *
     * @param lhsExpr Container expression
     * @return Member access expression
     */
    private STNode parseMemberAccessExpr(STNode lhsExpr) {
        STNode openBracket = parseOpenBracket();
        STNode keyExpr;
        switch (peek().kind) {
            case CLOSE_BRACKET_TOKEN:
                // Empty brackets: no key expression.
                keyExpr = STNodeFactory.createEmptyNode();
                break;
            case ASTERISK_TOKEN:
                // `[*]` - the asterisk token itself is the key.
                keyExpr = consume();
                break;
            default:
                keyExpr = parseExpression();
                break;
        }
        STNode closeBracket = parseCloseBracket();
        return STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);
    }
/**
* Parse close bracket.
*
* @return Parsed node
*/
private STNode parseCloseBracket() {
STToken token = peek();
if (token.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CLOSE_BRACKET);
return sol.recoveredNode;
}
}
/**
* Parse field access expression and method call expression.
*
* @param lhsExpr Preceding expression of the field access or method call
* @return One of <code>field-access-expression</code> or <code>method-call-expression</code>.
*/
private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) {
STNode dotToken = parseDotToken();
STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME);
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
STNode openParen = parseOpenParenthesis();
STNode args = parseArgsList();
STNode closeParen = parseCloseParenthesis();
return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,
closeParen);
}
return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
}
/**
* <p>
* Parse braced expression.
* </p>
* <code>braced-expr := ( expression )</code>
*
* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
* @param allowActions Allow actions
* @return Parsed node
*/
private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {
STNode openParen = parseOpenParenthesis();
STNode expr;
if (allowActions) {
expr = parseActionOrExpression(isRhsExpr);
} else {
expr = parseExpression(isRhsExpr);
}
STNode closeParen = parseCloseParenthesis();
if (isAction(expr)) {
return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);
} else {
return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen);
}
}
/**
* Check whether a given node is an action node.
*
* @param node Node to check
* @return <code>true</code> if the node is an action node. <code>false</code> otherwise
*/
private boolean isAction(STNode node) {
switch (node.kind) {
case REMOTE_METHOD_CALL_ACTION:
case BRACED_ACTION:
case CHECK_ACTION:
return true;
default:
return false;
}
}
/**
* Check whether the given token is an end of a expression.
*
* @param tokenKind Token to check
* @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
* @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise
*/
    /**
     * Check whether the given token is an end of a expression.
     *
     * @param tokenKind Token to check
     * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
     * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise
     */
    private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) {
        if (!isRhsExpr) {
            // In LHS position, a compound operator (+=, -= etc.) terminates the expression,
            // and anything that cannot continue an expression ends it.
            if (isCompoundBinaryOperator(tokenKind)) {
                return true;
            }
            return !isValidExprRhsStart(tokenKind);
        }
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
            case OPEN_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case SEMICOLON_TOKEN:
            case COMMA_TOKEN:
            case PUBLIC_KEYWORD:
            case EOF_TOKEN:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case EQUAL_TOKEN:
            case AT_TOKEN:
            case DOCUMENTATION_LINE:
            case AS_KEYWORD:
            case IN_KEYWORD:
            case BACKTICK_TOKEN:
            case FROM_KEYWORD:
            case WHERE_KEYWORD:
            case LET_KEYWORD:
            case SELECT_KEYWORD:
                return true;
            default:
                // A type-starting simple-type token also terminates an expression.
                return isSimpleType(tokenKind);
        }
    }
/**
* Parse basic literals. It is assumed that we come here after validation.
*
* @return Parsed node
*/
private STNode parseBasicLiteral() {
STToken literalToken = consume();
return STNodeFactory.createBasicLiteralNode(literalToken.kind, literalToken);
}
/**
* Parse function call expression.
* <code>function-call-expr := function-reference ( arg-list )
* function-reference := variable-reference</code>
*
* @param identifier Function name
* @return Function call expression
*/
private STNode parseFuncCall(STNode identifier) {
STNode openParen = parseOpenParenthesis();
STNode args = parseArgsList();
STNode closeParen = parseCloseParenthesis();
return STNodeFactory.createFunctionCallExpressionNode(identifier, openParen, args, closeParen);
}
/**
* <p>
* Parse error constructor expression.
* </p>
* <code>
* error-constructor-expr := error ( arg-list )
* </code>
*
* @return Error constructor expression
*/
    private STNode parseErrorConstructorExpr() {
        // error-constructor-expr := error ( arg-list ) - shares the func-call shape.
        return parseFuncCall(parseErrorKeyWord());
    }
/**
* Parse function call argument list.
*
* @return Parsed args list
*/
private STNode parseArgsList() {
startContext(ParserRuleContext.ARG_LIST);
ArrayList<STNode> argsList = new ArrayList<>();
STToken token = peek();
if (isEndOfParametersList(token.kind)) {
STNode args = STNodeFactory.createNodeList(argsList);
endContext();
return args;
}
SyntaxKind lastProcessedArgKind = parseFirstArg(argsList);
parseFollowUpArg(argsList, lastProcessedArgKind);
STNode args = STNodeFactory.createNodeList(argsList);
endContext();
return args;
}
/**
* Parse the first argument of a function call.
*
* @param argsList Arguments list to which the parsed argument must be added
* @return Kind of the argument first argument.
*/
    /**
     * Parse the first argument of a function call.
     *
     * @param argsList Arguments list to which the parsed argument must be added
     * @return Kind of the argument first argument.
     */
    private SyntaxKind parseFirstArg(ArrayList<STNode> argsList) {
        // The first argument has no leading comma.
        STNode leadingComma = STNodeFactory.createEmptyNode();
        STNode arg = parseArg(leadingComma);
        // Arg kinds are ordered (positional < named < rest) by enum ordinal; the first
        // arg must be at least positional.
        if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) {
            argsList.add(arg);
            return arg.kind;
        } else {
            reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind);
            return SyntaxKind.POSITIONAL_ARG;
        }
    }
/**
* Parse follow up arguments.
*
* @param argsList Arguments list to which the parsed argument must be added
* @param lastProcessedArgKind Kind of the argument processed prior to this
*/
private void parseFollowUpArg(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) {
STToken nextToken = peek();
while (!isEndOfParametersList(nextToken.kind)) {
STNode leadingComma = parseComma();
nextToken = peek();
if (isEndOfParametersList(nextToken.kind)) {
this.errorHandler.reportInvalidNode((STToken) leadingComma, "invalid token " + leadingComma);
break;
}
STNode arg = parseArg(nextToken.kind, leadingComma);
if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) {
if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) {
this.errorHandler.reportInvalidNode(nextToken, "cannot more than one rest arg");
} else {
argsList.add(arg);
lastProcessedArgKind = arg.kind;
}
} else {
reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind);
}
nextToken = peek();
}
}
/**
* Report invalid order of args.
*
 * @param token Starting token of the arg.
* @param lastArgKind Kind of the previously processed arg
* @param argKind Current arg
*/
    private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) {
        // e.g. a positional arg appearing after a named or rest arg.
        this.errorHandler.reportInvalidNode(token, "cannot have a " + argKind + " after the " + lastArgKind);
    }
/**
* Parse function call argument.
*
* @param leadingComma Comma that occurs before the param
* @return Parsed argument node
*/
private STNode parseArg(STNode leadingComma) {
STToken token = peek();
return parseArg(token.kind, leadingComma);
}
    /**
     * Parse a function call argument given the next token kind: a rest arg
     * (<code>...expr</code>), a named or positional arg (identifier-led), or a plain
     * positional arg.
     *
     * @param kind Kind of the next token
     * @param leadingComma Comma that occurs before the arg
     * @return Parsed argument node
     */
    private STNode parseArg(SyntaxKind kind, STNode leadingComma) {
        STNode arg;
        switch (kind) {
            case ELLIPSIS_TOKEN:
                STToken ellipsis = consume();
                STNode expr = parseExpression();
                arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr);
                break;
            case IDENTIFIER_TOKEN:
                // An identifier may start either a named arg (name=expr) or a positional arg.
                arg = parseNamedOrPositionalArg(leadingComma);
                break;
            // All literal-like starters deliberately fall through to the default:
            // they are parsed as a positional-arg expression.
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case STRING_LITERAL:
            case OPEN_PAREN_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
            default:
                expr = parseExpression();
                arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr);
                break;
        }
        return arg;
    }
/**
* Parse positional or named arg. This method assumed peek()/peek(1)
* is always an identifier.
*
* @param leadingComma Comma that occurs before the param
* @return Parsed argument node
*/
    /**
     * Parse positional or named arg. This method assumed peek()/peek(1)
     * is always an identifier.
     *
     * @param leadingComma Comma that occurs before the param
     * @return Parsed argument node
     */
    private STNode parseNamedOrPositionalArg(STNode leadingComma) {
        // Look one token past the identifier to disambiguate.
        STToken secondToken = peek(2);
        switch (secondToken.kind) {
            case EQUAL_TOKEN:
                // identifier = expr  --> named arg
                STNode argNameOrVarRef = STNodeFactory.createSimpleNameReferenceNode(consume());
                STNode equal = parseAssignOp();
                STNode expr = parseExpression();
                return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrVarRef, equal, expr);
            case COMMA_TOKEN:
            case CLOSE_PAREN_TOKEN:
                // Bare identifier followed by , or ) --> positional name reference.
                argNameOrVarRef = STNodeFactory.createSimpleNameReferenceNode(consume());
                return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrVarRef);
            // Anything else (identifier is the start of a larger expression): fall through
            // and parse the whole expression as a positional arg.
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case STRING_LITERAL:
            case IDENTIFIER_TOKEN:
            case OPEN_PAREN_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL:
            case HEX_FLOATING_POINT_LITERAL:
            default:
                expr = parseExpression();
                return STNodeFactory.createPositionalArgumentNode(leadingComma, expr);
        }
    }
/**
* Parse object type descriptor.
*
* @return Parsed node
*/
private STNode parseObjectTypeDescriptor() {
startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);
STNode objectTypeQualifiers = parseObjectTypeQualifiers();
STNode objectKeyword = parseObjectKeyword();
STNode openBrace = parseOpenBrace();
STNode objectMembers = parseObjectMembers();
STNode closeBrace = parseCloseBrace();
endContext();
return STNodeFactory.createObjectTypeDescriptorNode(objectTypeQualifiers, objectKeyword, openBrace,
objectMembers, closeBrace);
}
/**
* Parse object type qualifiers.
*
* @return Parsed node
*/
private STNode parseObjectTypeQualifiers() {
STToken nextToken = peek();
return parseObjectTypeQualifiers(nextToken.kind);
}
    /**
     * Parse up to two object-type qualifiers (<code>client</code> / <code>abstract</code>)
     * given the next token kind. Returns an empty node list when `object` follows directly.
     *
     * @param kind Kind of the next token
     * @return Node list of qualifiers
     */
    private STNode parseObjectTypeQualifiers(SyntaxKind kind) {
        List<STNode> qualifiers = new ArrayList<>();
        STNode firstQualifier;
        switch (kind) {
            case CLIENT_KEYWORD:
                STNode clientKeyword = parseClientKeyword();
                firstQualifier = clientKeyword;
                break;
            case ABSTRACT_KEYWORD:
                STNode abstractKeyword = parseAbstractKeyword();
                firstQualifier = abstractKeyword;
                break;
            case OBJECT_KEYWORD:
                // No qualifiers present.
                return STNodeFactory.createNodeList(qualifiers);
            default:
                Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectTypeQualifiers(solution.tokenKind);
        }
        // An optional second qualifier of the other kind may follow (null when absent).
        STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier);
        qualifiers.add(firstQualifier);
        if (secondQualifier != null) {
            qualifiers.add(secondQualifier);
        }
        return STNodeFactory.createNodeList(qualifiers);
    }
private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) {
STToken nextToken = peek();
return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier);
}
    /**
     * Parse the optional second object-type qualifier. Returns <code>null</code> when the
     * `object` keyword follows directly. A repeated qualifier (same kind as the first)
     * goes through error recovery.
     *
     * @param kind Kind of the next token
     * @param firstQualifier The already-parsed first qualifier
     * @return Second qualifier node, or <code>null</code> when absent
     */
    private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) {
        // Only accept a qualifier different from the first one; duplicates fall through
        // to the recovery call below.
        if (firstQualifier.kind != kind) {
            switch (kind) {
                case CLIENT_KEYWORD:
                    return parseClientKeyword();
                case ABSTRACT_KEYWORD:
                    return parseAbstractKeyword();
                case OBJECT_KEYWORD:
                    return null;
                default:
                    break;
            }
        }
        Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier);
    }
/**
* Parse client keyword.
*
* @return Parsed node
*/
private STNode parseClientKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CLIENT_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CLIENT_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse abstract keyword.
*
* @return Parsed node
*/
private STNode parseAbstractKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ABSTRACT_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.ABSTRACT_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse object keyword.
*
* @return Parsed node
*/
private STNode parseObjectKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.OBJECT_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.OBJECT_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse object members.
*
* @return Parsed node
*/
    /**
     * Parse object members until the closing brace (or EOF) is reached.
     *
     * @return Node list of parsed members
     */
    private STNode parseObjectMembers() {
        ArrayList<STNode> objectMembers = new ArrayList<>();
        STToken nextToken = peek();
        while (!isEndOfObjectTypeNode(nextToken.kind)) {
            startContext(ParserRuleContext.OBJECT_MEMBER);
            STNode member = parseObjectMember(nextToken.kind);
            endContext();
            // A null member signals the end of the member list (see parseObjectMember).
            if (member == null) {
                break;
            }
            objectMembers.add(member);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(objectMembers);
    }
private STNode parseObjectMember() {
STToken nextToken = peek();
return parseObjectMember(nextToken.kind);
}
    /**
     * Parse a single object member given the next token kind: first collect (or default)
     * the metadata, then delegate to the metadata-aware overload. Returns
     * <code>null</code> at the end of the member list.
     *
     * @param nextTokenKind Kind of the next token
     * @return Parsed member, or <code>null</code> when the member list has ended
     */
    private STNode parseObjectMember(SyntaxKind nextTokenKind) {
        STNode metadata;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // End of the member list.
                return null;
            case ASTERISK_TOKEN:
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
            case REMOTE_KEYWORD:
            case FUNCTION_KEYWORD:
                // Member without metadata.
                metadata = createEmptyMetadata();
                break;
            case DOCUMENTATION_LINE:
            case AT_TOKEN:
                // Member preceded by doc lines and/or annotations.
                metadata = parseMetaData(nextTokenKind);
                nextTokenKind = peek().kind;
                break;
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    metadata = createEmptyMetadata();
                    break;
                }
                Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectMember(solution.tokenKind);
        }
        return parseObjectMember(nextTokenKind, metadata);
    }
    /**
     * Parse a single object member, with the metadata already parsed: a type reference,
     * a method, or a field. Returns <code>null</code> at the end of the member list.
     *
     * @param nextTokenKind Kind of the next token
     * @param metadata Already-parsed metadata (possibly empty)
     * @return Parsed member, or <code>null</code> when the member list has ended
     */
    private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) {
        STNode member;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
                return null;
            case ASTERISK_TOKEN:
                // *TypeName; --> type reference (object type inclusion)
                STNode asterisk = consume();
                STNode type = parseTypeReference();
                STNode semicolonToken = parseSemicolon();
                member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
                break;
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
                STNode visibilityQualifier = parseObjectMemberVisibility();
                member = parseObjectMethodOrField(metadata, visibilityQualifier);
                break;
            case REMOTE_KEYWORD:
                member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode());
                break;
            case FUNCTION_KEYWORD:
                member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode());
                break;
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    member = parseObjectField(metadata, STNodeFactory.createEmptyNode());
                    break;
                }
                // NOTE(review): recovery retries without the already-parsed metadata -
                // confirm the metadata is intentionally dropped here.
                Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectMember(solution.tokenKind);
        }
        return member;
    }
private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) {
STToken nextToken = peek(1);
STToken nextNextToken = peek(2);
return parseObjectMethodOrField(nextToken.kind, nextNextToken.kind, metadata, methodQualifiers);
}
/**
* Parse an object member, given the visibility modifier. Object member can have
* only one visibility qualifier. This mean the methodQualifiers list can have
* one qualifier at-most.
*
 * @param nextTokenKind Next token kind
 * @param nextNextTokenKind Kind of the token after the next token
 * @param metadata Metadata
 * @param visibilityQualifiers Visibility qualifiers. A qualifier can be
 * a syntax node with either 'PUBLIC' or 'PRIVATE'.
* @return Parse object member node
*/
    private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata,
                                            STNode visibilityQualifiers) {
        switch (nextTokenKind) {
            case REMOTE_KEYWORD:
                // remote [function ...]: combine the visibility qualifier (if any) with
                // `remote` into the method-qualifier list.
                STNode remoteKeyword = parseRemoteKeyword();
                ArrayList<STNode> methodQualifiers = new ArrayList<>();
                if (visibilityQualifiers.kind != SyntaxKind.NONE) {
                    methodQualifiers.add(visibilityQualifiers);
                }
                methodQualifiers.add(remoteKeyword);
                return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers));
            case FUNCTION_KEYWORD:
                return parseObjectMethod(metadata, visibilityQualifiers);
            case IDENTIFIER_TOKEN:
                // An identifier NOT followed by `(` is a field whose type is a name ref.
                if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) {
                    return parseObjectField(metadata, visibilityQualifiers);
                }
                break;
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    return parseObjectField(metadata, visibilityQualifiers);
                }
                break;
        }
        Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata,
                visibilityQualifiers);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers);
    }
/**
* Parse object visibility. Visibility can be <code>public</code> or <code>private</code>.
*
* @return Parsed node
*/
private STNode parseObjectMemberVisibility() {
STToken token = peek();
if (token.kind == SyntaxKind.PUBLIC_KEYWORD || token.kind == SyntaxKind.PRIVATE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.PUBLIC_KEYWORD);
return sol.recoveredNode;
}
}
private STNode parseRemoteKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.REMOTE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.REMOTE_KEYWORD);
return sol.recoveredNode;
}
}
    /**
     * Parse an object field, starting from its type descriptor.
     *
     * @param metadata Metadata
     * @param methodQualifiers Visibility qualifier preceding the field, if any
     * @return Parsed object field node
     */
    private STNode parseObjectField(STNode metadata, STNode methodQualifiers) {
        STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode fieldName = parseVariableName();
        return parseObjectFieldRhs(metadata, methodQualifiers, type, fieldName);
    }
/**
* Parse object field rhs, and complete the object field parsing. Returns the parsed object field.
*
* @param metadata Metadata
* @param visibilityQualifier Visibility qualifier
* @param type Type descriptor
* @param fieldName Field name
* @return Parsed object field
*/
private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) {
STToken nextToken = peek();
return parseObjectFieldRhs(nextToken.kind, metadata, visibilityQualifier, type, fieldName);
}
    /**
     * Parse object field rhs, and complete the object field parsing. Returns the parsed object field.
     *
     * @param nextTokenKind Kind of the next token
     * @param metadata Metadata
     * @param visibilityQualifier Visibility qualifier
     * @param type Type descriptor
     * @param fieldName Field name
     * @return Parsed object field
     */
    private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,
                                       STNode type, STNode fieldName) {
        STNode equalsToken;
        STNode expression;
        STNode semicolonToken;
        switch (nextTokenKind) {
            case SEMICOLON_TOKEN:
                // Field without a default value: <type> <field-name> ;
                equalsToken = STNodeFactory.createEmptyNode();
                expression = STNodeFactory.createEmptyNode();
                semicolonToken = parseSemicolon();
                break;
            case EQUAL_TOKEN:
                // Field with a default value: <type> <field-name> = <expr> ;
                equalsToken = parseAssignOp();
                expression = parseExpression();
                semicolonToken = parseSemicolon();
                break;
            default:
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier,
                        type, fieldName);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName);
        }
        return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken,
                expression, semicolonToken);
    }
    /**
     * Parse an object method definition. Object methods share the module-level
     * function-definition grammar, hence this delegates to that parser.
     *
     * @param metadata Metadata
     * @param methodQualifiers Qualifiers preceding the method
     * @return Parsed object method node
     */
    private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) {
        return parseFuncDefOrFuncTypeDesc(metadata, methodQualifiers);
    }
/**
* Parse if-else statement.
* <code>
* if-else-stmt := if expression block-stmt [else-block]
* </code>
*
* @return If-else block
*/
private STNode parseIfElseBlock() {
startContext(ParserRuleContext.IF_BLOCK);
STNode ifKeyword = parseIfKeyword();
STNode condition = parseExpression();
STNode ifBody = parseBlockNode();
endContext();
STNode elseBody = parseElseBlock();
return STNodeFactory.createIfElseStatementNode(ifKeyword, condition, ifBody, elseBody);
}
/**
* Parse if-keyword.
*
* @return Parsed if-keyword node
*/
private STNode parseIfKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.IF_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.IF_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse else-keyword.
*
* @return Parsed else keyword node
*/
private STNode parseElseKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ELSE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.ELSE_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse block node.
* <code>
* block-stmt := { sequence-stmt }
* sequence-stmt := statement*
* </code>
*
* @return Parse block node
*/
private STNode parseBlockNode() {
startContext(ParserRuleContext.BLOCK_STMT);
STNode openBrace = parseOpenBrace();
STNode stmts = parseStatements();
STNode closeBrace = parseCloseBrace();
endContext();
return STNodeFactory.createBlockStatementNode(openBrace, stmts, closeBrace);
}
/**
* Parse else block.
* <code>else-block := else (if-else-stmt | block-stmt)</code>
*
* @return Else block
*/
private STNode parseElseBlock() {
STToken nextToken = peek();
if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {
return STNodeFactory.createEmptyNode();
}
STNode elseKeyword = parseElseKeyword();
STNode elseBody = parseElseBody();
return STNodeFactory.createElseBlockNode(elseKeyword, elseBody);
}
/**
* Parse else node body.
* <code>else-body := if-else-stmt | block-stmt</code>
*
* @return Else node body
*/
private STNode parseElseBody() {
STToken nextToken = peek();
return parseElseBody(nextToken.kind);
}
    /**
     * Parse else node body, given the kind of the next token.
     * <code>else-body := if-else-stmt | block-stmt</code>
     *
     * @param nextTokenKind Kind of the next token
     * @return Else node body
     */
    private STNode parseElseBody(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case IF_KEYWORD:
                // "else if": parse a nested if-else statement.
                return parseIfElseBlock();
            case OPEN_BRACE_TOKEN:
                return parseBlockNode();
            default:
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.ELSE_BODY);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseElseBody(solution.tokenKind);
        }
    }
/**
* Parse while statement.
* <code>while-stmt := while expression block-stmt</code>
*
* @return While statement
*/
private STNode parseWhileStatement() {
startContext(ParserRuleContext.WHILE_BLOCK);
STNode whileKeyword = parseWhileKeyword();
STNode condition = parseExpression();
STNode whileBody = parseBlockNode();
endContext();
return STNodeFactory.createWhileStatementNode(whileKeyword, condition, whileBody);
}
/**
* Parse while-keyword.
*
* @return While-keyword node
*/
private STNode parseWhileKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.WHILE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.WHILE_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse panic statement.
* <code>panic-stmt := panic expression ;</code>
*
* @return Panic statement
*/
private STNode parsePanicStatement() {
startContext(ParserRuleContext.PANIC_STMT);
STNode panicKeyword = parsePanicKeyword();
STNode expression = parseExpression();
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createPanicStatementNode(panicKeyword, expression, semicolon);
}
/**
* Parse panic-keyword.
*
* @return Panic-keyword node
*/
private STNode parsePanicKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.PANIC_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.PANIC_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse check expression. This method is used to parse both check expression
* as well as check action.
*
* <p>
* <code>
* checking-expr := checking-keyword expression
* checking-action := checking-keyword action
* </code>
*
* @param allowActions Allow actions
* @param isRhsExpr Is rhs expression
* @return Check expression node
*/
private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) {
STNode checkingKeyword = parseCheckingKeyword();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, allowActions);
if (isAction(expr)) {
return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_ACTION, checkingKeyword, expr);
} else {
return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, expr);
}
}
/**
* Parse checking keyword.
* <p>
* <code>
* checking-keyword := check | checkpanic
* </code>
*
* @return Parsed node
*/
private STNode parseCheckingKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CHECK_KEYWORD || token.kind == SyntaxKind.CHECKPANIC_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CHECKING_KEYWORD);
return sol.recoveredNode;
}
}
/**
*
* Parse continue statement.
* <code>continue-stmt := continue ; </code>
*
* @return continue statement
*/
private STNode parseContinueStatement() {
startContext(ParserRuleContext.CONTINUE_STATEMENT);
STNode continueKeyword = parseContinueKeyword();
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createContinueStatementNode(continueKeyword, semicolon);
}
/**
* Parse continue-keyword.
*
* @return continue-keyword node
*/
private STNode parseContinueKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CONTINUE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CONTINUE_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse return statement.
* <code>return-stmt := return [ action-or-expr ] ;</code>
*
* @return Return statement
*/
private STNode parseReturnStatement() {
startContext(ParserRuleContext.RETURN_STMT);
STNode returnKeyword = parseReturnKeyword();
STNode returnRhs = parseReturnStatementRhs(returnKeyword);
endContext();
return returnRhs;
}
/**
* Parse return-keyword.
*
* @return Return-keyword node
*/
private STNode parseReturnKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.RETURN_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.RETURN_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse break statement.
* <code>break-stmt := break ; </code>
*
* @return break statement
*/
private STNode parseBreakStatement() {
startContext(ParserRuleContext.BREAK_STATEMENT);
STNode breakKeyword = parseBreakKeyword();
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createBreakStatementNode(breakKeyword, semicolon);
}
/**
* Parse break-keyword.
*
* @return break-keyword node
*/
private STNode parseBreakKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BREAK_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.BREAK_KEYWORD);
return sol.recoveredNode;
}
}
/**
* <p>
* Parse the right hand side of a return statement.
* </p>
* <code>
* return-stmt-rhs := ; | action-or-expr ;
* </code>
*
* @return Parsed node
*/
private STNode parseReturnStatementRhs(STNode returnKeyword) {
STNode expr;
STNode semicolon;
STToken token = peek();
switch (token.kind) {
case SEMICOLON_TOKEN:
expr = STNodeFactory.createEmptyNode();
break;
default:
expr = parseActionOrExpression();
break;
}
semicolon = parseSemicolon();
return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolon);
}
/**
* Parse mapping constructor expression.
* <p>
* <code>mapping-constructor-expr := { [field (, field)*] }</code>
*
* @return Parsed node
*/
private STNode parseMappingConstructorExpr() {
startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
STNode openBrace = parseOpenBrace();
STNode fields = parseMappingConstructorFields();
STNode closeBrace = parseCloseBrace();
endContext();
return STNodeFactory.createMappingConstructorExpressionNode(openBrace, fields, closeBrace);
}
/**
* Parse mapping constructor fields.
*
* @return Parsed node
*/
private STNode parseMappingConstructorFields() {
List<STNode> fields = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfMappingConstructor(nextToken.kind)) {
return STNodeFactory.createNodeList(fields);
}
STNode leadingComma = STNodeFactory.createEmptyNode();
STNode field = parseMappingField(leadingComma);
fields.add(field);
nextToken = peek();
while (!isEndOfMappingConstructor(nextToken.kind)) {
leadingComma = parseComma();
field = parseMappingField(leadingComma);
fields.add(field);
nextToken = peek();
}
return STNodeFactory.createNodeList(fields);
}
    /**
     * Check whether the given token kind terminates a mapping constructor. Tokens that can
     * only occur after (or outside) a mapping constructor — closing delimiters and
     * declaration-starting keywords — are treated as terminators, so that a missing
     * close-brace does not swallow the rest of the file.
     *
     * @param tokenKind Kind of the next token
     * @return <code>true</code> if the token ends the mapping constructor; <code>false</code> otherwise
     */
    private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                // An identifier starts another field.
                return false;
            case EOF_TOKEN:
            case AT_TOKEN:
            case DOCUMENTATION_LINE:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case PRIVATE_KEYWORD:
            case FUNCTION_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case LISTENER_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
            case RESOURCE_KEYWORD:
                return true;
            default:
                // A simple-type token suggests a following declaration, i.e. the constructor ended.
                return isSimpleType(tokenKind);
        }
    }
/**
* Parse mapping constructor field.
* <p>
* <code>field := specific-field | computed-name-field | spread-field</code>
*
* @param leadingComma Leading comma
* @return Parsed node
*/
private STNode parseMappingField(STNode leadingComma) {
STToken nextToken = peek();
return parseMappingField(nextToken.kind, leadingComma);
}
    /**
     * Parse mapping constructor field, given the kind of the next token.
     *
     * @param tokenKind Kind of the next token
     * @param leadingComma Leading comma
     * @return Parsed field node
     */
    private STNode parseMappingField(SyntaxKind tokenKind, STNode leadingComma) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                // specific-field with an identifier key; the value part is optional.
                return parseSpecificFieldWithOptionValue(leadingComma);
            case STRING_LITERAL:
                // specific-field with a string-literal key: "key" : value-expr
                STNode key = parseStringLiteral();
                STNode colon = parseColon();
                STNode valueExpr = parseExpression();
                return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr);
            case OPEN_BRACKET_TOKEN:
                // computed-name-field: [ field-name-expr ] : value-expr
                return parseComputedField(leadingComma);
            case ELLIPSIS_TOKEN:
                // spread-field: ...expr
                STNode ellipsis = parseEllipsis();
                STNode expr = parseExpression();
                return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr);
            default:
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.MAPPING_FIELD, leadingComma);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseMappingField(solution.tokenKind, leadingComma);
        }
    }
    /**
     * Parse mapping constructor specific-field with an optional value.
     *
     * @param leadingComma Leading comma
     * @return Parsed specific-field node
     */
    private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) {
        STNode key = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);
        return parseSpecificFieldRhs(leadingComma, key);
    }
    /**
     * Parse the portion of a specific-field that follows the key.
     *
     * @param leadingComma Leading comma
     * @param key Already-parsed field key
     * @return Parsed specific-field node
     */
    private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) {
        STToken nextToken = peek();
        return parseSpecificFieldRhs(nextToken.kind, leadingComma, key);
    }
    /**
     * Parse the portion of a specific-field that follows the key, given the kind
     * of the next token. The <code>: value-expr</code> part is optional.
     *
     * @param tokenKind Kind of the next token
     * @param leadingComma Leading comma
     * @param key Already-parsed field key
     * @return Parsed specific-field node
     */
    private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) {
        STNode colon;
        STNode valueExpr;
        switch (tokenKind) {
            case COLON_TOKEN:
                // key : value-expr
                colon = parseColon();
                valueExpr = parseExpression();
                break;
            case COMMA_TOKEN:
                // Key-only field; value defaults to empty.
                colon = STNodeFactory.createEmptyNode();
                valueExpr = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isEndOfMappingConstructor(tokenKind)) {
                    // Last field of the constructor, key-only.
                    colon = STNodeFactory.createEmptyNode();
                    valueExpr = STNodeFactory.createEmptyNode();
                    break;
                }
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key);
        }
        return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr);
    }
/**
* Parse string literal.
*
* @return Parsed node
*/
private STNode parseStringLiteral() {
STToken token = peek();
if (token.kind == SyntaxKind.STRING_LITERAL) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.STRING_LITERAL);
return sol.recoveredNode;
}
}
/**
* Parse colon token.
*
* @return Parsed node
*/
private STNode parseColon() {
STToken token = peek();
if (token.kind == SyntaxKind.COLON_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.COLON);
return sol.recoveredNode;
}
}
/**
* Parse computed-name-field of a mapping constructor expression.
* <p>
* <code>computed-name-field := [ field-name-expr ] : value-expr</code>
*
* @param leadingComma Leading comma
* @return Parsed node
*/
private STNode parseComputedField(STNode leadingComma) {
startContext(ParserRuleContext.COMPUTED_FIELD_NAME);
STNode openBracket = parseOpenBracket();
STNode fieldNameExpr = parseExpression();
STNode closeBracket = parseCloseBracket();
endContext();
STNode colon = parseColon();
STNode valueExpr = parseExpression();
return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracket, fieldNameExpr, closeBracket, colon,
valueExpr);
}
/**
* Parse open bracket.
*
* @return Parsed node
*/
private STNode parseOpenBracket() {
STToken token = peek();
if (token.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.OPEN_BRACKET);
return sol.recoveredNode;
}
}
/**
* <p>
* Parse compound assignment statement, which takes the following format.
* </p>
* <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code>
*
* @return Parsed node
*/
private STNode parseCompoundAssignmentStmt() {
startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT);
STNode varName = parseVariableName();
STNode compoundAssignmentStmt = parseCompoundAssignmentStmtRhs(varName);
endContext();
return compoundAssignmentStmt;
}
/**
* <p>
* Parse the RHS portion of the compound assignment.
* </p>
* <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code>
*
* @param lvExpr LHS expression
* @return Parsed node
*/
private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {
validateLVExpr(lvExpr);
STNode binaryOperator = parseCompoundBinaryOperator();
STNode equalsToken = parseAssignOp();
STNode expr = parseActionOrExpression();
STNode semicolon = parseSemicolon();
return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOperator, equalsToken, expr,
semicolon);
}
/**
* Parse compound binary operator.
* <code>BinaryOperator := + | - | * | / | & | | | ^ | << | >> | >>></code>
*
* @return Parsed node
*/
private STNode parseCompoundBinaryOperator() {
STToken token = peek();
if (isCompoundBinaryOperator(token.kind)) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.COMPOUND_BINARY_OPERATOR);
return sol.recoveredNode;
}
}
/**
* Parse service declaration.
* <p>
* <code>
* service-decl := metadata service [variable-name] on expression-list service-body-block
* <br/>
* expression-list := expression (, expression)*
* </code>
*
* @param metadata Metadata
* @return Parsed node
*/
private STNode parseServiceDecl(STNode metadata) {
startContext(ParserRuleContext.SERVICE_DECL);
STNode serviceKeyword = parseServiceKeyword();
STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);
endContext();
return serviceDecl;
}
/**
* Parse rhs of the service declaration.
* <p>
* <code>
* service-rhs := [variable-name] on expression-list service-body-block
* </code>
*
* @param metadata Metadata
* @param serviceKeyword Service keyword
* @return Parsed node
*/
private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {
STNode serviceName = parseServiceName();
STNode onKeyword = parseOnKeyword();
STNode expressionList = parseListeners();
STNode serviceBody = parseServiceBody();
STNode service = STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,
expressionList, serviceBody);
return service;
}
    /**
     * Parse the optional name of a service declaration.
     *
     * @return Parsed service-name node, or an empty node when no name is given
     */
    private STNode parseServiceName() {
        STToken nextToken = peek();
        return parseServiceName(nextToken.kind);
    }
    /**
     * Parse the optional service name, given the kind of the next token.
     *
     * @param kind Kind of the next token
     * @return Parsed service-name node, or an empty node when the name is omitted
     */
    private STNode parseServiceName(SyntaxKind kind) {
        switch (kind) {
            case IDENTIFIER_TOKEN:
                return parseIdentifier(ParserRuleContext.SERVICE_NAME);
            case ON_KEYWORD:
                // "service on ..." — the name was omitted.
                return STNodeFactory.createEmptyNode();
            default:
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.OPTIONAL_SERVICE_NAME);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseServiceName(solution.tokenKind);
        }
    }
/**
* Parse service keyword.
*
* @return Parsed node
*/
private STNode parseServiceKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.SERVICE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.SERVICE_KEYWORD);
return sol.recoveredNode;
}
}
    /**
     * Check whether the given token kind is a compound binary operator.
     * <p>
     * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code>
     *
     * @param tokenKind STToken kind
     * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
     */
    private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case SLASH_TOKEN:
            case ASTERISK_TOKEN:
            case BITWISE_AND_TOKEN:
            case BITWISE_XOR_TOKEN:
            case PIPE_TOKEN:
                // Only a compound operator when immediately followed by '=' (e.g. "+=").
                // NOTE(review): getNextNextToken presumably looks past the current token — confirm.
                return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;
            default:
                return false;
        }
    }
/**
* Parse on keyword.
*
* @return Parsed node
*/
private STNode parseOnKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ON_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.ON_KEYWORD);
return sol.recoveredNode;
}
}
    /**
     * Parse listener references of a service declaration.
     * <p>
     * <code>expression-list := expression (, expression)*</code>
     *
     * @return Parsed node-list of listener expressions, or a missing-token
     *         placeholder when no expression is present
     */
    private STNode parseListeners() {
        startContext(ParserRuleContext.LISTENERS_LIST);
        List<STNode> listeners = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfExpressionsList(nextToken.kind)) {
            // At least one listener expression is mandatory: report it as missing and
            // return a missing-token placeholder instead of an empty list.
            endContext();
            this.errorHandler.reportMissingTokenError("missing expression");
            return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        }
        // First expression carries no leading comma; subsequent ones are comma-separated.
        STNode leadingComma = STNodeFactory.createEmptyNode();
        STNode exprListItem = parseExpressionListItem(leadingComma);
        listeners.add(exprListItem);
        nextToken = peek();
        while (!isEndOfExpressionsList(nextToken.kind)) {
            leadingComma = parseComma();
            exprListItem = parseExpressionListItem(leadingComma);
            listeners.add(exprListItem);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(listeners);
    }
    /**
     * Check whether the given token kind terminates an expression list. Closing
     * delimiters and declaration-starting keywords are treated as terminators so a
     * malformed list does not consume the rest of the file.
     *
     * @param tokenKind Kind of the next token
     * @return <code>true</code> if the token ends the expression list; <code>false</code> otherwise
     */
    private boolean isEndOfExpressionsList(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case COMMA_TOKEN:
            case IDENTIFIER_TOKEN:
                // More list items follow.
                return false;
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case FUNCTION_KEYWORD:
            case EOF_TOKEN:
            case RESOURCE_KEYWORD:
            case LISTENER_KEYWORD:
            case AT_TOKEN:
            case DOCUMENTATION_LINE:
            case PRIVATE_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
                return true;
            default:
                // A simple-type token suggests a following declaration, i.e. the list ended.
                return isSimpleType(tokenKind);
        }
    }
/**
* Parse expression list item.
*
* @param leadingComma Leading comma
* @return Parsed node
*/
private STNode parseExpressionListItem(STNode leadingComma) {
STNode expr = parseExpression();
return STNodeFactory.createExpressionListItemNode(leadingComma, expr);
}
/**
* Parse service body.
* <p>
* <code>
* service-body-block := { service-method-defn* }
* </code>
*
* @return Parsed node
*/
private STNode parseServiceBody() {
STNode openBrace = parseOpenBrace();
STNode resources = parseResources();
STNode closeBrace = parseCloseBrace();
return STNodeFactory.createServiceBodyNode(openBrace, resources, closeBrace);
}
/**
* Parse service resource definitions.
*
* @return Parsed node
*/
private STNode parseResources() {
List<STNode> resources = new ArrayList<>();
STToken nextToken = peek();
while (!isEndOfServiceDecl(nextToken.kind)) {
STNode serviceMethod = parseResource();
if (serviceMethod == null) {
break;
}
resources.add(serviceMethod);
nextToken = peek();
}
return STNodeFactory.createNodeList(resources);
}
    /**
     * Check whether the given token kind terminates a service declaration body.
     * Declaration-starting keywords also terminate, guarding against a missing
     * close-brace.
     *
     * @param tokenKind Kind of the next token
     * @return <code>true</code> if the token ends the service declaration; <code>false</code> otherwise
     */
    private boolean isEndOfServiceDecl(SyntaxKind tokenKind) {
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
            case EOF_TOKEN:
            case CLOSE_BRACE_PIPE_TOKEN:
            case TYPE_KEYWORD:
            case SERVICE_KEYWORD:
                return true;
            default:
                return false;
        }
    }
/**
* Parse resource definition (i.e. service-method-defn).
* <p>
* <code>
* service-body-block := { service-method-defn* }
* <br/>
* service-method-defn := metadata [resource] function identifier function-signature method-defn-body
* </code>
*
* @return Parsed node
*/
private STNode parseResource() {
STToken nextToken = peek();
return parseResource(nextToken.kind);
}
    /**
     * Parse a resource definition, given the kind of the next token. Handles the
     * optional metadata prefix before delegating to the metadata-aware overload.
     *
     * @param nextTokenKind Kind of the next token
     * @return Parsed resource node, or <code>null</code> when the service body has ended
     */
    private STNode parseResource(SyntaxKind nextTokenKind) {
        STNode metadata;
        switch (nextTokenKind) {
            case RESOURCE_KEYWORD:
            case FUNCTION_KEYWORD:
                // Resource starts directly with its keyword: no metadata present.
                metadata = createEmptyMetadata();
                break;
            case DOCUMENTATION_LINE:
            case AT_TOKEN:
                // Metadata (documentation and/or annotations) precedes the resource.
                metadata = parseMetaData(nextTokenKind);
                nextTokenKind = peek().kind;
                break;
            default:
                if (isEndOfServiceDecl(nextTokenKind)) {
                    // Null signals the caller that no further resource exists.
                    return null;
                }
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseResource(solution.tokenKind);
        }
        return parseResource(nextTokenKind, metadata);
    }
    /**
     * Parse a resource definition after its metadata has been consumed.
     *
     * @param nextTokenKind Kind of the next token
     * @param metadata Already-parsed metadata
     * @return Parsed resource node
     */
    private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {
        switch (nextTokenKind) {
            case RESOURCE_KEYWORD:
                STNode resourceKeyword = parseResourceKeyword();
                return parseFuncDefinition(metadata, resourceKeyword);
            case FUNCTION_KEYWORD:
                // Plain method inside a service: no resource qualifier.
                return parseFuncDefinition(metadata, STNodeFactory.createEmptyNode());
            default:
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF, metadata);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseResource(solution.tokenKind, metadata);
        }
    }
/**
* Parse resource keyword.
*
* @return Parsed node
*/
private STNode parseResourceKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.RESOURCE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.RESOURCE_KEYWORD);
return sol.recoveredNode;
}
}
    /**
     * Check whether next construct is a service declaration or not. This method is
     * used to determine whether an end-of-block is reached, if the next token is
     * a service-keyword. Because service-keyword can be used in statements as well
     * as in top-level node (service-decl). If we have reached a service-decl, then
     * it could be due to missing close-brace at the end of the current block.
     *
     * @param currentContext Current parser context
     * @param lookahead Number of tokens to look past; the service-keyword is at
     *        <code>lookahead</code>, so inspection starts at <code>lookahead + 1</code>
     * @return <code>true</code> if the next construct is a service declaration.
     *         <code>false</code> otherwise
     */
    private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
        switch (peek(lookahead + 1).kind) {
            case IDENTIFIER_TOKEN:
                SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
                switch (tokenAfterIdentifier) {
                    case EQUAL_TOKEN:
                    case SEMICOLON_TOKEN:
                        // "service x = ..." / "service x;" — a service-typed variable, not a decl.
                        return false;
                    case ON_KEYWORD:
                        // "service x on ..." — definitely a service declaration.
                        return true;
                    default:
                        // Ambiguous: ask the error handler which interpretation is cheaper.
                        ParserRuleContext sol = this.errorHandler.findBestPath(currentContext);
                        return sol == ParserRuleContext.SERVICE_DECL || sol == ParserRuleContext.CLOSE_BRACE;
                }
            case ON_KEYWORD:
                // Anonymous service: "service on ...".
                return true;
            default:
                // Neither form matches: drop the invalid token and report not-a-service-decl.
                this.errorHandler.removeInvalidToken();
                return false;
        }
    }
/**
* Parse listener declaration, given the qualifier.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @return Parsed node
*/
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
startContext(ParserRuleContext.LISTENER_DECL);
STNode listenerKeyword = parseListenerKeyword();
STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
STNode variableName = parseVariableName();
STNode equalsToken = parseAssignOp();
STNode initializer = parseExpression();
STNode semicolonToken = parseSemicolon();
endContext();
return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,
equalsToken, initializer, semicolonToken);
}
/**
* Parse listener keyword.
*
* @return Parsed node
*/
private STNode parseListenerKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LISTENER_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.LISTENER_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse constant declaration, given the qualifier.
* <p>
* <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @return Parsed node
*/
private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
startContext(ParserRuleContext.CONSTANT_DECL);
STNode constKeyword = parseConstantKeyword();
STNode constDecl = parseConstDecl(metadata, qualifier, constKeyword);
endContext();
return constDecl;
}
/**
* Parse the components that follows after the const keyword of a constant declaration.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the constant decl
* @param constKeyword Const keyword
* @return Parsed node
*/
private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
STToken nextToken = peek();
return parseConstDeclFromType(nextToken.kind, metadata, qualifier, constKeyword);
}
    /**
     * Parse a constant declaration starting from its (optional) type descriptor,
     * given the kind of the next token. A leading annotation-keyword turns this
     * into an annotation declaration instead.
     *
     * @param nextTokenKind Kind of the next token
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @return Parsed declaration node
     */
    private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                          STNode constKeyword) {
        switch (nextTokenKind) {
            case ANNOTATION_KEYWORD:
                // "const annotation ..." — actually an annotation declaration.
                switchContext(ParserRuleContext.ANNOTATION_DECL);
                return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
            case IDENTIFIER_TOKEN:
                // Identifier may be either the (user-defined) type or the constant name.
                return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword);
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    // Built-in type descriptor: fall through to the typed parse below.
                    break;
                }
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution =
                        recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword);
        }
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
                equalsToken, initializer, semicolonToken);
    }
    /**
     * Parse a constant declaration whose first identifier may be either a
     * user-defined type name or the constant name itself.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @return Parsed constant-declaration node
     */
    private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) {
        STNode varNameOrTypeName = parseStatementStartIdentifier();
        STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName);
        return constDecl;
    }
    /**
     * Parse the component that follows the first identifier in a const decl. The identifier
     * can be either the type-name (a user defined type) or the var-name when the type-name
     * is not present.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @param typeOrVarName Identifier that follows the const-keyword
     * @return Parsed constant-declaration node
     */
    private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) {
        STToken token = peek();
        return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName);
    }
    /**
     * Parse the component that follows the first identifier in a const decl, given
     * the kind of the next token. Disambiguates whether that identifier was the
     * type-name or the var-name.
     *
     * @param nextTokenKind Kind of the next token
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @param typeOrVarName Identifier that follows the const-keyword
     * @return Parsed constant-declaration node
     */
    private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                        STNode constKeyword, STNode typeOrVarName) {
        STNode type;
        STNode variableName;
        switch (nextTokenKind) {
            case IDENTIFIER_TOKEN:
                // "const T name = ..." — the first identifier was the type.
                type = typeOrVarName;
                variableName = parseVariableName();
                break;
            case EQUAL_TOKEN:
                // "const name = ..." — no type descriptor was given.
                variableName = typeOrVarName;
                type = STNodeFactory.createEmptyNode();
                break;
            default:
                // Unexpected token: recover, then retry with the recovered token kind.
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword,
                        typeOrVarName);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName);
        }
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, type, variableName,
                equalsToken, initializer, semicolonToken);
    }
/**
* Parse const keyword.
*
* @return Parsed node
*/
private STNode parseConstantKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CONST_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CONST_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse nil type descriptor.
* <p>
* <code>nil-type-descriptor := ( ) </code>
* </p>
*
* @return Parsed node
*/
private STNode parseNilTypeDescriptor() {
startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);
STNode openParenthesisToken = parseOpenParenthesis();
STNode closeParenthesisToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createNilTypeDescriptorNode(openParenthesisToken, closeParenthesisToken);
}
/**
* Parse typeof expression.
* <p>
* <code>
* typeof-expr := typeof expression
* </code>
*
* @param isRhsExpr
* @return Typeof expression node
*/
private STNode parseTypeofExpression(boolean isRhsExpr) {
STNode typeofKeyword = parseTypeofKeyword();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
}
/**
* Parse typeof-keyword.
*
* @return Typeof-keyword node
*/
private STNode parseTypeofKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.TYPEOF_KEYWORD);
return sol.recoveredNode;
}
}
/**
 * Parse an optional type descriptor, given the preceding type.
 * <p>
 * <code>optional-type-descriptor := type-descriptor ?</code>
 *
 * @param typeDescriptorNode Type descriptor that precedes the question mark
 * @return Optional type descriptor node
 */
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
    STNode questionMark = parseQuestionMark();
    endContext();
    return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMark);
}
/**
 * Parse a unary expression.
 * <p>
 * <code>unary-expr := + expression | - expression | ~ expression | ! expression</code>
 *
 * @param isRhsExpr Whether this expression occurs on the right-hand side of a statement
 * @return Unary expression node
 */
private STNode parseUnaryExpression(boolean isRhsExpr) {
    STNode operator = parseUnaryOperator();
    // The operand binds with unary precedence; a unary expression cannot start an action.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createUnaryExpressionNode(operator, operand);
}
/**
 * Parse a unary operator.
 * <p>
 * <code>UnaryOperator := + | - | ~ | !</code>
 *
 * @return Consumed unary operator token, or the recovered node on mismatch
 */
private STNode parseUnaryOperator() {
    STToken nextToken = peek();
    if (!isUnaryOperator(nextToken.kind)) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.UNARY_OPERATOR);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Check whether the given token kind is a unary operator.
 *
 * @param kind STToken kind to check
 * @return <code>true</code> if the token kind is one of <code>+ - ~ !</code>; <code>false</code> otherwise
 */
private boolean isUnaryOperator(SyntaxKind kind) {
    return kind == SyntaxKind.PLUS_TOKEN ||
            kind == SyntaxKind.MINUS_TOKEN ||
            kind == SyntaxKind.NEGATION_TOKEN ||
            kind == SyntaxKind.EXCLAMATION_MARK_TOKEN;
}
/**
 * Parse an array type descriptor, given the member type.
 * <p>
 * <code>
 * array-type-descriptor := member-type-descriptor [ [ array-length ] ]
 * <br/>
 * member-type-descriptor := type-descriptor
 * <br/>
 * array-length := int-literal | constant-reference-expr | inferred-array-length
 * <br/>
 * inferred-array-length := *
 * </code>
 *
 * @param typeDescriptorNode Member type descriptor preceding the brackets
 * @return Array type descriptor node
 */
private STNode parseArrayTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracket = parseOpenBracket();
    STNode lengthNode = parseArrayLength();
    STNode closeBracket = parseCloseBracket();
    endContext();
    // The array type is modeled as an indexed expression over the member type.
    return STNodeFactory.createIndexedExpressionNode(typeDescriptorNode, openBracket, lengthNode, closeBracket);
}
/**
 * Parse array length.
 * <p>
 * <code>
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * constant-reference-expr := variable-reference-expr
 * </code>
 * </p>
 *
 * @return Parsed array length, or an empty node when the length is omitted
 */
private STNode parseArrayLength() {
    STToken token = peek();
    switch (token.kind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case ASTERISK_TOKEN:
            // Int literal or inferred length (*): the token itself is the length node.
            return consume();
        case CLOSE_BRACKET_TOKEN:
            // Length omitted, e.g. "int[]": represented by an empty node.
            return STNodeFactory.createEmptyNode();
        case IDENTIFIER_TOKEN:
            // constant-reference-expr: a (possibly qualified) identifier.
            return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
        default:
            Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH);
            return sol.recoveredNode;
    }
}
/**
 * Parse annotations.
 * <p>
 * <i>Note: In the Ballerina language specification, annotations-list is specified as
 * one-or-more annotations, and its usage is marked as optional annotations-list.
 * However, for the consistency of the tree, here we make the annotation-list as
 * zero-or-more annotations, and the usage is not-optional.</i>
 * <p>
 * <code>annots := annotation*</code>
 *
 * @return Node list of parsed annotations (possibly empty)
 */
private STNode parseAnnotations() {
    return parseAnnotations(peek().kind);
}
/**
 * Parse zero or more annotations, starting from the given lookahead token kind.
 *
 * @param nextTokenKind Kind of the next token
 * @return Node list of parsed annotations (possibly empty)
 */
private STNode parseAnnotations(SyntaxKind nextTokenKind) {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    // Each annotation starts with '@'; keep consuming until something else shows up.
    for (SyntaxKind kind = nextTokenKind; kind == SyntaxKind.AT_TOKEN; kind = peek().kind) {
        annotations.add(parseAnnotation());
    }
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
 * Parse annotation attachment.
 * <p>
 * <code>annotation := @ annot-tag-reference annot-value</code>
 *
 * @return Parsed annotation node
 */
private STNode parseAnnotation() {
    STNode atToken = parseAtToken();
    STNode annotReference;
    if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
        // No identifier after '@': record a missing reference instead of recovering,
        // so the '@' is still attached to an annotation node.
        annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    } else {
        annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
    }
    // annot-value is parsed as a mapping constructor expression.
    STNode annotValue = parseMappingConstructorExpr();
    return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}
/**
 * Parse the <code>@</code> token.
 *
 * @return Consumed <code>@</code> token, or the recovered node on mismatch
 */
private STNode parseAtToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.AT_TOKEN) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.AT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse metadata. Metadata consists of an optional doc string and
 * an annotations list.
 * <p>
 * <code>metadata := [DocumentationString] annots</code>
 *
 * @param nextTokenKind Kind of the next token
 * @return Metadata node; an empty-metadata node when neither a doc string nor annotations are present
 */
private STNode parseMetaData(SyntaxKind nextTokenKind) {
    STNode docString;
    STNode annotations;
    switch (nextTokenKind) {
        case DOCUMENTATION_LINE:
            // Doc string first, then any annotations that follow it.
            docString = parseDocumentationString();
            annotations = parseAnnotations();
            break;
        case AT_TOKEN:
            // Annotations without a preceding doc string.
            docString = STNodeFactory.createEmptyNode();
            annotations = parseAnnotations(nextTokenKind);
            break;
        default:
            // No metadata present at all.
            return createEmptyMetadata();
    }
    return STNodeFactory.createMetadataNode(docString, annotations);
}
/**
 * Create an empty metadata node.
 *
 * @return A metadata node with no doc string and an empty annotations list
 */
private STNode createEmptyMetadata() {
    STNode noDocString = STNodeFactory.createEmptyNode();
    STNode noAnnotations = STNodeFactory.createNodeList(new ArrayList<>());
    return STNodeFactory.createMetadataNode(noDocString, noAnnotations);
}
/**
 * Parse an is-expression, given its left-hand side.
 * <p>
 * <code>is-expr := expression is type-descriptor</code>
 *
 * @param lhsExpr Expression preceding the <code>is</code> keyword
 * @return Type test expression node
 */
private STNode parseTypeTestExpression(STNode lhsExpr) {
    STNode isKeyword = parseIsKeyword();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_EXPRESSION);
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDesc);
}
/**
 * Parse the <code>is</code> keyword.
 *
 * @return Consumed <code>is</code> keyword token, or the recovered node on mismatch
 */
private STNode parseIsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IS_KEYWORD) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.IS_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a local type definition statement.
 * <p>
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @param annots Annotations attached to the type definition
 * @return Local type definition statement node
 */
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode name = parseTypeName();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, name, typeDesc, semicolon);
}
/**
 * Parse statements that start with an identifier.
 *
 * @param annots Annotations attached to the statement
 * @return Parsed statement node
 */
private STNode parseStatementStartsWithIdentifier(STNode annots) {
    startContext(ParserRuleContext.STMT_START_WITH_IDENTIFIER);
    STNode identifier = parseStatementStartIdentifier();
    // Dispatch on the token that follows the identifier.
    STNode statement = parseStatementStartsWithIdentifier(peek().kind, annots, identifier);
    endContext();
    return statement;
}
/**
 * Parse the remainder of an identifier-started statement, peeking the next token kind.
 *
 * @param annots Annotations attached to the statement
 * @param identifier Already-parsed leading identifier
 * @return Parsed statement node
 */
private STNode parseStatementStartsWithIdentifier(STNode annots, STNode identifier) {
    STToken nextToken = peek();
    return parseStatementStartsWithIdentifier(nextToken.kind, annots, identifier);
}
/**
 * Parse the remainder of a statement that started with an identifier, dispatching
 * on the token that follows the identifier. The identifier may turn out to be the
 * start of a type descriptor (var-decl) or the start of an expression
 * (assignment / compound assignment / call statement).
 *
 * @param nextTokenKind Kind of the token after the identifier
 * @param annots Annotations attached to the statement
 * @param identifier Already-parsed leading identifier
 * @return Parsed statement node
 */
private STNode parseStatementStartsWithIdentifier(SyntaxKind nextTokenKind, STNode annots, STNode identifier) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // "T x ..." or "T? ...": the identifier is a type name of a var-decl.
            return parseTypeDescStartsWithIdentifier(identifier, annots);
        case EQUAL_TOKEN:
        case SEMICOLON_TOKEN:
            // "x = ..." or "x;": the identifier is an expression.
            return parseStamentStartWithExpr(nextTokenKind, annots, identifier);
        case PIPE_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) {
                // "T | ...": union type descriptor of a var-decl.
                return parseTypeDescStartsWithIdentifier(identifier, annots);
            }
            // Intentional fall-through: "x |= ..." is a compound assignment,
            // handled by the default branch below.
        default:
            if (isCompoundBinaryOperator(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(identifier);
            }
            if (isValidExprRhsStart(nextTokenKind)) {
                // The identifier begins a larger expression/action; parse it fully first.
                STNode expression = parseActionOrExpressionInLhs(nextTokenKind, identifier);
                return parseStamentStartWithExpr(annots, expression);
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_IDENTIFIER, annots, identifier);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // Retry with the token kind produced by recovery.
            return parseStatementStartsWithIdentifier(solution.tokenKind, annots, identifier);
    }
}
/**
 * Continue parsing a variable declaration whose type descriptor started with an
 * identifier (a user-defined type name).
 *
 * @param typeDesc Already-parsed leading identifier, treated as the start of the type descriptor
 * @param annots Annotations attached to the declaration
 * @return Variable declaration statement node
 */
private STNode parseTypeDescStartsWithIdentifier(STNode typeDesc, STNode annots) {
    // We now know this is a var-decl; re-label the current context accordingly.
    switchContext(ParserRuleContext.VAR_DECL_STMT);
    startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    // The identifier may be followed by ?, [], | etc., forming a complex type.
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    endContext();
    STNode varName = parseVariableName();
    // Local var-decls reached via this path have no "final" qualifier.
    STNode finalKeyword = STNodeFactory.createEmptyNode();
    return parseVarDeclRhs(annots, finalKeyword, typeDesc, varName, false);
}
/**
 * Parse a statement that consists only of an action or expression.
 *
 * @param nextTokenKind Kind of the next token
 * @param annots Annotations attached to the statement
 * @return Parsed statement node
 */
private STNode parseStamentStartsWithExpr(SyntaxKind nextTokenKind, STNode annots) {
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    STNode actionOrExpr = parseActionOrExpression(nextTokenKind);
    STNode statement = parseStamentStartWithExpr(annots, actionOrExpr);
    endContext();
    return statement;
}
/**
 * Parse a statement that starts with an already-parsed expression.
 *
 * @param annots Annotations attached to the statement
 * @param expression Leading expression of the statement
 * @return Parsed statement node
 */
private STNode parseStamentStartWithExpr(STNode annots, STNode expression) {
    return parseStamentStartWithExpr(peek().kind, annots, expression);
}
/**
 * Parse the component followed by the expression, at the beginning of a statement.
 * The expression may become a var-decl type, an assignment LHS, or a stand-alone
 * expression statement, depending on the next token.
 *
 * @param nextTokenKind Kind of the next token
 * @param annots Annotations attached to the statement
 * @param expression Leading expression of the statement
 * @return Parsed statement node
 */
private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode annots, STNode expression) {
    switch (nextTokenKind) {
        case EQUAL_TOKEN:
            // "expr = ...": an assignment; re-label the context.
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(expression);
        case SEMICOLON_TOKEN:
            // "expr;": a call/action statement.
            return getExpressionAsStatement(expression);
        case IDENTIFIER_TOKEN:
            if (isPossibleArrayType(expression)) {
                // "T[] x ...": what looked like an indexed expression is an
                // array-type var-decl.
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                STNode varName = parseVariableName();
                STNode finalKeyword = STNodeFactory.createEmptyNode();
                return parseVarDeclRhs(annots, finalKeyword, expression, varName, false);
            }
            // Intentional fall-through: identifier after a non-array expression is
            // handled (as an error) by the default branch.
        default:
            if (isCompoundBinaryOperator(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(expression);
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, annots, expression);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // Retry with the token kind produced by recovery.
            return parseStamentStartWithExpr(solution.tokenKind, annots, expression);
    }
}
/**
 * Wrap a stand-alone expression/action into a statement node, validating that it
 * is a kind of expression that may legally appear as a statement.
 *
 * @param expression Expression or action to wrap
 * @return Expression statement node (marked INVALID when the expression cannot
 *         stand alone as a statement)
 */
private STNode getExpressionAsStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
            return parseCallStatement(expression);
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
            return parseActionStatement(expression);
        default:
            // Anything else (e.g. "a + b;") is not a valid statement; report it,
            // but still consume the semicolon and build a node so parsing continues.
            this.errorHandler.reportInvalidNode(null,
                    "left hand side of an assignment must be a variable reference");
            STNode semicolon = parseSemicolon();
            return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon);
    }
}
/**
 * Parse a call statement, given the call expression.
 * <p>
 * <code>
 * call-stmt := call-expr ;
 * <br/>
 * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
 * </code>
 *
 * @param expression Call expression associated with the call statement
 * @return Call statement node
 */
private STNode parseCallStatement(STNode expression) {
    // Reports an error if the expression is not a valid call-expr; parsing continues either way.
    validateExprInCallStmt(expression);
    STNode semicolon = parseSemicolon();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
}
/**
 * Validate that an expression used as a call statement is a function call, a
 * method call, or a check-expr wrapping one of those. Reports an error through
 * the error handler for anything else; never throws.
 *
 * @param expression Expression to validate
 */
private void validateExprInCallStmt(STNode expression) {
    switch (expression.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
            break;
        case CHECK_EXPRESSION:
            // "check expr;" is valid iff the inner expr is; validate recursively.
            validateExprInCallStmt(((STCheckExpressionNode) expression).expression);
            break;
        case REMOTE_METHOD_CALL_ACTION:
            break;
        case BRACED_EXPRESSION:
            // Unwrap parentheses and validate the inner expression.
            validateExprInCallStmt(((STBracedExpressionNode) expression).expression);
            break;
        default:
            if (isMissingNode(expression)) {
                // Already reported during recovery; don't double-report.
                break;
            }
            this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " +
                    "func-call, a method-call or a check-expr");
            break;
    }
}
/**
 * Check whether a node is a missing node.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise
 */
private boolean isMissingNode(STNode node) {
    // Missing tokens are inserted by the error handler during recovery.
    return node instanceof STMissingToken;
}
/**
 * Wrap an action (e.g. a remote method call) and its trailing semicolon into an
 * action statement node.
 *
 * @param action Already-parsed action
 * @return Action statement node
 */
private STNode parseActionStatement(STNode action) {
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolonToken);
}
/**
 * Parse an action that continues the given LHS expression, if the next token
 * starts one.
 *
 * @param tokenKind Kind of the next token
 * @param lhsExpr Expression preceding the action
 * @return Parsed action node, or <code>null</code> when the token does not start
 *         an action (callers use null as the "not an action" signal)
 */
private STNode parseAction(SyntaxKind tokenKind, STNode lhsExpr) {
    switch (tokenKind) {
        case RIGHT_ARROW_TOKEN:
            // "expr -> method(...)": remote method call action.
            return parseRemoteMethodCallAction(lhsExpr);
        default:
            return null;
    }
}
/**
 * Parse a remote method call action, given the starting expression.
 * <p>
 * <code>remote-method-call-action := expression -> method-name ( arg-list )</code>
 *
 * @param expression LHS expression of the action
 * @return Remote method call action node
 */
private STNode parseRemoteMethodCallAction(STNode expression) {
    STNode arrow = parseRightArrow();
    STNode methodName = parseFunctionName();
    STNode openParen = parseOpenParenthesis();
    STNode args = parseArgsList();
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createRemoteMethodCallActionNode(expression, arrow, methodName, openParen, args,
            closeParen);
}
/**
 * Parse the right arrow (<code>-&gt;</code>) token.
 *
 * @return Consumed right-arrow token, or the recovered node on mismatch
 */
private STNode parseRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_ARROW_TOKEN) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.RIGHT_ARROW);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Check whether the given token kind can start a valid LHS expression.
 *
 * @param tokenKind Kind of the next token
 * @return <code>true</code> if this is a start of an expression. <code>false</code> otherwise
 */
private boolean isValidLHSExpression(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case IDENTIFIER_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // NOTE(review): isCompoundBinaryOperator is defined elsewhere; presumably
            // it is false for bare +/- (vs. += / -=), making this effectively true —
            // confirm against its definition.
            return !isCompoundBinaryOperator(tokenKind);
        case OPEN_PAREN_TOKEN:
            // Intentional fall-through: '(' is treated the same as the default (false).
        default:
            return false;
    }
}
/**
 * Parse a parameterized type descriptor.
 * <p>
 * <code>parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter</code>
 *
 * @return Parameterized type descriptor node
 */
private STNode parseParameterizedTypeDescriptor() {
    STNode keyword = parseParameterizedTypeKeyword();
    STNode lt = parseLTToken();
    STNode typeParam = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gt = parseGTToken();
    return STNodeFactory.createParameterizedTypeDescriptorNode(keyword, lt, typeParam, gt);
}
/**
 * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token.
 *
 * @return Consumed keyword token, or the recovered node on mismatch
 */
private STNode parseParameterizedTypeKeyword() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case MAP_KEYWORD: // map type desc
        case FUTURE_KEYWORD: // future type desc
        case TYPEDESC_KEYWORD: // typedesc type desc
            return consume();
        default:
            Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);
            return sol.recoveredNode;
    }
}
/**
 * Parse the <code>&gt;</code> token.
 *
 * @return Consumed <code>&gt;</code> token, or the recovered node on mismatch
 */
private STNode parseGTToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.GT_TOKEN) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.GT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>&lt;</code> token.
 *
 * @return Consumed <code>&lt;</code> token, or the recovered node on mismatch
 */
private STNode parseLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LT_TOKEN) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.LT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a nil literal. Here the nil literal refers only to <code>( )</code>.
 *
 * @return Nil literal node
 */
private STNode parseNilLiteral() {
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParen = parseOpenParenthesis();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse an annotation declaration, given the qualifier.
 *
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword Const keyword (may be an empty node)
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
    startContext(ParserRuleContext.ANNOTATION_DECL);
    STNode annotationKeyword = parseAnnotationKeyword();
    STNode declaration = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    endContext();
    return declaration;
}
/**
 * Parse the <code>annotation</code> keyword.
 *
 * @return Consumed <code>annotation</code> keyword token, or the recovered node on mismatch
 */
private STNode parseAnnotationKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ANNOTATION_KEYWORD) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.ANNOTATION_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the components that follow the <code>annotation</code> keyword of an
 * annotation declaration.
 *
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword Const keyword (may be an empty node)
 * @param annotationKeyword Annotation keyword
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                           STNode annotationKeyword) {
    return parseAnnotationDeclFromType(peek().kind, metadata, qualifier, constKeyword, annotationKeyword);
}
/**
 * Parse the optional type descriptor and the remainder of an annotation
 * declaration, after the <code>annotation</code> keyword, dispatching on the
 * next token kind.
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword Const keyword (may be an empty node)
 * @param annotationKeyword Annotation keyword
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                           STNode constKeyword, STNode annotationKeyword) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
            // An identifier can be either a user-defined type name or the annot-tag.
            return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
        default:
            if (isTypeStartingToken(nextTokenKind)) {
                // A built-in type starts here; fall out of the switch and parse it.
                break;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier,
                    constKeyword, annotationKeyword);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword,
                    annotationKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
    STNode annotTag = parseAnnotationTag();
    // Fix: an annotation declaration continues with the optional "on attach-points"
    // clause and a semicolon — the same way the identifier-started path (see
    // parseAnnotationDeclWithOptionalType) does. The previous code parsed
    // "= expr ;" and built a ConstantDeclarationNode, which does not match the
    // annot-decl grammar and yielded the wrong node kind.
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse an annotation tag.
 * <p>
 * <code>annot-tag := identifier</code>
 *
 * @return Consumed annot-tag identifier token, or the recovered node on mismatch
 */
private STNode parseAnnotationTag() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.ANNOTATION_TAG);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse an annotation declaration whose first token after the keyword is an
 * identifier — which may be a user-defined type name or the annot-tag itself.
 *
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword Const keyword (may be an empty node)
 * @param annotationKeyword Annotation keyword
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword) {
    STNode typeDescOrAnnotTag = parseAnnotationTag();
    // NOTE(review): parseAnnotationTag consumes a plain identifier token, so a
    // QUALIFIED_NAME_REFERENCE result here looks only reachable via error
    // recovery — confirm against the recovery paths.
    if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // Qualified name must be the type; the next identifier is the annot-tag.
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag, annotTag);
    }
    // Ambiguous single identifier: let the next token decide its role.
    return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
}
/**
 * Parse the component that follows the first identifier in an annotation decl.
 * The identifier can be either the type-name (a user defined type) or the
 * annot-tag, where the type-name is not present.
 *
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword (may be an empty node)
 * @param annotationKeyword Annotation keyword
 * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                      STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    return parseAnnotationDeclRhs(peek().kind, metadata, qualifier, constKeyword, annotationKeyword,
            typeDescOrAnnotTag);
}
/**
 * Resolve whether the first identifier of an annotation declaration was a
 * type-name or the annot-tag, based on the next token, then continue with the
 * attach-points clause.
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword Const keyword (may be an empty node)
 * @param annotationKeyword Annotation keyword
 * @param typeDescOrAnnotTag Ambiguous identifier parsed earlier
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                      STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STNode typeDesc;
    STNode annotTag;
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
            // A second identifier follows: the first one was the type.
            typeDesc = typeDescOrAnnotTag;
            annotTag = parseAnnotationTag();
            break;
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
            // Declaration ends (or attach points start): the identifier was the tag.
            typeDesc = STNodeFactory.createEmptyNode();
            annotTag = typeDescOrAnnotTag;
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,
                    annotationKeyword, typeDescOrAnnotTag);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // Retry with the token kind produced by recovery.
            return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag);
    }
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse the optional attach-points clause of an annotation declaration, peeking
 * the next token kind.
 *
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword Const keyword (may be an empty node)
 * @param annotationKeyword Annotation keyword
 * @param typeDesc Type descriptor (may be an empty node)
 * @param annotTag Annotation tag
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
    return parseAnnotationDeclAttachPoints(peek().kind, metadata, qualifier, constKeyword, annotationKeyword,
            typeDesc, annotTag);
}
/**
 * Parse the optional <code>on attach-points</code> clause and the closing
 * semicolon of an annotation declaration, then build the declaration node.
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata of the declaration
 * @param qualifier Qualifier that precedes the declaration
 * @param constKeyword Const keyword (may be an empty node)
 * @param annotationKeyword Annotation keyword
 * @param typeDesc Type descriptor (may be an empty node)
 * @param annotTag Annotation tag
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                               STNode constKeyword, STNode annotationKeyword, STNode typeDesc,
                                               STNode annotTag) {
    STNode onKeyword;
    STNode attachPoints;
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
            // No attach-points clause: both parts are empty nodes.
            onKeyword = STNodeFactory.createEmptyNode();
            attachPoints = STNodeFactory.createEmptyNode();
            break;
        case ON_KEYWORD:
            onKeyword = parseOnKeyword();
            attachPoints = parseAnnotationAttachPoints();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier,
                    constKeyword, annotationKeyword, typeDesc, annotTag);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // Retry with the token kind produced by recovery.
            return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword,
                    annotationKeyword, typeDesc, annotTag);
    }
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
            typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
}
/**
 * Parse annotation attach points.
 * <p>
 * <code>
 * annot-attach-points := annot-attach-point (, annot-attach-point)*
 * <br/><br/>
 * annot-attach-point := dual-attach-point | source-only-attach-point
 * <br/><br/>
 * dual-attach-point := [source] dual-attach-point-ident
 * <br/><br/>
 * dual-attach-point-ident :=
 *     [object] type
 *     | [object|resource] function
 *     | parameter
 *     | return
 *     | service
 *     | [object|record] field
 * <br/><br/>
 * source-only-attach-point := source source-only-attach-point-ident
 * <br/><br/>
 * source-only-attach-point-ident :=
 *     annotation
 *     | external
 *     | var
 *     | const
 *     | listener
 *     | worker
 * </code>
 *
 * @return Node list of attach points (with their separating commas)
 */
private STNode parseAnnotationAttachPoints() {
    startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
    List<STNode> attachPoints = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndAnnotAttachPointList(nextToken.kind)) {
        // "on" with no attach point at all: report and substitute a missing token.
        endContext();
        this.errorHandler.reportMissingTokenError("missing attach point");
        return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    }
    STNode attachPoint = parseAnnotationAttachPoint();
    attachPoints.add(attachPoint);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndAnnotAttachPointList(nextToken.kind)) {
        // A null comma means the list terminator was reached during recovery.
        leadingComma = parseAttachPointEnd();
        if (leadingComma == null) {
            break;
        }
        // Separators are stored in the same list as the attach points.
        attachPoints.add(leadingComma);
        attachPoint = parseAnnotationAttachPoint();
        if (attachPoint == null) {
            // Trailing comma with no attach point after it: report, substitute a
            // missing token, and stop.
            this.errorHandler.reportMissingTokenError("missing attach point");
            attachPoint = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            attachPoints.add(attachPoint);
            break;
        }
        attachPoints.add(attachPoint);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(attachPoints);
}
/**
 * Parse the separator (or terminator) after an annotation attach point.
 *
 * @return Comma token when the list continues; <code>null</code> when it ends
 */
private STNode parseAttachPointEnd() {
    return parseAttachPointEnd(peek().kind);
}
/**
 * Parse the separator (or terminator) after an annotation attach point,
 * dispatching on the given token kind.
 *
 * @param nextTokenKind Kind of the next token
 * @return Comma token when the list continues; <code>null</code> when the list
 *         ends (semicolon, or recovery decided to terminate)
 */
private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
            // Null marks the end of the attach-point list; the semicolon is left
            // for the caller to consume.
            return null;
        case COMMA_TOKEN:
            return consume();
        default:
            Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);
            if (sol.action == Action.REMOVE) {
                return sol.recoveredNode;
            }
            // Recovery inserted a token: continue only if it inserted a comma.
            return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;
    }
}
/**
 * Check whether the given token kind terminates an annotation attach-point list.
 *
 * @param tokenKind Kind of the token to check
 * @return <code>true</code> for EOF or semicolon; <code>false</code> otherwise
 */
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse a single annotation attach point.
 *
 * @return Attach point node, or <code>null</code> at end of input
 */
private STNode parseAnnotationAttachPoint() {
    STToken nextToken = peek();
    return parseAnnotationAttachPoint(nextToken.kind);
}
/**
 * Parse a single annotation attach point, dispatching on the given token kind.
 * Source-only idents require the <code>source</code> keyword; dual idents may
 * appear with or without it.
 *
 * @param nextTokenKind Kind of the next token
 * @return Attach point node, or <code>null</code> at end of input
 */
private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
            // Null signals the caller that input ended mid-list.
            return null;
        // These are source-only attach-point idents: a "source" keyword is
        // expected before them, so all of them fall through to SOURCE_KEYWORD.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);
        // Dual attach-point idents: valid without the "source" keyword.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case RESOURCE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);
            return solution.recoveredNode;
    }
}
/**
 * Parse the <code>source</code> keyword.
 *
 * @return Consumed <code>source</code> keyword token, or the recovered node on mismatch
 */
private STNode parseSourceKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SOURCE_KEYWORD) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.SOURCE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the attach-point ident, given the preceding <code>source</code> keyword.
 * <p>
 * <code>
 * source-only-attach-point-ident := annotation | external | var | const | listener | worker
 * <br/><br/>
 * dual-attach-point-ident := [object] type | [object|resource] function | parameter
 *                            | return | service | [object|record] field
 * </code>
 *
 * @param sourceKeyword Source keyword
 * @return Attach point node
 */
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    STToken nextToken = peek();
    return parseAttachPointIdent(nextToken.kind, sourceKeyword);
}
/**
 * Parse the attach-point ident following the <code>source</code> keyword,
 * dispatching on the given token kind.
 *
 * @param nextTokenKind Kind of the next token
 * @param sourceKeyword Source keyword (may be an empty node)
 * @return Attach point node
 */
private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) {
    switch (nextTokenKind) {
        // Source-only idents: a single keyword completes the attach point.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode secondIdent = STNodeFactory.createEmptyNode();
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);
        // Dual idents: some of these may be followed by a second ident
        // (e.g. "object type", "record field").
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // Recovery produced a first ident; continue as a dual attach point.
            firstIdent = solution.recoveredNode;
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
    }
}
/**
 * Parse dual-attach-point ident.
 *
 * @param sourceKeyword Source keyword (may be an empty node)
 * @param firstIdent first part of the dual attach-point
 * @return Parsed attach point node
 */
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            // "object" must be followed by type | function | field.
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            // "resource" must be followed by "function".
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            // "record" must be followed by "field".
            secondIdent = parseFieldIdent();
            break;
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        default:
            // Single-keyword attach points (and recovered nodes) take no second ident.
            secondIdent = STNodeFactory.createEmptyNode();
            break;
    }
    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);
}
/**
 * Parse the idents that are supported after the <code>object</code> ident:
 * <code>type</code>, <code>function</code>, or <code>field</code>.
 *
 * @return Consumed ident token, or the recovered node on mismatch
 */
private STNode parseIdentAfterObjectIdent() {
    STToken nextToken = peek();
    SyntaxKind kind = nextToken.kind;
    if (kind == SyntaxKind.TYPE_KEYWORD || kind == SyntaxKind.FUNCTION_KEYWORD ||
            kind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    }
    // Unexpected token: delegate to the error handler and use its recovery result.
    Solution sol = recover(nextToken, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
    return sol.recoveredNode;
}
/**
 * Parse the <code>function</code> ident of an attach point.
 *
 * @return Consumed <code>function</code> keyword token, or the recovered node on mismatch
 */
private STNode parseFunctionIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FUNCTION_KEYWORD) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.FUNCTION_IDENT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>field</code> ident of an attach point.
 *
 * @return Consumed <code>field</code> keyword token, or the recovered node on mismatch
 */
private STNode parseFieldIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FIELD_KEYWORD) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.FIELD_IDENT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse an XML namespace declaration.
 * <p>
 * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
 * <br/>
 * xml-namespace-uri := simple-const-expr
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @return XML namespace declaration node
 */
private STNode parseXMLNamepsaceDeclaration() {
    // NOTE(review): the method name misspells "Namespace"; kept as-is for caller compatibility.
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode uri = parseXMLNamespaceUri();
    STNode declaration = parseXMLDeclRhs(xmlnsKeyword, uri);
    endContext();
    return declaration;
}
/**
 * Parse the <code>xmlns</code> keyword.
 *
 * @return Consumed <code>xmlns</code> keyword token, or the recovered node on mismatch
 */
private STNode parseXMLNSKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XMLNS_KEYWORD) {
        // Unexpected token: delegate to the error handler and use its recovery result.
        Solution sol = recover(nextToken, ParserRuleContext.XMLNS_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the namespace URI of an xmlns declaration and validate that it is a
 * string-compatible constant expression.
 *
 * @return Parsed namespace-URI node (returned even when invalid, after reporting)
 */
private STNode parseXMLNamespaceUri() {
    STNode expr = parseConstExpr();
    switch (expr.kind) {
        // String literal, or a (qualified) constant reference that may hold a string.
        case STRING_LITERAL:
        case IDENTIFIER_TOKEN:
        case QUALIFIED_NAME_REFERENCE:
            break;
        default:
            // Other const-exprs (numbers, booleans, ...) cannot be namespace URIs.
            this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string");
    }
    return expr;
}
/**
 * Parse a constant expression.
 *
 * @return Parsed constant expression node
 */
private STNode parseConstExpr() {
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STToken nextToken = peek();
    STNode expr;
    switch (nextToken.kind) {
        // Simple literals: the token itself is the expression.
        case STRING_LITERAL:
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            expr = consume();
            break;
        case IDENTIFIER_TOKEN:
            // Constant reference: a (possibly qualified) identifier.
            expr = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            break;
        case OPEN_BRACE_TOKEN:
            // Intentional fall-through: '{' is currently handled by recovery,
            // same as any other unexpected token.
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);
            expr = solution.recoveredNode;
            break;
    }
    endContext();
    return expr;
}
/**
 * Parse the portion after the namespace-uri of an XML namespace declaration.
 *
 * @param xmlnsKeyword XMLNS keyword
 * @param namespaceUri Namespace URI
 * @return XML namespace declaration node
 */
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) {
    STToken nextToken = peek();
    return parseXMLDeclRhs(nextToken.kind, xmlnsKeyword, namespaceUri);
}
    private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) {
        // Defaults for the optional "as prefix" clause.
        STNode asKeyword = STNodeFactory.createEmptyNode();
        STNode namespacePrefix = STNodeFactory.createEmptyNode();
        switch (nextTokenKind) {
            case AS_KEYWORD:
                asKeyword = parseAsKeyword();
                namespacePrefix = parseNamespacePrefix();
                break;
            case SEMICOLON_TOKEN:
                // No prefix clause; fall straight through to the semicolon.
                break;
            default:
                STToken token = peek();
                Solution solution =
                        recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri);
                // If recovery removed the bad token, it already produced the node.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Otherwise retry with the token kind recovery decided on.
                return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri);
        }
        STNode semicolon = parseSemicolon();
        return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
                semicolon);
    }
/**
* Parse import prefix.
*
* @return Parsed node
*/
private STNode parseNamespacePrefix() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
return sol.recoveredNode;
}
}
/**
* Parse named worker declaration.
* <p>
* <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code>
*
* @param annots Annotations attached to the worker decl
* @return Parsed node
*/
private STNode parseNamedWorkerDeclaration(STNode annots) {
startContext(ParserRuleContext.NAMED_WORKER_DECL);
STNode workerKeyword = parseWorkerKeyword();
STNode workerName = parseWorkerName();
STNode returnTypeDesc = parseReturnTypeDescriptor();
STNode workerBody = parseBlockNode();
endContext();
return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, workerName, returnTypeDesc,
workerBody);
}
private STNode parseReturnTypeDescriptor() {
STToken token = peek();
if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
return STNodeFactory.createEmptyNode();
}
STNode returnsKeyword = consume();
STNode annot = parseAnnotations();
STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
}
/**
* Parse worker keyword.
*
* @return Parsed node
*/
private STNode parseWorkerKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.WORKER_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse worker name.
* <p>
* <code>worker-name := identifier</code>
*
* @return Parsed node
*/
private STNode parseWorkerName() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.WORKER_NAME);
return sol.recoveredNode;
}
}
/**
* Parse documentation string.
* <p>
* <code>DocumentationString := DocumentationLine +</code>
* <p>
* Refer {@link BallerinaLexer
*
* @return Parsed node
*/
private STNode parseDocumentationString() {
List<STNode> docLines = new ArrayList<>();
STToken nextToken = peek();
while (nextToken.kind == SyntaxKind.DOCUMENTATION_LINE) {
docLines.add(consume());
nextToken = peek();
}
STNode documentationLines = STNodeFactory.createNodeList(docLines);
return STNodeFactory.createDocumentationStringNode(documentationLines);
}
/**
* Parse lock statement.
* <code>lock-stmt := lock block-stmt ;</code>
*
* @return Lock statement
*/
private STNode parseLockStatement() {
startContext(ParserRuleContext.LOCK_STMT);
STNode lockKeyword = parseLockKeyword();
STNode blockStatement = parseBlockNode();
endContext();
return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement);
}
/**
* Parse lock-keyword.
*
* @return lock-keyword node
*/
private STNode parseLockKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LOCK_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.LOCK_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Checks whether the given expression is a possible array-type-desc.
* <br/>
* i.e.: a member-access-expr, where its container is also a member-access.
* <code>a[b][]</code>
*
* @param expression EXpression to check
* @return <code>true</code> if the expression provided is a possible array-type desc. <code>false</code> otherwise
*/
private boolean isPossibleArrayType(STNode expression) {
switch (expression.kind) {
case SIMPLE_NAME_REFERENCE:
case QUALIFIED_NAME_REFERENCE:
return true;
case INDEXED_EXPRESSION:
return isPossibleArrayType(((STIndexedExpressionNode) expression).containerExpression);
default:
return false;
}
}
/**
* Parse union type descriptor.
* union-type-descriptor := type-descriptor | type-descriptor
*
* @param leftTypeDesc Type desc in the LHS os the union type desc.
* @param context Current context.
* @return parsed union type desc node
*/
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context) {
STNode pipeToken = parsePipeToken();
STNode rightTypeDesc = parseTypeDescriptor(context);
return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
}
/**
* Parse pipe token.
*
* @return parsed pipe token node
*/
private STNode parsePipeToken() {
STToken token = peek();
if (token.kind == SyntaxKind.PIPE_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.PIPE);
return sol.recoveredNode;
}
}
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
switch (nodeKind) {
case IDENTIFIER_TOKEN:
case SERVICE_KEYWORD:
case RECORD_KEYWORD:
case OBJECT_KEYWORD:
case ABSTRACT_KEYWORD:
case CLIENT_KEYWORD:
case OPEN_PAREN_TOKEN:
case MAP_KEYWORD:
case FUTURE_KEYWORD:
case TYPEDESC_KEYWORD:
case ERROR_KEYWORD:
case STREAM_KEYWORD:
case TABLE_KEYWORD:
case FUNCTION_KEYWORD:
case OPEN_BRACKET_TOKEN:
case READONLY_KEYWORD:
return true;
default:
return isSimpleType(nodeKind);
}
}
static boolean isSimpleType(SyntaxKind nodeKind) {
switch (nodeKind) {
case INT_KEYWORD:
case FLOAT_KEYWORD:
case DECIMAL_KEYWORD:
case BOOLEAN_KEYWORD:
case STRING_KEYWORD:
case BYTE_KEYWORD:
case XML_KEYWORD:
case JSON_KEYWORD:
case HANDLE_KEYWORD:
case ANY_KEYWORD:
case ANYDATA_KEYWORD:
case NEVER_KEYWORD:
case SERVICE_KEYWORD:
case VAR_KEYWORD:
case ERROR_KEYWORD:
case STREAM_KEYWORD:
case READONLY_KEYWORD:
return true;
case TYPE_DESC:
return true;
default:
return false;
}
}
    /**
     * Map a built-in type keyword to the syntax kind of its type descriptor.
     * Any keyword without a dedicated descriptor kind maps to the generic
     * {@code TYPE_DESC}.
     *
     * @param typeKeyword Keyword token kind
     * @return Corresponding type-descriptor syntax kind
     */
    private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {
        switch (typeKeyword) {
            case INT_KEYWORD:
                return SyntaxKind.INT_TYPE_DESC;
            case FLOAT_KEYWORD:
                return SyntaxKind.FLOAT_TYPE_DESC;
            case DECIMAL_KEYWORD:
                return SyntaxKind.DECIMAL_TYPE_DESC;
            case BOOLEAN_KEYWORD:
                return SyntaxKind.BOOLEAN_TYPE_DESC;
            case STRING_KEYWORD:
                return SyntaxKind.STRING_TYPE_DESC;
            case BYTE_KEYWORD:
                return SyntaxKind.BYTE_TYPE_DESC;
            case XML_KEYWORD:
                return SyntaxKind.XML_TYPE_DESC;
            case JSON_KEYWORD:
                return SyntaxKind.JSON_TYPE_DESC;
            case HANDLE_KEYWORD:
                return SyntaxKind.HANDLE_TYPE_DESC;
            case ANY_KEYWORD:
                return SyntaxKind.ANY_TYPE_DESC;
            case ANYDATA_KEYWORD:
                return SyntaxKind.ANYDATA_TYPE_DESC;
            case NEVER_KEYWORD:
                return SyntaxKind.NEVER_TYPE_DESC;
            case SERVICE_KEYWORD:
                return SyntaxKind.SERVICE_TYPE_DESC;
            case VAR_KEYWORD:
                return SyntaxKind.VAR_TYPE_DESC;
            default:
                // Fallback for keywords with no dedicated descriptor kind.
                return SyntaxKind.TYPE_DESC;
        }
    }
/**
* Parse fork-keyword.
*
* @return Fork-keyword node
*/
private STNode parseForkKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FORK_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.FORK_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse multiple named worker declarations.
*
* @return named-worker-declarations node array
*/
private STNode parseMultileNamedWorkerDeclarations() {
STToken token = peek();
ArrayList<STNode> workers = new ArrayList<>();
while (!isEndOfStatements(token.kind)) {
STNode stmt = parseStatement();
if (stmt == null) {
break;
}
switch (stmt.kind) {
case NAMED_WORKER_DECLARATION:
workers.add(stmt);
break;
default:
this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here");
break;
}
token = peek();
}
if (workers.isEmpty()) {
this.errorHandler.reportInvalidNode(null, "Fork Statement must contain atleast one named-worker");
}
STNode namedWorkers = STNodeFactory.createNodeList(workers);
return namedWorkers;
}
/**
* Parse fork statement.
* <code>fork-stmt := fork { named-worker-decl+ }</code>
*
* @return Fork statement
*/
private STNode parseForkStatement() {
startContext(ParserRuleContext.FORK_STMT);
STNode forkKeyword = parseForkKeyword();
STNode openBrace = parseOpenBrace();
STNode namedWorkerDeclarations = parseMultileNamedWorkerDeclarations();
STNode closeBrace = parseCloseBrace();
endContext();
return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
}
/**
* Parse decimal floating point literal.
*
* @return Parsed node
*/
private STNode parseDecimalFloatingPointLiteral() {
STToken token = peek();
if (token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL);
return sol.recoveredNode;
}
}
/**
* Parse hex floating point literal.
*
* @return Parsed node
*/
private STNode parseHexFloatingPointLiteral() {
STToken token = peek();
if (token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.HEX_FLOATING_POINT_LITERAL);
return sol.recoveredNode;
}
}
/**
* Parse trap expression.
* <p>
* <code>
* trap-expr := trap expression
* </code>
*
* @param isRhsExpr
* @return Trap expression node
*/
private STNode parseTrapExpression(boolean isRhsExpr) {
STNode trapKeyword = parseTrapKeyword();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createTrapExpressionNode(trapKeyword, expr);
}
/**
* Parse trap-keyword.
*
* @return Trap-keyword node
*/
private STNode parseTrapKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TRAP_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.TRAP_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse list constructor expression.
* <p>
* <code>
* list-constructor-expr := [ [ expr-list ] ]
* <br/>
* expr-list := expression (, expression)*
* </code>
*
* @return Parsed node
*/
private STNode parseListConstructorExpr() {
startContext(ParserRuleContext.LIST_CONSTRUCTOR);
STNode openBracket = parseOpenBracket();
STNode expressions = parseOptionalExpressionsList();
STNode closeBracket = parseCloseBracket();
endContext();
return STNodeFactory.createListConstructorExpressionNode(openBracket, expressions, closeBracket);
}
/**
* Parse optional expression list.
*
* @return Parsed node
*/
private STNode parseOptionalExpressionsList() {
List<STNode> expressions = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfExpressionsList(nextToken.kind)) {
return STNodeFactory.createNodeList(new ArrayList<>());
}
STNode expr = parseExpression();
expressions.add(expr);
nextToken = peek();
STNode leadingComma;
while (!isEndOfExpressionsList(nextToken.kind)) {
leadingComma = parseComma();
expressions.add(leadingComma);
expr = parseExpression();
expressions.add(expr);
nextToken = peek();
}
return STNodeFactory.createNodeList(expressions);
}
/**
* Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt</code>
*
* @return foreach statement
*/
private STNode parseForEachStatement() {
startContext(ParserRuleContext.FOREACH_STMT);
STNode forEachKeyword = parseForEachKeyword();
STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
STNode varName = parseVariableName();
STNode inKeyword = parseInKeyword();
STNode actionOrExpr = parseActionOrExpression();
STNode blockStatement = parseBlockNode();
endContext();
return STNodeFactory.createForEachStatementNode(forEachKeyword, type, varName, inKeyword, actionOrExpr,
blockStatement);
}
/**
* Parse foreach-keyword.
*
* @return ForEach-keyword node
*/
private STNode parseForEachKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FOREACH_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.FOREACH_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse in-keyword.
*
* @return In-keyword node
*/
private STNode parseInKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.IN_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.IN_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse type cast expression.
* <p>
* <code>
* type-cast-expr := < type-cast-param > expression
* <br/>
* type-cast-param := [annots] type-descriptor | annots
* </code>
*
* @return Parsed node
*/
private STNode parseTypeCastExpr(boolean isRhsExpr) {
startContext(ParserRuleContext.TYPE_CAST_EXPRESSION);
STNode ltToken = parseLTToken();
STNode typeCastParam = parseTypeCastParam();
STNode gtToken = parseGTToken();
endContext();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
}
private STNode parseTypeCastParam() {
STNode annot;
STNode type;
STToken token = peek();
switch (token.kind) {
case AT_TOKEN:
annot = parseAnnotations();
token = peek();
if (isTypeStartingToken(token.kind)) {
type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
} else {
type = STNodeFactory.createEmptyNode();
}
break;
default:
annot = STNodeFactory.createEmptyNode();
type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
break;
}
return STNodeFactory.createTypeCastParamNode(annot, type);
}
/**
* Parse table constructor expression.
* <p>
* <code>
* table-constructor-expr-rhs := [ [row-list] ]
* </code>
*
* @param tableKeyword tableKeyword that precedes this rhs
* @param keySpecifier keySpecifier that precedes this rhs
* @return Parsed node
*/
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
STNode openBracket = parseOpenBracket();
STNode rowList = parseRowList();
STNode closeBracket = parseCloseBracket();
return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
closeBracket);
}
/**
* Parse table-keyword.
*
* @return Table-keyword node
*/
private STNode parseTableKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TABLE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.TABLE_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse table rows.
* <p>
* <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
*
* @return Parsed node
*/
private STNode parseRowList() {
List<STNode> mappings = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfMappingConstructorsList(nextToken.kind)) {
return STNodeFactory.createNodeList(new ArrayList<>());
}
STNode mapExpr = parseMappingConstructorExpr();
mappings.add(mapExpr);
nextToken = peek();
STNode leadingComma;
while (!isEndOfMappingConstructorsList(nextToken.kind)) {
leadingComma = parseComma();
mappings.add(leadingComma);
mapExpr = parseMappingConstructorExpr();
mappings.add(mapExpr);
nextToken = peek();
}
return STNodeFactory.createNodeList(mappings);
}
private boolean isEndOfMappingConstructorsList(SyntaxKind tokenKind) {
switch (tokenKind) {
case COMMA_TOKEN:
case OPEN_BRACE_TOKEN:
return false;
default:
return isEndOfMappingConstructor(tokenKind);
}
}
/**
* Parse key specifier.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier() {
startContext(ParserRuleContext.KEY_SPECIFIER);
STNode keyKeyword = parseKeyKeyword();
STNode openParen = parseOpenParenthesis();
STNode fieldNames = parseFieldNames();
STNode closeParen = parseCloseParenthesis();
endContext();
return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
}
/**
* Parse key-keyword.
*
* @return Key-keyword node
*/
private STNode parseKeyKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.KEY_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.KEY_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse field names.
* <p>
* <code>field-name-list := [ field-name (, field-name)* ]</code>
*
* @return Parsed node
*/
private STNode parseFieldNames() {
List<STNode> fieldNames = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfFieldNamesList(nextToken.kind)) {
return STNodeFactory.createNodeList(new ArrayList<>());
}
STNode fieldName = parseVariableName();
fieldNames.add(fieldName);
nextToken = peek();
STNode leadingComma;
while (!isEndOfFieldNamesList(nextToken.kind)) {
leadingComma = parseComma();
fieldNames.add(leadingComma);
fieldName = parseVariableName();
fieldNames.add(fieldName);
nextToken = peek();
}
return STNodeFactory.createNodeList(fieldNames);
}
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
switch (tokenKind) {
case COMMA_TOKEN:
case IDENTIFIER_TOKEN:
return false;
default:
return true;
}
}
    /**
     * Parse error type descriptor.
     * <p>
     * error-type-descriptor := error [error-type-param]
     * error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
     * detail-type-descriptor := type-descriptor
     * inferred-type-descriptor := *
     * </p>
     *
     * @return Parsed node
     */
    private STNode parseErrorTypeDescriptor() {
        STNode errorKeywordToken = parseErrorKeyWord();
        STNode errorTypeParamsNode;
        STToken nextToken = peek();
        STToken nextNextToken = peek(2);
        // NOTE(review): the peek(2) == GT check looks like it tolerates a
        // missing '<' (e.g. `error X>`); confirm this is intentional and not a
        // typo for nextNextToken.kind == LT_TOKEN.
        if (nextToken.kind == SyntaxKind.LT_TOKEN || nextNextToken.kind == SyntaxKind.GT_TOKEN) {
            errorTypeParamsNode = parseErrorTypeParamsNode();
        } else {
            errorTypeParamsNode = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);
    }
/**
* Parse error type param node.
* <p>
* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
* detail-type-descriptor := type-descriptor
* inferred-type-descriptor := *
* </p>
*
* @return Parsed node
*/
private STNode parseErrorTypeParamsNode() {
STNode ltToken = parseLTToken();
STNode parameter;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
parameter = consume();
} else {
parameter = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
}
STNode gtToken = parseGTToken();
return STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);
}
/**
* Parse error-keyword.
*
* @return Parsed error-keyword node
*/
private STNode parseErrorKeyWord() {
STToken token = peek();
if (token.kind == SyntaxKind.ERROR_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.ERROR_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse stream type descriptor.
* <p>
* stream-type-descriptor := stream [stream-type-parameters]
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type descriptor node
*/
private STNode parseStreamTypeDescriptor() {
STNode streamKeywordToken = parseStreamKeyword();
STNode streamTypeParamsNode;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.LT_TOKEN) {
streamTypeParamsNode = parseStreamTypeParamsNode();
} else {
streamTypeParamsNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
}
/**
* Parse stream type params node.
* <p>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type params node
*/
private STNode parseStreamTypeParamsNode() {
STNode ltToken = parseLTToken();
startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
STNode leftTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
endContext();
return streamTypedesc;
}
    // Dispatch on the token following the first stream type-param (',' or '>').
    private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
        return parseStreamTypeParamsNode(peek().kind, ltToken, leftTypeDescNode);
    }
    private STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) {
        STNode commaToken, rightTypeDescNode, gtToken;
        switch (nextTokenKind) {
            case COMMA_TOKEN:
                // Second (completion/error) type parameter follows the comma.
                commaToken = parseComma();
                rightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
                break;
            case GT_TOKEN:
                // Single type parameter; comma and RHS stay empty.
                commaToken = STNodeFactory.createEmptyNode();
                rightTypeDescNode = STNodeFactory.createEmptyNode();
                break;
            default:
                Solution solution =
                        recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);
                // If recovery removed the bad token, it already produced the node.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Otherwise retry with the token kind recovery decided on.
                return parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode);
        }
        gtToken = parseGTToken();
        return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
                gtToken);
    }
/**
* Parse stream-keyword.
*
* @return Parsed stream-keyword node
*/
private STNode parseStreamKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STREAM_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.STREAM_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse let expression.
* <p>
* <code>
* let-expr := let let-var-decl [, let-var-decl]* in expression
* </code>
*
* @return Parsed node
*/
private STNode parseLetExpression(boolean isRhsExpr) {
STNode letKeyword = parseLetKeyword();
STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
STNode inKeyword = parseInKeyword();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
}
/**
* Parse let-keyword.
*
* @return Let-keyword node
*/
private STNode parseLetKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LET_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.LET_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse let variable declarations.
* <p>
* <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
*
* @return Parsed node
*/
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
startContext(context);
List<STNode> varDecls = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfLetVarDeclarations(nextToken.kind)) {
endContext();
this.errorHandler.reportMissingTokenError("missing let variable declaration");
return STNodeFactory.createNodeList(varDecls);
}
STNode varDec = parseLetVarDec(isRhsExpr);
varDecls.add(varDec);
nextToken = peek();
STNode leadingComma;
while (!isEndOfLetVarDeclarations(nextToken.kind)) {
leadingComma = parseComma();
varDecls.add(leadingComma);
varDec = parseLetVarDec(isRhsExpr);
varDecls.add(varDec);
nextToken = peek();
}
endContext();
return STNodeFactory.createNodeList(varDecls);
}
private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
switch (tokenKind) {
case COMMA_TOKEN:
case AT_TOKEN:
return false;
case IN_KEYWORD:
return true;
default:
return !isTypeStartingToken(tokenKind);
}
}
/**
* Parse let variable declaration.
* <p>
* <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
*
* @return Parsed node
*/
private STNode parseLetVarDec(boolean isRhsExpr) {
STNode annot = parseAnnotations();
STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
STNode varName = parseVariableName();
STNode assign = parseAssignOp();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createLetVariableDeclarationNode(annot, type, varName, assign, expression);
}
/**
* Parse raw backtick string template expression.
* <p>
* <code>BacktickString := `expression`</code>
*
* @return Template expression node
*/
private STNode parseTemplateExpression() {
STNode type = STNodeFactory.createEmptyNode();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
private STNode parseTemplateContent() {
List<STNode> items = new ArrayList<>();
STToken nextToken = peek();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode contentItem = parseTemplateItem();
items.add(contentItem);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
private boolean isEndOfBacktickContent(SyntaxKind kind) {
switch (kind) {
case EOF_TOKEN:
case BACKTICK_TOKEN:
return true;
default:
return false;
}
}
private STNode parseTemplateItem() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return parseInterpolation();
}
return consume();
}
/**
* Parse string template expression.
* <p>
* <code>string-template-expr := string ` expression `</code>
*
* @return String template expression node
*/
private STNode parseStringTemplateExpression() {
STNode type = parseStringKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
/**
* Parse <code>string</code> keyword.
*
* @return string keyword node
*/
private STNode parseStringKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STRING_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.STRING_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse XML template expression.
* <p>
* <code>xml-template-expr := xml BacktickString</code>
*
* @return XML template expression
*/
private STNode parseXMLTemplateExpression() {
STNode xmlKeyword = parseXMLKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContentAsXML();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
startingBackTick, content, endingBackTick);
}
/**
* Parse <code>xml</code> keyword.
*
* @return xml keyword node
*/
private STNode parseXMLKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.XML_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.XML_KEYWORD);
return sol.recoveredNode;
}
}
    /**
     * Parse the content of the template string as XML. This method first reads the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the accumulated text as XML, handing the collected interpolation
     * expressions over to the XML parser.
     *
     * @return XML node
     */
    private STNode parseTemplateContentAsXML() {
        // Interpolation expressions, in source order; the XML parser consumes
        // them whenever it encounters a "${}" placeholder in the text.
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder xmlStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                xmlStringBuilder.append(((STToken) contentItem).text());
            } else {
                // Substitute each interpolation with a "${}" marker in the text
                // and queue the actual expression node.
                xmlStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }
        // Re-lex and re-parse the accumulated text with the XML front-end.
        TextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader()));
        XMLParser xmlParser = new XMLParser(tokenReader, expressions);
        return xmlParser.parse();
    }
/**
* Parse interpolation of a back-tick string.
* <p>
* <code>
* interpolation := ${ expression }
* </code>
*
* @return Interpolation node
*/
private STNode parseInterpolation() {
startContext(ParserRuleContext.INTERPOLATION);
STNode interpolStart = parseInterpolationStart();
STNode expr = parseExpression();
removeAdditionalTokensInInterpolation();
STNode closeBrace = parseCloseBrace();
endContext();
return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}
/**
* Parse interpolation start token.
* <p>
* <code>interpolation-start := ${</code>
*
* @return Interpolation start token
*/
private STNode parseInterpolationStart() {
STToken token = peek();
if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
return sol.recoveredNode;
}
}
/**
* Remove if there any tokens left after the expression inside the interpolation.
*/
private void removeAdditionalTokensInInterpolation() {
while (true) {
STToken nextToken = peek();
switch (nextToken.kind) {
case EOF_TOKEN:
return;
case CLOSE_BRACE_TOKEN:
return;
default:
consume();
this.errorHandler.reportInvalidNode(nextToken, "invalid token '" + nextToken.text() + "'");
}
}
}
/**
* Parse back-tick token.
*
* @return Back-tick token
*/
private STNode parseBacktickToken(ParserRuleContext ctx) {
STToken token = peek();
if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ctx);
return sol.recoveredNode;
}
}
/**
* Parse table type descriptor.
* <p>
* table-type-descriptor := table row-type-parameter [key-constraint]
* row-type-parameter := type-parameter
* key-constraint := key-specifier | key-type-constraint
* key-specifier := key ( [ field-name (, field-name)* ] )
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed table type desc node.
*/
private STNode parseTableTypeDescriptor() {
STNode tableKeywordToken = parseTableKeyword();
STNode rowTypeParameterNode = parseRowTypeParameter();
STNode keyConstraintNode;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.KEY_KEYWORD) {
STNode keyKeywordToken = parseKeyKeyword();
keyConstraintNode = parseKeyConstraint(keyKeywordToken);
} else {
keyConstraintNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}
/**
* Parse row type parameter node.
* <p>row-type-parameter := type-parameter</p>
*
* @return Parsed node.
*/
private STNode parseRowTypeParameter() {
startContext(ParserRuleContext.ROW_TYPE_PARAM);
STNode rowTypeParameterNode = parseTypeParameter();
endContext();
return rowTypeParameterNode;
}
/**
* Parse type parameter node.
* <p>type-parameter := < type-descriptor > </p>
*
* @return Parsed node
*/
private STNode parseTypeParameter() {
STNode ltToken = parseLTToken();
STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
STNode gtToken = parseGTToken();
return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}
    /** Parse key constraint.
     * <p> key-constraint := key-specifier | key-type-constraint</p>
     *
     * @param keyKeywordToken Already-parsed <code>key</code> keyword
     * @return Parsed node.
     */
    private STNode parseKeyConstraint(STNode keyKeywordToken) {
        // Dispatch on the token after 'key': '(' or '<'.
        return parseKeyConstraint(peek().kind, keyKeywordToken);
    }
    private STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) {
        switch (nextTokenKind) {
            case OPEN_PAREN_TOKEN:
                // key ( field-name, ... )
                return parseKeySpecifier(keyKeywordToken);
            case LT_TOKEN:
                // key < type-descriptor >
                return parseKeyTypeConstraint(keyKeywordToken);
            default:
                Solution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
                // If recovery removed the bad token, it already produced the node.
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Otherwise retry with the token kind recovery decided on.
                return parseKeyConstraint(solution.tokenKind, keyKeywordToken);
        }
    }
/**
* Parse key specifier given parsed key keyword token.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier(STNode keyKeywordToken) {
    // key '(' field-names ')' — parsed under the KEY_SPECIFIER context.
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode openParen = parseOpenParenthesis();
    STNode fieldNames = parseFieldNames();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParen, fieldNames, closeParen);
}
/**
* Parse key type constraint.
* <p>key-type-constraint := key type-parameter</p>
*
* @return Parsed node
*/
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
    // key <type> — the keyword has already been consumed by the caller.
    STNode typeParam = parseTypeParameter();
    return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParam);
}
/** Parse function type descriptor.
* <p>
* <code>function-type-descriptor := function function-signature</code>
*
* @return Function type descriptor node
*/
private STNode parseFunctionTypeDesc() {
    startContext(ParserRuleContext.FUNC_TYPE_DESC);
    STNode functionKeyword = parseFunctionKeyword();
    // Flags: (isParamNameOptional?, ...) — meaning not visible here; TODO confirm against parseFuncSignature.
    STNode signature = parseFuncSignature(true, false);
    // NOTE(review): no matching endContext() before return — presumably the context is
    // popped downstream; confirm this is intentional.
    return STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);
}
/**
* Parse anonymous function or function type desc. In an expression
* context, both of these are possible. Hence we can distinguish only
* after reaching the body.
*
* @param annots
* @return
*/
private STNode parseFunctionExpression(STNode annots) {
    startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
    STNode funcKeyword = parseFunctionKeyword();
    STNode funcSignature = parseFuncSignature(true, false);
    // A null body means no anon-func body followed: treat as a function type descriptor.
    STNode funcBody = parseFunctionTypeOrAnonFuncBody();
    if (funcBody == null) {
        // NOTE(review): annots are dropped on this path, and no endContext() is visible
        // before either return — confirm both are intentional.
        return STNodeFactory.createFunctionTypeDescriptorNode(funcKeyword, funcSignature);
    }
    return STNodeFactory.createAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature, funcBody);
}
/**
 * Parse what follows a function signature when the construct may be either an
 * anonymous function expression or a function type descriptor.
 *
 * @return The parsed function body, or {@code null} to signal the caller that the
 *         construct is a function type descriptor (no body present).
 */
private STNode parseFunctionTypeOrAnonFuncBody() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACE_TOKEN:
            // Block body: this is an anonymous function expression.
            return parseFunctionBodyBlock();
        case EQUAL_GT_TOKEN:
            // NOTE(review): '=>' (expression-bodied anon func) currently behaves exactly
            // like the default case — presumably a placeholder; confirm intended handling.
            return null;
        default:
            return null;
    }
}
/**
* Parse tuple type descriptor.
* <p>
* <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
* <br/><br/>
* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
* | [ tuple-rest-descriptor ]
* <br/><br/>
* tuple-rest-descriptor := type-descriptor ...
* </code>
*
* @return
*/
private STNode parseTupleTypeDesc() {
    // '[' member-type-desc-list [rest-type-desc] ']'
    STNode openBracketToken = parseOpenBracket();
    STNode memberTypeDescriptors = parseTupleMemberTypeDescList();
    STNode restTypeDescriptor = parseTupleRestTypeDesc();
    STNode closeBracketToken = parseCloseBracket();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracketToken, memberTypeDescriptors,
            restTypeDescriptor, closeBracketToken);
}
/**
* Parse tuple member type descriptors.
*
* @return Parsed node
*/
private STNode parseTupleMemberTypeDescList() {
    // An immediately-closing list means the member type-desc is missing entirely.
    if (isEndOfTypeList(peek().kind)) {
        this.errorHandler.reportMissingTokenError("missing type-desc");
        return STNodeFactory.createNodeList(new ArrayList<>());
    }

    // First member, then (',' member)* until an end-of-list token is reached.
    List<STNode> members = new ArrayList<>();
    members.add(parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE));
    while (!isEndOfTypeList(peek().kind)) {
        members.add(parseComma());
        members.add(parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE));
    }
    return STNodeFactory.createNodeList(members);
}
/**
 * Check whether the given token kind terminates a tuple member type-desc list.
 */
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        // Closing delimiters.
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        // Tokens that begin the next construct.
        case OPEN_BRACE_TOKEN:
        case EQUAL_TOKEN:
        case SEMICOLON_TOKEN:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}
// Placeholder: tuple rest descriptors are not parsed yet; always yields an empty node.
private STNode parseTupleRestTypeDesc() {
    return STNodeFactory.createEmptyNode();
}
/**
 * Parse a parenthesised type descriptor: {@code ( type-descriptor )}.
 */
private STNode parseParenthesisedTypeDesc() {
    STNode openParenToken = parseOpenParenthesis();
    STNode innerTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS);
    STNode closeParenToken = parseCloseParenthesis();
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParenToken, innerTypeDesc, closeParenToken);
}
/**
* Parse read only type desc.
* readonly-type-descriptor := readonly [type-parameter]
*
* @return Parsed node
*/
private STNode parseReadOnlyTypeDesc() {
    STNode readonlyKeyword = parseReadonlyKeyword();
    // The type-parameter is optional: only present when a '<' follows the keyword.
    STNode typeParam = peek().kind == SyntaxKind.LT_TOKEN
            ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createReadOnlyTypeDescriptorNode(readonlyKeyword, typeParam);
}
/**
* Parse readonly keyword.
*
* @return Parsed node
*/
private STNode parseReadonlyKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.READONLY_KEYWORD) {
        // Not at the expected keyword: delegate to the error handler for recovery.
        Solution sol = recover(nextToken, ParserRuleContext.READONLY_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
* Parse table constructor or query expression.
* <p>
* <code>
* table-constructor-or-query-expr := table-constructor-expr | query-expr
* <br/>
* table-constructor-expr := table [key-specifier] [ [row-list] ]
* <br/>
* query-expr := [query-construct-type] query-pipeline select-clause
* <br/>
* query-construct-type := table key-specifier | stream
* </code>
*
* @return Parsed node
*/
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
    // Both constructs share a common start; disambiguate on the next token.
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode result = parseTableConstructorOrQuery(peek().kind, isRhsExpr);
    endContext();
    return result;
}
private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {
    STNode queryConstructType;
    switch (nextTokenKind) {
        case FROM_KEYWORD:
            // Plain query-expr: no construct type precedes the pipeline.
            queryConstructType = STNodeFactory.createEmptyNode();
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case STREAM_KEYWORD:
            // stream query-pipeline select-clause
            queryConstructType = parseStreamKeyword();
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case TABLE_KEYWORD:
            // Could still be either a table constructor or a table query; decide later.
            STNode tableKeyword = parseTableKeyword();
            return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
        default:
            Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);
    }
}
/**
 * Continue parsing a table-constructor or table-query once the {@code table}
 * keyword has been consumed, dispatching on the next token.
 */
private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
    return parseTableConstructorOrQuery(peek().kind, tableKeyword, isRhsExpr);
}
private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STNode tableKeyword, boolean isRhsExpr) {
    STNode keySpecifier;
    switch (nextTokenKind) {
        case OPEN_BRACKET_TOKEN:
            // table [ ... ] — a table constructor without a key specifier.
            keySpecifier = STNodeFactory.createEmptyNode();
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        case KEY_KEYWORD:
            // table key(...) — still ambiguous; the token after decides.
            keySpecifier = parseKeySpecifier();
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
        default:
            Solution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTableConstructorOrQuery(solution.tokenKind, tableKeyword, isRhsExpr);
    }
}
/**
 * Disambiguate between table-constructor and table-query after
 * {@code table key-specifier} has been parsed, dispatching on the next token.
 */
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
    return parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);
}
private STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,
                                               boolean isRhsExpr) {
    switch (nextTokenKind) {
        case FROM_KEYWORD:
            // table key(...) from ... — a query with a table construct type.
            return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
        case OPEN_BRACKET_TOKEN:
            // table key(...) [ ... ] — a table constructor expression.
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        default:
            Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS,
                    tableKeyword, keySpecifier, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);
    }
}
/**
* Parse query construct type.
* <p>
* <code>query-construct-type := table key-specifier</code>
*
* @return Parsed node
*/
private STNode parseQueryConstructType(STNode tableKeyword, STNode keySpecifier) {
    // Both parts were already parsed by the caller; just assemble the node.
    return STNodeFactory.createQueryConstructTypeNode(tableKeyword, keySpecifier);
}
/**
* Parse query expression.
* <p>
* <code>
* query-expr-rhs := query-pipeline select-clause
* <br/>
* query-pipeline := from-clause intermediate-clause*
* </code>
*
* @param queryConstructType queryConstructType that precedes this rhs
* @return Parsed node
*/
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
    switchContext(ParserRuleContext.QUERY_EXPRESSION);
    // The pipeline always starts with a from-clause.
    STNode fromClause = parseFromClause(isRhsExpr);

    List<STNode> clauses = new ArrayList<>();
    boolean hasReachedSelectClause = false;
    STNode intermediateClause;
    STNode selectClause = null;
    while (!isEndOfIntermediateClause(peek().kind)) {
        intermediateClause = parseIntermediateClause(isRhsExpr);
        if (!hasReachedSelectClause) {
            if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
                // First select-clause terminates the intermediate-clause list.
                selectClause = intermediateClause;
                hasReachedSelectClause = true;
            } else {
                clauses.add(intermediateClause);
            }
        } else {
            // NOTE(review): clauses parsed after the select are reported and then
            // discarded (not attached to the tree) — confirm that is intended.
            this.errorHandler.reportMissingTokenError("extra clauses after select clause");
        }
    }

    if (!hasReachedSelectClause) {
        // Loop ended without seeing a select: parse (or recover) one now, since it is mandatory.
        selectClause = parseSelectClause(isRhsExpr);
    }

    STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
    STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
    return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause);
}
/**
* Parse an intermediate clause.
* <p>
* <code>
* intermediate-clause := from-clause | where-clause | let-clause
* </code>
*
* @return Parsed node
*/
private STNode parseIntermediateClause(boolean isRhsExpr) {
    // Dispatch on the upcoming token without consuming it.
    return parseIntermediateClause(peek().kind, isRhsExpr);
}
/**
 * Parse a single clause of a query pipeline, given the next token kind.
 * Note that a select-clause is also accepted here; the caller treats it as the
 * pipeline terminator.
 */
private STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {
    switch (nextTokenKind) {
        case FROM_KEYWORD:
            return parseFromClause(isRhsExpr);
        case WHERE_KEYWORD:
            return parseWhereClause(isRhsExpr);
        case LET_KEYWORD:
            return parseLetClause(isRhsExpr);
        case SELECT_KEYWORD:
            return parseSelectClause(isRhsExpr);
        default:
            Solution solution = recover(peek(), ParserRuleContext.QUERY_EXPRESSION_RHS, isRhsExpr);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseIntermediateClause(solution.tokenKind, isRhsExpr);
    }
}
/**
 * Check whether the given token kind terminates the intermediate-clause list of
 * a query pipeline. Anything that can validly start an expression-RHS also ends it.
 */
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
    switch (tokenKind) {
        // Closing delimiters and statement terminators.
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case EOF_TOKEN:
        // Tokens that begin the next declaration.
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case FUNCTION_KEYWORD:
        case RESOURCE_KEYWORD:
        case LISTENER_KEYWORD:
        case DOCUMENTATION_LINE:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
            return true;
        default:
            return isValidExprRhsStart(tokenKind);
    }
}
/**
* Parse from clause.
* <p>
* <code>from-clause := from typed-binding-pattern in expression</code>
*
* @return Parsed node
*/
private STNode parseFromClause(boolean isRhsExpr) {
    // from <type> <var-name> in <expression>
    STNode fromKeyword = parseFromKeyword();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    STNode variableName = parseVariableName();
    STNode inKeyword = parseInKeyword();
    // The iterable expression is parsed at UNARY precedence.
    STNode iterableExpr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createFromClauseNode(fromKeyword, typeDesc, variableName, inKeyword, iterableExpr);
}
/**
* Parse from-keyword.
*
* @return From-keyword node
*/
private STNode parseFromKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FROM_KEYWORD) {
        // Not at the expected keyword: delegate to the error handler for recovery.
        Solution sol = recover(nextToken, ParserRuleContext.FROM_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
* Parse where clause.
* <p>
* <code>where-clause := where expression</code>
*
* @return Parsed node
*/
private STNode parseWhereClause(boolean isRhsExpr) {
    // where <expression> — the condition is parsed at UNARY precedence.
    STNode whereKeyword = parseWhereKeyword();
    STNode condition = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, condition);
}
/**
* Parse where-keyword.
*
* @return Where-keyword node
*/
private STNode parseWhereKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WHERE_KEYWORD) {
        // Not at the expected keyword: delegate to the error handler for recovery.
        Solution sol = recover(nextToken, ParserRuleContext.WHERE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
* Parse let clause.
* <p>
* <code>let-clause := let let-var-decl [, let-var-decl]* </code>
*
* @return Parsed node
*/
private STNode parseLetClause(boolean isRhsExpr) {
    // let <let-var-decl> (, <let-var-decl>)*
    STNode letKeyword = parseLetKeyword();
    STNode varDecls = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
    return STNodeFactory.createLetClauseNode(letKeyword, varDecls);
}
/**
* Parse select clause.
* <p>
* <code>select-clause := select expression</code>
*
* @return Parsed node
*/
private STNode parseSelectClause(boolean isRhsExpr) {
    // select <expression> — the projected expression is parsed at UNARY precedence.
    STNode selectKeyword = parseSelectKeyword();
    STNode projection = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createSelectClauseNode(selectKeyword, projection);
}
/**
* Parse select-keyword.
*
* @return Select-keyword node
*/
private STNode parseSelectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SELECT_KEYWORD) {
        // Not at the expected keyword: delegate to the error handler for recovery.
        Solution sol = recover(nextToken, ParserRuleContext.SELECT_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
* Parse double-LT token.
*
* @return Parsed node
*/
private STNode parseDoubleLTToken() {
    // '<<' is parsed as two consecutive '<' tokens.
    STNode firstLT = parseLTToken();
    STNode secondLT = parseLTToken();
    return STNodeFactory.createDoubleLTTokenNode(firstLT, secondLT);
}
/**
* Parse double-GT token.
*
* @return Parsed node
*/
private STNode parseDoubleGTToken() {
    // '>>' is parsed as two consecutive '>' tokens.
    STNode firstGT = parseGTToken();
    STNode secondGT = parseGTToken();
    return STNodeFactory.createDoubleGTTokenNode(firstGT, secondGT);
}
/**
* Parse triple-GT token.
*
* @return Parsed node
*/
} | class BallerinaParser extends AbstractParser {
private static final OperatorPrecedence DEFAULT_OP_PRECEDENCE = OperatorPrecedence.ACTION;
protected BallerinaParser(AbstractTokenReader tokenReader) {
super(tokenReader, new BallerinaParserErrorHandler(tokenReader));
}
/**
* Start parsing the given input.
*
* @return Parsed node
*/
@Override
public STNode parse() {
return parseCompUnit();
}
/**
* Start parsing the input from a given context. Supported starting points are:
* <ul>
* <li>Module part (a file)</li>
* <li>Top level node</li>
* <li>Statement</li>
* <li>Expression</li>
* </ul>
*
* @param context Context to start parsing
* @return Parsed node
*/
public STNode parse(ParserRuleContext context) {
switch (context) {
case COMP_UNIT:
return parseCompUnit();
case TOP_LEVEL_NODE:
startContext(ParserRuleContext.COMP_UNIT);
return parseTopLevelNode();
case STATEMENT:
startContext(ParserRuleContext.COMP_UNIT);
startContext(ParserRuleContext.FUNC_BODY_BLOCK);
return parseStatement();
case EXPRESSION:
startContext(ParserRuleContext.COMP_UNIT);
startContext(ParserRuleContext.FUNC_BODY_BLOCK);
startContext(ParserRuleContext.STATEMENT);
return parseExpression();
default:
throw new UnsupportedOperationException("Cannot start parsing from: " + context);
}
}
/**
* Resume the parsing from the given context.
*
* @param context Context to resume parsing
* @param args Arguments that requires to continue parsing from the given parser context
* @return Parsed node
*/
@Override
public STNode resumeParsing(ParserRuleContext context, Object... args) {
switch (context) {
case COMP_UNIT:
return parseCompUnit();
case EXTERNAL_FUNC_BODY:
return parseExternalFunctionBody();
case FUNC_BODY:
return parseFunctionBody((boolean) args[0]);
case OPEN_BRACE:
return parseOpenBrace();
case CLOSE_BRACE:
return parseCloseBrace();
case FUNC_NAME:
return parseFunctionName();
case OPEN_PARENTHESIS:
case ARG_LIST_START:
return parseOpenParenthesis((ParserRuleContext) args[0]);
case SIMPLE_TYPE_DESCRIPTOR:
return parseSimpleTypeDescriptor();
case ASSIGN_OP:
return parseAssignOp();
case EXTERNAL_KEYWORD:
return parseExternalKeyword();
case SEMICOLON:
return parseSemicolon();
case CLOSE_PARENTHESIS:
return parseCloseParenthesis();
case VARIABLE_NAME:
return parseVariableName();
case TERMINAL_EXPRESSION:
return parseTerminalExpression((boolean) args[0], (boolean) args[1]);
case STATEMENT:
return parseStatement();
case STATEMENT_WITHOUT_ANNOTS:
return parseStatement((STNode) args[0]);
case EXPRESSION_RHS:
return parseExpressionRhs((OperatorPrecedence) args[0], (STNode) args[1], (boolean) args[2],
(boolean) args[3]);
case PARAMETER_START:
return parseParameter((SyntaxKind) args[0], (STNode) args[1], (int) args[2], (boolean) args[3]);
case PARAMETER_WITHOUT_ANNOTS:
return parseParamGivenAnnots((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (int) args[3],
(boolean) args[4]);
case AFTER_PARAMETER_TYPE:
return parseAfterParamType((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(STNode) args[4], (boolean) args[5]);
case PARAMETER_NAME_RHS:
return parseParameterRhs((SyntaxKind) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(STNode) args[4], (STNode) args[5]);
case TOP_LEVEL_NODE:
return parseTopLevelNode();
case TOP_LEVEL_NODE_WITHOUT_METADATA:
return parseTopLevelNode((STNode) args[0]);
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
return parseTopLevelNode((STNode) args[0], (STNode) args[1]);
case STATEMENT_START_IDENTIFIER:
return parseStatementStartIdentifier();
case VAR_DECL_STMT_RHS:
return parseVarDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(boolean) args[4]);
case TYPE_REFERENCE:
return parseTypeReference();
case FIELD_DESCRIPTOR_RHS:
return parseFieldDescriptorRhs((STNode) args[0], (STNode) args[1], (STNode) args[2]);
case RECORD_BODY_START:
return parseRecordBodyStartDelimiter();
case TYPE_DESCRIPTOR:
return parseTypeDescriptorInternal((ParserRuleContext) args[0]);
case OBJECT_MEMBER_START:
return parseObjectMember();
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
return parseObjectMethodOrField((STNode) args[0], (STNode) args[1]);
case OBJECT_FIELD_RHS:
return parseObjectFieldRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3]);
case OBJECT_TYPE_FIRST_QUALIFIER:
return parseObjectTypeQualifiers();
case OBJECT_TYPE_SECOND_QUALIFIER:
return parseObjectTypeSecondQualifier((STNode) args[0]);
case OBJECT_KEYWORD:
return parseObjectKeyword();
case TYPE_NAME:
return parseTypeName();
case IF_KEYWORD:
return parseIfKeyword();
case ELSE_KEYWORD:
return parseElseKeyword();
case ELSE_BODY:
return parseElseBody();
case WHILE_KEYWORD:
return parseWhileKeyword();
case PANIC_KEYWORD:
return parsePanicKeyword();
case MAJOR_VERSION:
return parseMajorVersion();
case IMPORT_DECL_RHS:
return parseImportDecl((STNode) args[0], (STNode) args[1]);
case IMPORT_PREFIX:
return parseImportPrefix();
case IMPORT_MODULE_NAME:
case IMPORT_ORG_OR_MODULE_NAME:
case VARIABLE_REF:
case FIELD_OR_FUNC_NAME:
case SERVICE_NAME:
case IMPLICIT_ANON_FUNC_PARAM:
return parseIdentifier(context);
case IMPORT_KEYWORD:
return parseImportKeyword();
case SLASH:
return parseSlashToken();
case DOT:
return parseDotToken();
case IMPORT_VERSION_DECL:
return parseVersion();
case VERSION_KEYWORD:
return parseVersionKeywrod();
case VERSION_NUMBER:
return parseVersionNumber();
case DECIMAL_INTEGER_LITERAL:
return parseDecimalIntLiteral(context);
case IMPORT_SUB_VERSION:
return parseSubVersion(context);
case IMPORT_PREFIX_DECL:
return parseImportPrefixDecl();
case AS_KEYWORD:
return parseAsKeyword();
case CONTINUE_KEYWORD:
return parseContinueKeyword();
case BREAK_KEYWORD:
return parseBreakKeyword();
case RETURN_KEYWORD:
return parseReturnKeyword();
case MAPPING_FIELD:
case FIRST_MAPPING_FIELD:
return parseMappingField((ParserRuleContext) args[0], (STNode) args[1]);
case SPECIFIC_FIELD_RHS:
return parseSpecificFieldRhs((STNode) args[0], (STNode) args[1]);
case STRING_LITERAL:
return parseStringLiteral();
case COLON:
return parseColon();
case OPEN_BRACKET:
return parseOpenBracket();
case RESOURCE_DEF:
return parseResource();
case OPTIONAL_SERVICE_NAME:
return parseServiceName();
case SERVICE_KEYWORD:
return parseServiceKeyword();
case ON_KEYWORD:
return parseOnKeyword();
case RESOURCE_KEYWORD:
return parseResourceKeyword();
case LISTENER_KEYWORD:
return parseListenerKeyword();
case NIL_TYPE_DESCRIPTOR:
return parseNilTypeDescriptor();
case COMPOUND_ASSIGNMENT_STMT:
return parseCompoundAssignmentStmt();
case TYPEOF_KEYWORD:
return parseTypeofKeyword();
case ARRAY_TYPE_DESCRIPTOR:
return parseArrayTypeDescriptor((STNode) args[0]);
case ARRAY_LENGTH:
return parseArrayLength();
case FUNC_DEF_OR_FUNC_TYPE:
case REQUIRED_PARAM:
case ANNOT_REFERENCE:
return parseIdentifier(context);
case IS_KEYWORD:
return parseIsKeyword();
case STMT_START_WITH_EXPR_RHS:
return parseStamentStartWithExpr((STNode) args[0], (STNode) args[1]);
case COMMA:
return parseComma();
case CONST_DECL_TYPE:
return parseConstDecl((STNode) args[0], (STNode) args[1], (STNode) args[2]);
case STMT_START_WITH_IDENTIFIER:
return parseStatementStartsWithIdentifier((STNode) args[0], (STNode) args[1]);
case LT:
return parseLTToken();
case GT:
return parseGTToken();
case NIL_LITERAL:
return parseNilLiteral();
case RECORD_FIELD_OR_RECORD_END:
return parseFieldOrRestDescriptor((boolean) args[0]);
case ANNOTATION_KEYWORD:
return parseAnnotationKeyword();
case ANNOT_DECL_OPTIONAL_TYPE:
return parseAnnotationDeclFromType((STNode) args[0], (STNode) args[1], (STNode) args[2],
(STNode) args[3]);
case ANNOT_DECL_RHS:
return parseAnnotationDeclRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (STNode) args[3],
(STNode) args[4]);
case ANNOT_OPTIONAL_ATTACH_POINTS:
return parseAnnotationDeclAttachPoints((STNode) args[0], (STNode) args[1], (STNode) args[2],
(STNode) args[3], (STNode) args[4], (STNode) args[5]);
case SOURCE_KEYWORD:
return parseSourceKeyword();
case ATTACH_POINT_IDENT:
return parseAttachPointIdent((STNode) args[0]);
case IDENT_AFTER_OBJECT_IDENT:
return parseIdentAfterObjectIdent();
case FUNCTION_IDENT:
return parseFunctionIdent();
case FIELD_IDENT:
return parseFieldIdent();
case ATTACH_POINT_END:
return parseAttachPointEnd();
case XMLNS_KEYWORD:
return parseXMLNSKeyword();
case XML_NAMESPACE_PREFIX_DECL:
return parseXMLDeclRhs((STNode) args[0], (STNode) args[1]);
case NAMESPACE_PREFIX:
return parseNamespacePrefix();
case WORKER_KEYWORD:
return parseWorkerKeyword();
case WORKER_NAME:
return parseWorkerName();
case FORK_KEYWORD:
return parseForkKeyword();
case DECIMAL_FLOATING_POINT_LITERAL:
return parseDecimalFloatingPointLiteral();
case HEX_FLOATING_POINT_LITERAL:
return parseHexFloatingPointLiteral();
case TRAP_KEYWORD:
return parseTrapKeyword();
case IN_KEYWORD:
return parseInKeyword();
case FOREACH_KEYWORD:
return parseForEachKeyword();
case TABLE_KEYWORD:
return parseTableKeyword();
case KEY_KEYWORD:
return parseKeyKeyword();
case TABLE_KEYWORD_RHS:
return parseTableConstructorOrQuery((STNode) args[0], (boolean) args[1]);
case ERROR_KEYWORD:
return parseErrorKeyWord();
case LET_KEYWORD:
return parseLetKeyword();
case STREAM_KEYWORD:
return parseStreamKeyword();
case STREAM_TYPE_FIRST_PARAM_RHS:
return parseStreamTypeParamsNode((STNode) args[0], (STNode) args[1]);
case TEMPLATE_START:
case TEMPLATE_END:
return parseBacktickToken(context);
case KEY_CONSTRAINTS_RHS:
return parseKeyConstraint((STNode) args[0]);
case FUNCTION_KEYWORD_RHS:
return parseFunctionKeywordRhs((STNode) args[0], (STNode) args[1], (STNode) args[2], (boolean) args[3],
(boolean) args[3]);
case FUNC_OPTIONAL_RETURNS:
return parseFuncReturnTypeDescriptor();
case RETURNS_KEYWORD:
return parseReturnsKeyword();
case NEW_KEYWORD_RHS:
return parseNewKeywordRhs((STNode) args[0]);
case NEW_KEYWORD:
return parseNewKeyword();
case IMPLICIT_NEW:
return parseImplicitNewRhs((STNode) args[0]);
case FROM_KEYWORD:
return parseFromKeyword();
case WHERE_KEYWORD:
return parseWhereKeyword();
case SELECT_KEYWORD:
return parseSelectKeyword();
case TABLE_CONSTRUCTOR_OR_QUERY_START:
return parseTableConstructorOrQuery((boolean) args[0]);
case TABLE_CONSTRUCTOR_OR_QUERY_RHS:
return parseTableConstructorOrQueryRhs((STNode) args[0], (STNode) args[1], (boolean) args[2]);
case QUERY_EXPRESSION_RHS:
return parseIntermediateClause((boolean) args[0]);
case ANON_FUNC_BODY:
return parseAnonFuncBody();
case CLOSE_BRACKET:
return parseCloseBracket();
case ARG_START_OR_ARG_LIST_END:
return parseArg((STNode) args[0]);
case ARG_END:
return parseArgEnd();
case MAPPING_FIELD_END:
return parseMappingFieldEnd();
case FUNCTION_KEYWORD:
return parseFunctionKeyword();
case FIELD_OR_REST_DESCIPTOR_RHS:
return parseFieldOrRestDescriptorRhs((STNode) args[0], (STNode) args[1]);
case TYPE_DESC_IN_TUPLE_RHS:
return parseTupleMemberRhs();
case LIST_BINDING_PATTERN_END_OR_CONTINUE:
return parseListBindingpatternRhs();
case CONSTANT_EXPRESSION_START:
return parseConstExprInternal();
case LIST_CONSTRUCTOR_MEMBER_END:
return parseListConstructorMemberEnd();
case NIL_OR_PARENTHESISED_TYPE_DESC_RHS:
return parseNilOrParenthesisedTypeDescRhs((STNode) args[0]);
case ANON_FUNC_PARAM_RHS:
return parseImplicitAnonFuncParamEnd();
case CAPTURE_BINDING_PATTERN:
return parseCaptureBindingPattern();
case LIST_BINDING_PATTERN:
return parseListBindingPattern();
case BINDING_PATTERN:
return parseBindingPattern();
case TYPED_BINDING_PATTERN:
return parseTypedBindingPattern();
case PEER_WORKER_NAME:
return parsePeerWorkerName();
case SYNC_SEND_TOKEN:
return parseSyncSendToken();
case LEFT_ARROW_TOKEN:
return parseLeftArrowToken();
case RECEIVE_WORKERS:
return parseReceiveWorkers();
default:
throw new IllegalStateException("cannot resume parsing the rule: " + context);
}
}
/*
* Private methods.
*/
/**
* Parse a given input and returns the AST. Starts parsing from the top of a compilation unit.
*
* @return Parsed node
*/
private STNode parseCompUnit() {
startContext(ParserRuleContext.COMP_UNIT);
STToken token = peek();
List<STNode> otherDecls = new ArrayList<>();
List<STNode> importDecls = new ArrayList<>();
boolean processImports = true;
while (token.kind != SyntaxKind.EOF_TOKEN) {
STNode decl = parseTopLevelNode(token.kind);
if (decl.kind == SyntaxKind.IMPORT_DECLARATION) {
if (processImports) {
importDecls.add(decl);
} else {
otherDecls.add(decl);
this.errorHandler.reportInvalidNode(token, "imports must be declared before other declarations");
}
} else {
if (processImports) {
processImports = false;
}
otherDecls.add(decl);
}
token = peek();
}
STToken eof = consume();
endContext();
return STNodeFactory.createModulePartNode(STNodeFactory.createNodeList(importDecls),
STNodeFactory.createNodeList(otherDecls), eof);
}
/**
* Parse top level node having an optional modifier preceding it.
*
* @return Parsed node
*/
private STNode parseTopLevelNode() {
STToken token = peek();
return parseTopLevelNode(token.kind);
}
protected STNode parseTopLevelNode(SyntaxKind tokenKind) {
STNode metadata;
switch (tokenKind) {
case EOF_TOKEN:
return consume();
case DOCUMENTATION_LINE:
case AT_TOKEN:
metadata = parseMetaData(tokenKind);
return parseTopLevelNode(metadata);
case IMPORT_KEYWORD:
case FINAL_KEYWORD:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case CONST_KEYWORD:
case ANNOTATION_KEYWORD:
case XMLNS_KEYWORD:
case SERVICE_KEYWORD:
metadata = createEmptyMetadata();
break;
case IDENTIFIER_TOKEN:
if (isModuleVarDeclStart(1)) {
return parseModuleVarDecl(createEmptyMetadata(), null);
}
default:
if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
metadata = createEmptyMetadata();
break;
}
STToken token = peek();
Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE);
if (solution.action == Action.KEEP) {
metadata = STNodeFactory.createNodeList(new ArrayList<>());
break;
}
if (solution.action == Action.REMOVE) {
return solution.recoveredNode;
}
return parseTopLevelNode(solution.tokenKind);
}
return parseTopLevelNode(tokenKind, metadata);
}
/**
* Parse top level node having an optional modifier preceding it, given the next token kind.
*
* @param metadata Next token kind
* @return Parsed node
*/
private STNode parseTopLevelNode(STNode metadata) {
STToken nextToken = peek();
return parseTopLevelNode(nextToken.kind, metadata);
}
private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata) {
STNode qualifier = null;
switch (tokenKind) {
case EOF_TOKEN:
if (metadata != null) {
this.errorHandler.reportInvalidNode(null, "invalid metadata");
}
return consume();
case PUBLIC_KEYWORD:
qualifier = parseQualifier();
tokenKind = peek().kind;
break;
case FUNCTION_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case CONST_KEYWORD:
case FINAL_KEYWORD:
case IMPORT_KEYWORD:
case ANNOTATION_KEYWORD:
case XMLNS_KEYWORD:
break;
case IDENTIFIER_TOKEN:
if (isModuleVarDeclStart(1)) {
return parseModuleVarDecl(metadata, null);
}
default:
if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
break;
}
STToken token = peek();
Solution solution = recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA, metadata);
if (solution.action == Action.REMOVE) {
return solution.recoveredNode;
}
if (solution.action == Action.KEEP) {
qualifier = STNodeFactory.createEmptyNode();
break;
}
return parseTopLevelNode(solution.tokenKind, metadata);
}
return parseTopLevelNode(tokenKind, metadata, qualifier);
}
/**
* Check whether the cursor is at the start of a module level var-decl.
*
* @param lookahead Offset of the token to to check
* @return <code>true</code> if the cursor is at the start of a module level var-decl.
* <code>false</code> otherwise.
*/
private boolean isModuleVarDeclStart(int lookahead) {
STToken nextToken = peek(lookahead + 1);
switch (nextToken.kind) {
case EQUAL_TOKEN:
case OPEN_BRACKET_TOKEN:
case QUESTION_MARK_TOKEN:
case PIPE_TOKEN:
case BITWISE_AND_TOKEN:
return true;
case IDENTIFIER_TOKEN:
switch (peek(lookahead + 2).kind) {
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
return true;
default:
return false;
}
case COLON_TOKEN:
if (lookahead > 1) {
return false;
}
if (peek(lookahead + 2).kind != SyntaxKind.IDENTIFIER_TOKEN) {
return false;
}
return isModuleVarDeclStart(lookahead + 2);
default:
return false;
}
}
/**
* Parse import declaration.
* <p>
* <code>import-decl := import [org-name /] module-name [version sem-ver] [as import-prefix] ;</code>
*
* @return Parsed node
*/
private STNode parseImportDecl() {
startContext(ParserRuleContext.IMPORT_DECL);
this.tokenReader.startMode(ParserMode.IMPORT);
STNode importKeyword = parseImportKeyword();
STNode identifier = parseIdentifier(ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME);
STToken token = peek();
STNode importDecl = parseImportDecl(token.kind, importKeyword, identifier);
this.tokenReader.endMode();
endContext();
return importDecl;
}
/**
* Parse import keyword.
*
* @return Parsed node
*/
private STNode parseImportKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.IMPORT_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.IMPORT_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse identifier.
*
* @return Parsed node
*/
private STNode parseIdentifier(ParserRuleContext currentCtx) {
STToken token = peek();
if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
Solution sol = recover(token, currentCtx);
return sol.recoveredNode;
}
}
/**
* Parse RHS of the import declaration. This includes the components after the
* starting identifier (org-name/module-name) of the import decl.
*
* @param importKeyword Import keyword
* @param identifier Org-name or the module name
* @return Parsed node
*/
private STNode parseImportDecl(STNode importKeyword, STNode identifier) {
STToken nextToken = peek();
return parseImportDecl(nextToken.kind, importKeyword, identifier);
}
// Parses the remainder of an import declaration after `import <identifier>`,
// dispatching on the token that follows the leading identifier.
private STNode parseImportDecl(SyntaxKind tokenKind, STNode importKeyword, STNode identifier) {
    STNode orgName;
    STNode moduleName;
    STNode version;
    STNode alias;
    switch (tokenKind) {
        case SLASH_TOKEN:
            // `import org/mod ...` : the leading identifier was the org-name.
            STNode slash = parseSlashToken();
            orgName = STNodeFactory.createImportOrgNameNode(identifier, slash);
            moduleName = parseModuleName();
            version = parseVersion();
            alias = parseImportPrefixDecl();
            break;
        case DOT_TOKEN:
        case VERSION_KEYWORD:
            // No org-name: the leading identifier starts the module name.
            orgName = STNodeFactory.createEmptyNode();
            moduleName = parseModuleName(tokenKind, identifier);
            version = parseVersion();
            alias = parseImportPrefixDecl();
            break;
        case AS_KEYWORD:
            // `import mod as prefix;` : no org-name, no version.
            orgName = STNodeFactory.createEmptyNode();
            moduleName = parseModuleName(tokenKind, identifier);
            version = STNodeFactory.createEmptyNode();
            alias = parseImportPrefixDecl();
            break;
        case SEMICOLON_TOKEN:
            // `import mod;` : simplest form; all optional parts absent.
            orgName = STNodeFactory.createEmptyNode();
            moduleName = parseModuleName(tokenKind, identifier);
            version = STNodeFactory.createEmptyNode();
            alias = STNodeFactory.createEmptyNode();
            break;
        default:
            // Unexpected token: run error recovery, then retry with the recovered token kind.
            Solution solution = recover(peek(), ParserRuleContext.IMPORT_DECL_RHS, importKeyword, identifier);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseImportDecl(solution.tokenKind, importKeyword, identifier);
    }
    STNode semicolon = parseSemicolon();
    return STNodeFactory.createImportDeclarationNode(importKeyword, orgName, moduleName, version, alias, semicolon);
}
/**
 * Parse a slash token.
 *
 * @return Parsed node
 */
private STNode parseSlashToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SLASH_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.SLASH);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a dot token.
 *
 * @return Parsed node
 */
private STNode parseDotToken() {
    return parseDotToken(peek().kind);
}
// Parse a dot token, given the kind of the next token.
private STNode parseDotToken(SyntaxKind tokenKind) {
    if (tokenKind != SyntaxKind.DOT_TOKEN) {
        Solution sol = recover(peek(), ParserRuleContext.DOT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the module name of an import declaration.
 *
 * @return Parsed node
 */
private STNode parseModuleName() {
    STNode firstPart = parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME);
    return parseModuleName(peek().kind, firstPart);
}
/**
 * Parse the import module name of an import declaration, given its starting identifier.
 * Collects dot-separated identifier parts until a non-name token is reached.
 *
 * @param moduleNameStart Starting identifier of the module name
 * @return Parsed node
 */
private STNode parseModuleName(SyntaxKind nextTokenKind, STNode moduleNameStart) {
    List<STNode> parts = new ArrayList<>();
    parts.add(moduleNameStart);
    for (SyntaxKind kind = nextTokenKind; !isEndOfImportModuleName(kind); kind = peek().kind) {
        parts.add(parseDotToken());
        parts.add(parseIdentifier(ParserRuleContext.IMPORT_MODULE_NAME));
    }
    return STNodeFactory.createNodeList(parts);
}
// A module name continues only through dots and identifiers; anything else ends it.
private boolean isEndOfImportModuleName(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case DOT_TOKEN:
        case IDENTIFIER_TOKEN:
            return false;
        default:
            return true;
    }
}
// Tokens that terminate an import declaration: the semicolon/EOF, or any token
// that can only start the next top-level construct.
private boolean isEndOfImportDecl(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
        case EOF_TOKEN:
            return true;
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case TYPE_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CONST_KEYWORD:
        case SERVICE_KEYWORD:
        case IMPORT_KEYWORD:
        case FINAL_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Parse the optional version component of an import declaration.
 * <p>
 * <code>version-decl := version sem-ver</code>
 *
 * @return Parsed node
 */
private STNode parseVersion() {
    return parseVersion(peek().kind);
}
// Parses the version clause of an import, dispatching on the next token kind.
// The version component is optional: `as`, `;`, or any import-terminating token
// yields an empty node.
private STNode parseVersion(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case VERSION_KEYWORD:
            STNode versionKeyword = parseVersionKeywrod();
            STNode versionNumber = parseVersionNumber();
            return STNodeFactory.createImportVersionNode(versionKeyword, versionNumber);
        case AS_KEYWORD:
        case SEMICOLON_TOKEN:
            // Version omitted.
            return STNodeFactory.createEmptyNode();
        default:
            if (isEndOfImportDecl(nextTokenKind)) {
                return STNodeFactory.createEmptyNode();
            }
            // Unexpected token: recover, then retry with the recovered token kind.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_VERSION_DECL);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseVersion(solution.tokenKind);
    }
}
/**
 * Parse the <code>version</code> keyword.
 * (Note: the method name carries a historical typo; kept for compatibility with callers.)
 *
 * @return Parsed node
 */
private STNode parseVersionKeywrod() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.VERSION_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.VERSION_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a semantic version number.
 * <p>
 * <code>sem-ver := major-num [. minor-num [. patch-num]]
 * <br/>
 * major-num := DecimalNumber
 * <br/>
 * minor-num := DecimalNumber
 * <br/>
 * patch-num := DecimalNumber
 * </code>
 *
 * @return Parsed node
 */
private STNode parseVersionNumber() {
    return parseVersionNumber(peek().kind);
}
// Parses `major[.minor[.patch]]`. Minor and patch parts are optional and only
// consumed when present; the parts are collected into a node list.
private STNode parseVersionNumber(SyntaxKind nextTokenKind) {
    STNode majorVersion;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL:
            majorVersion = parseMajorVersion();
            break;
        default:
            // Not a number: recover, then retry with the recovered token kind.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.VERSION_NUMBER);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseVersionNumber(solution.tokenKind);
    }
    List<STNode> versionParts = new ArrayList<>();
    versionParts.add(majorVersion);
    // parseMinorVersion/parsePatchVersion return null when the respective part is absent.
    STNode minorVersion = parseMinorVersion();
    if (minorVersion != null) {
        versionParts.add(minorVersion);
        // A patch version can only exist after a minor version.
        STNode patchVersion = parsePatchVersion();
        if (patchVersion != null) {
            versionParts.add(patchVersion);
        }
    }
    return STNodeFactory.createNodeList(versionParts);
}
// Parse the major version number (a decimal integer literal).
private STNode parseMajorVersion() {
    return parseDecimalIntLiteral(ParserRuleContext.MAJOR_VERSION);
}
// Parse the optional minor version; returns null when absent.
private STNode parseMinorVersion() {
    return parseSubVersion(ParserRuleContext.MINOR_VERSION);
}
// Parse the optional patch version; returns null when absent.
private STNode parsePatchVersion() {
    return parseSubVersion(ParserRuleContext.PATCH_VERSION);
}
/**
 * Parse a decimal integer literal.
 *
 * @param context Context in which the decimal literal is used
 * @return Parsed node
 */
private STNode parseDecimalIntLiteral(ParserRuleContext context) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DECIMAL_INTEGER_LITERAL) {
        Solution sol = recover(nextToken, context);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a sub version, i.e.: minor-version/patch-version.
 *
 * @param context Context indicating which kind of sub-version is being parsed
 * @return Parsed node
 */
private STNode parseSubVersion(ParserRuleContext context) {
    return parseSubVersion(peek().kind, context);
}
// Parses `. <decimal>` when the next token is a dot. Returns null when the
// sub-version is absent (`as` or `;` follows).
private STNode parseSubVersion(SyntaxKind nextTokenKind, ParserRuleContext context) {
    switch (nextTokenKind) {
        case AS_KEYWORD:
        case SEMICOLON_TOKEN:
            // Sub-version omitted; callers treat null as "not present".
            return null;
        case DOT_TOKEN:
            STNode leadingDot = parseDotToken();
            STNode versionNumber = parseDecimalIntLiteral(context);
            return STNodeFactory.createImportSubVersionNode(leadingDot, versionNumber);
        default:
            // Unexpected token: recover, then retry with the recovered token kind.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_SUB_VERSION);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseSubVersion(solution.tokenKind, context);
    }
}
/**
 * Parse the optional import prefix declaration.
 * <p>
 * <code>import-prefix-decl := as import-prefix
 * <br/>
 * import-prefix := a identifier | _
 * </code>
 *
 * @return Parsed node
 */
private STNode parseImportPrefixDecl() {
    return parseImportPrefixDecl(peek().kind);
}
// Parses `as <prefix>` when present; otherwise returns an empty node. Any token
// that terminates the import decl also means the prefix was omitted.
private STNode parseImportPrefixDecl(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case AS_KEYWORD:
            STNode asKeyword = parseAsKeyword();
            STNode prefix = parseImportPrefix();
            return STNodeFactory.createImportPrefixNode(asKeyword, prefix);
        case SEMICOLON_TOKEN:
            // Prefix omitted.
            return STNodeFactory.createEmptyNode();
        default:
            if (isEndOfImportDecl(nextTokenKind)) {
                return STNodeFactory.createEmptyNode();
            }
            // Unexpected token: recover, then retry with the recovered token kind.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.IMPORT_PREFIX_DECL);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseImportPrefixDecl(solution.tokenKind);
    }
}
/**
 * Parse the <code>as</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseAsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.AS_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.AS_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse an import prefix (an identifier following the <code>as</code> keyword).
 *
 * @return Parsed node
 */
private STNode parseImportPrefix() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.IMPORT_PREFIX);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a top level node, given the modifier that precedes it.
 *
 * @param metadata Metadata that precedes the top level node
 * @param qualifier Qualifier that precedes the top level node
 * @return Parsed node
 */
private STNode parseTopLevelNode(STNode metadata, STNode qualifier) {
    return parseTopLevelNode(peek().kind, metadata, qualifier);
}
/**
 * Parse top level node given the next token kind and the modifier that precedes it.
 *
 * @param tokenKind Next token kind
 * @param metadata Metadata that precedes the top level node
 * @param qualifier Qualifier that precedes the top level node
 * @return Parsed top-level node
 */
private STNode parseTopLevelNode(SyntaxKind tokenKind, STNode metadata, STNode qualifier) {
    switch (tokenKind) {
        case FUNCTION_KEYWORD:
            return parseFuncDefOrFuncTypeDesc(metadata, getQualifier(qualifier), false);
        case TYPE_KEYWORD:
            return parseModuleTypeDefinition(metadata, getQualifier(qualifier));
        case LISTENER_KEYWORD:
            return parseListenerDeclaration(metadata, getQualifier(qualifier));
        case CONST_KEYWORD:
            return parseConstantDeclaration(metadata, getQualifier(qualifier));
        case ANNOTATION_KEYWORD:
            STNode constKeyword = STNodeFactory.createEmptyNode();
            return parseAnnotationDeclaration(metadata, getQualifier(qualifier), constKeyword);
        case IMPORT_KEYWORD:
            reportInvalidQualifier(qualifier);
            return parseImportDecl();
        case XMLNS_KEYWORD:
            reportInvalidQualifier(qualifier);
            return parseXMLNamepsaceDeclaration();
        case FINAL_KEYWORD:
            reportInvalidQualifier(qualifier);
            STNode finalKeyword = parseFinalKeyword();
            return parseVariableDecl(metadata, finalKeyword, true);
        case SERVICE_KEYWORD:
            // `service` can start either a service decl or a module var decl of a
            // service-typed variable; disambiguate by looking ahead.
            if (isServiceDeclStart(ParserRuleContext.TOP_LEVEL_NODE, 1)) {
                reportInvalidQualifier(qualifier);
                return parseServiceDecl(metadata);
            }
            return parseModuleVarDecl(metadata, qualifier);
        case IDENTIFIER_TOKEN:
            if (isModuleVarDeclStart(1)) {
                return parseModuleVarDecl(metadata, qualifier);
            }
            // Deliberate fall-through: an identifier that does not start a module
            // var decl is handled by the default (recovery) branch below.
        default:
            if (isTypeStartingToken(tokenKind) && tokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                return parseModuleVarDecl(metadata, qualifier);
            }
            STToken token = peek();
            Solution solution =
                    recover(token, ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER, metadata, qualifier);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // KEEP means the recovery decided the current token is acceptable as a
            // module var decl start.
            if (solution.action == Action.KEEP) {
                return parseModuleVarDecl(metadata, qualifier);
            }
            return parseTopLevelNode(solution.tokenKind, metadata, qualifier);
    }
}
// Parse a module-level variable declaration. Module vars take no visibility
// qualifier, so any given qualifier is reported as invalid.
private STNode parseModuleVarDecl(STNode metadata, STNode qualifier) {
    reportInvalidQualifier(qualifier);
    STNode emptyFinalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(metadata, emptyFinalKeyword, true);
}
// Normalize a possibly-null qualifier into an STNode (empty node when null).
private STNode getQualifier(STNode qualifier) {
    if (qualifier != null) {
        return qualifier;
    }
    return STNodeFactory.createEmptyNode();
}
// Report an error if a qualifier was supplied where none is allowed.
private void reportInvalidQualifier(STNode qualifier) {
    if (qualifier == null || qualifier.kind == SyntaxKind.NONE) {
        return;
    }
    this.errorHandler.reportInvalidNode((STToken) qualifier,
            "invalid qualifier '" + qualifier.toString().trim() + "'");
}
/**
 * Parse an access modifier (<code>public</code>).
 *
 * @return Parsed node
 */
private STNode parseQualifier() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.PUBLIC_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.PUBLIC_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
// Parse a function definition (the `isFuncDef` flag forwarded as true forces
// definition parsing rather than function-type parsing).
private STNode parseFuncDefinition(STNode metadata, STNode visibilityQualifier, boolean isObjectMethod) {
    startContext(ParserRuleContext.FUNC_DEF);
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcDefinition =
            parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, true, isObjectMethod);
    endContext();
    return funcDefinition;
}
/**
 * Parse either a function definition or a function type descriptor; the two
 * cannot be distinguished until after the <code>function</code> keyword.
 * <p>
 * <code>
 * function-defn := FUNCTION identifier function-signature function-body
 * <br/>
 * function-type-descriptor := function function-signature
 * </code>
 *
 * @param metadata Metadata
 * @param visibilityQualifier Visibility qualifier
 * @param isObjectMethod Whether this occurs inside an object body
 * @return Parsed node
 */
private STNode parseFuncDefOrFuncTypeDesc(STNode metadata, STNode visibilityQualifier, boolean isObjectMethod) {
    startContext(ParserRuleContext.FUNC_DEF_OR_FUNC_TYPE);
    STNode functionKeyword = parseFunctionKeyword();
    STNode result =
            parseFunctionKeywordRhs(metadata, visibilityQualifier, functionKeyword, false, isObjectMethod);
    endContext();
    return result;
}
// Convenience overload: peeks the next token kind and delegates.
private STNode parseFunctionKeywordRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword,
                                       boolean isFuncDef, boolean isObjectMethod) {
    SyntaxKind nextTokenKind = peek().kind;
    return parseFunctionKeywordRhs(nextTokenKind, metadata, visibilityQualifier, functionKeyword, isFuncDef,
            isObjectMethod);
}
// Parses what follows the `function` keyword. An identifier means this is a
// function definition; an open-paren means it may still be a function type
// descriptor, resolved later from the return-type RHS.
private STNode parseFunctionKeywordRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,
                                       STNode functionKeyword, boolean isFuncDef, boolean isObjectMethod) {
    STNode name;
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
            // A named function is necessarily a definition.
            name = parseFunctionName();
            isFuncDef = true;
            break;
        case OPEN_PAREN_TOKEN:
            name = STNodeFactory.createEmptyNode();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.FUNCTION_KEYWORD_RHS, metadata,
                    visibilityQualifier, functionKeyword, isFuncDef, isObjectMethod);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseFunctionKeywordRhs(solution.tokenKind, metadata, visibilityQualifier, functionKeyword,
                    isFuncDef, isObjectMethod);
    }
    if (isFuncDef) {
        // We now know this is a definition: narrow the ambiguous context.
        switchContext(ParserRuleContext.FUNC_DEF);
        STNode funcSignature = parseFuncSignature(false);
        return createFuncDefOrMethodDecl(metadata, visibilityQualifier, functionKeyword, isObjectMethod, name,
                funcSignature);
    }
    // Still ambiguous: parse the signature with optional param names and decide
    // from whatever follows the return type descriptor.
    STNode funcSignature = parseFuncSignature(true);
    return parseReturnTypeDescRhs(metadata, visibilityQualifier, functionKeyword, funcSignature, isObjectMethod);
}
// Build either a function declaration (body is just `;`) or a full function
// definition, depending on the parsed body.
private STNode createFuncDefOrMethodDecl(STNode metadata, STNode visibilityQualifier, STNode functionKeyword,
                                         boolean isObjectMethod, STNode name, STNode funcSignature) {
    STNode body = parseFunctionBody(isObjectMethod);
    boolean isDeclaration = body.kind == SyntaxKind.SEMICOLON_TOKEN;
    if (isDeclaration) {
        return STNodeFactory.createFunctionDeclarationNode(metadata, visibilityQualifier, functionKeyword, name,
                funcSignature, body);
    }
    return STNodeFactory.createFunctionDefinitionNode(metadata, visibilityQualifier, functionKeyword, name,
            funcSignature, body);
}
/**
 * Parse function signature.
 * <p>
 * <code>
 * function-signature := ( param-list ) return-type-descriptor
 * <br/>
 * return-type-descriptor := [ returns [annots] type-descriptor ]
 * </code>
 *
 * @param isParamNameOptional Whether the parameter names are optional
 * @return Function signature node
 */
private STNode parseFuncSignature(boolean isParamNameOptional) {
    STNode openParenthesis = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode parameters = parseParamList(isParamNameOptional);
    STNode closeParenthesis = parseCloseParenthesis();
    // Pop the param-list context pushed by parseParamList before parsing the return type.
    endContext();
    STNode returnTypeDesc = parseFuncReturnTypeDescriptor();
    return STNodeFactory.createFunctionSignatureNode(openParenthesis, parameters, closeParenthesis, returnTypeDesc);
}
// Disambiguates a `function (...)...` that could be either a function definition
// with a missing name, or a function-type-descriptor used as the type of a var
// decl. A following `;` or identifier means the latter; otherwise the signature
// is treated as a definition whose name is missing.
private STNode parseReturnTypeDescRhs(STNode metadata, STNode visibilityQualifier, STNode functionKeyword,
                                      STNode funcSignature, boolean isObjectMethod) {
    switch (peek().kind) {
        case SEMICOLON_TOKEN:
        case IDENTIFIER_TOKEN:
            // Function type descriptor as the type of a variable declaration.
            endContext();
            startContext(ParserRuleContext.VAR_DECL_STMT);
            STNode typeDesc = STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, funcSignature);
            STNode varName = parseVariableName();
            STNode varDecl = parseVarDeclRhs(metadata, visibilityQualifier, typeDesc, varName, true);
            return varDecl;
        case OPEN_PAREN_TOKEN:
        case EQUAL_TOKEN:
            // Fall through to the missing-name function-definition path below.
            break;
        default:
            break;
    }
    // Treat as a function definition with a missing name; ensure every param has
    // a (possibly synthetic) name since definitions require named params.
    this.errorHandler.reportMissingTokenError("missing " + ParserRuleContext.FUNC_NAME);
    STNode name = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
    return createFuncDefOrMethodDecl(metadata, visibilityQualifier, functionKeyword, isObjectMethod, name,
            funcSignature);
}
/**
 * Validate the param list and return. If there are params without param-name,
 * then this method will create a new set of params with missing param-name
 * and return.
 *
 * @param signature Function signature
 * @return The original signature if all params are named; otherwise a new
 *         signature whose unnamed params carry missing-identifier names
 */
private STNode validateAndGetFuncParams(STFunctionSignatureNode signature) {
    STNode parameters = signature.parameters;
    int paramCount = parameters.bucketCount();
    int index = 0;
    // Scan for the first param with a missing name. The `break` inside each case
    // exits the switch; the trailing `break` then exits the loop, leaving `index`
    // pointing at the offending param. `continue` advances to the next param.
    for (; index < paramCount; index++) {
        STNode param = parameters.childInBucket(index);
        switch (param.kind) {
            case REQUIRED_PARAM:
                STRequiredParameterNode requiredParam = (STRequiredParameterNode) param;
                if (isEmpty(requiredParam.paramName)) {
                    break;
                }
                continue;
            case DEFAULTABLE_PARAM:
                STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;
                if (isEmpty(defaultableParam.paramName)) {
                    break;
                }
                continue;
            case REST_PARAM:
                STRestParameterNode restParam = (STRestParameterNode) param;
                if (isEmpty(restParam.paramName)) {
                    break;
                }
                continue;
            default:
                continue;
        }
        break;
    }
    // All params already have names: reuse the signature unchanged.
    if (index == paramCount) {
        return signature;
    }
    STNode updatedParams = getUpdatedParamList(parameters, index);
    return STNodeFactory.createFunctionSignatureNode(signature.openParenToken, updatedParams,
            signature.closeParenToken, signature.returnTypeDesc);
}
/**
 * Create a new parameter list in which every parameter from {@code index}
 * onwards that lacks a parameter name is given a synthetic missing-identifier
 * token as its name. Parameters before {@code index} are copied unchanged.
 *
 * @param parameters Original parameter node list
 * @param index Index of the first parameter known to be missing its name
 * @return Updated parameter node list
 */
private STNode getUpdatedParamList(STNode parameters, int index) {
    int paramCount = parameters.bucketCount();
    int newIndex = 0;
    ArrayList<STNode> newParams = new ArrayList<>();
    // Copy the already-validated prefix as-is. BUG FIX: the previous code added
    // childInBucket(index) here, which appended the same element `index` times
    // instead of copying the first `index` parameters.
    for (; newIndex < index; newIndex++) {
        newParams.add(parameters.childInBucket(newIndex));
    }
    // For the remainder, replace any missing param name with a missing token.
    for (; newIndex < paramCount; newIndex++) {
        STNode param = parameters.childInBucket(newIndex);
        STNode paramName = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        switch (param.kind) {
            case REQUIRED_PARAM:
                STRequiredParameterNode requiredParam = (STRequiredParameterNode) param;
                if (isEmpty(requiredParam.paramName)) {
                    param = STNodeFactory.createRequiredParameterNode(requiredParam.leadingComma,
                            requiredParam.annotations, requiredParam.visibilityQualifier, requiredParam.typeName,
                            paramName);
                }
                break;
            case DEFAULTABLE_PARAM:
                STDefaultableParameterNode defaultableParam = (STDefaultableParameterNode) param;
                if (isEmpty(defaultableParam.paramName)) {
                    param = STNodeFactory.createDefaultableParameterNode(defaultableParam.leadingComma,
                            defaultableParam.annotations, defaultableParam.visibilityQualifier,
                            defaultableParam.typeName, paramName, defaultableParam.equalsToken,
                            defaultableParam.expression);
                }
                break;
            case REST_PARAM:
                STRestParameterNode restParam = (STRestParameterNode) param;
                if (isEmpty(restParam.paramName)) {
                    param = STNodeFactory.createRestParameterNode(restParam.leadingComma, restParam.annotations,
                            restParam.typeName, restParam.ellipsisToken, paramName);
                }
                break;
            default:
                break;
        }
        newParams.add(param);
    }
    return STNodeFactory.createNodeList(newParams);
}
// A node is "empty" when the parser materialized it as null (optional part absent).
private boolean isEmpty(STNode node) {
    return node == null;
}
/**
 * Parse the <code>function</code> keyword. The token is validated before
 * consuming, since this can be reached while recovering.
 *
 * @return Parsed node
 */
private STNode parseFunctionKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FUNCTION_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.FUNCTION_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a function name.
 *
 * @return Parsed node
 */
private STNode parseFunctionName() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.FUNC_NAME);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse an open parenthesis.
 *
 * @param ctx Context of the parenthesis
 * @return Parsed node
 */
private STNode parseOpenParenthesis(ParserRuleContext ctx) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OPEN_PAREN_TOKEN) {
        // The context is also forwarded as a recovery argument.
        Solution sol = recover(nextToken, ctx, ctx);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a close parenthesis.
 *
 * @return Parsed node
 */
private STNode parseCloseParenthesis() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CLOSE_PAREN_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.CLOSE_PARENTHESIS);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse parameter list.
 * </p>
 * <code>
 * param-list := required-params [, defaultable-params] [, rest-param]
 * <br/> | defaultable-params [, rest-param]
 * <br/> | [rest-param]
 * <br/><br/>
 * required-params := required-param (, required-param)*
 * <br/><br/>
 * required-param := [annots] [public] type-descriptor [param-name]
 * <br/><br/>
 * defaultable-params := defaultable-param (, defaultable-param)*
 * <br/><br/>
 * defaultable-param := [annots] [public] type-descriptor [param-name] default-value
 * <br/><br/>
 * rest-param := [annots] type-descriptor ... [param-name]
 * <br/><br/>
 * param-name := identifier
 * </code>
 *
 * @param isParamNameOptional Whether the param names in the signature is optional or not.
 * @return Parsed node
 */
private STNode parseParamList(boolean isParamNameOptional) {
    // NOTE: the PARAM_LIST context pushed here is popped by the caller
    // (parseFuncSignature) after the closing parenthesis.
    startContext(ParserRuleContext.PARAM_LIST);
    ArrayList<STNode> paramsList = new ArrayList<>();
    STToken token = peek();
    if (isEndOfParametersList(token.kind)) {
        // Empty parameter list.
        STNode params = STNodeFactory.createNodeList(paramsList);
        return params;
    }
    // The first parameter has no leading comma.
    STNode startingComma = STNodeFactory.createEmptyNode();
    startContext(ParserRuleContext.REQUIRED_PARAM);
    STNode firstParam = parseParameter(startingComma, SyntaxKind.REQUIRED_PARAM, isParamNameOptional);
    SyntaxKind prevParamKind = firstParam.kind;
    paramsList.add(firstParam);
    token = peek();
    while (!isEndOfParametersList(token.kind)) {
        // Pick the parsing context from the previous param's kind: once a
        // defaultable param is seen, later params are parsed in that context;
        // anything after a rest param is an error.
        switch (prevParamKind) {
            case REST_PARAM:
                this.errorHandler.reportInvalidNode(token, "cannot have more parameters after the rest-parameter");
                startContext(ParserRuleContext.REQUIRED_PARAM);
                break;
            case DEFAULTABLE_PARAM:
                startContext(ParserRuleContext.DEFAULTABLE_PARAM);
                break;
            case REQUIRED_PARAM:
            default:
                startContext(ParserRuleContext.REQUIRED_PARAM);
                break;
        }
        // null param-end means we reached the closing parenthesis.
        STNode paramEnd = parseParameterRhs(token.kind);
        if (paramEnd == null) {
            endContext();
            break;
        }
        STNode param = parseParameter(paramEnd, prevParamKind, isParamNameOptional);
        prevParamKind = param.kind;
        paramsList.add(param);
        token = peek();
    }
    STNode params = STNodeFactory.createNodeList(paramsList);
    return params;
}
// Parses the separator after a parameter: a comma when more params follow, or
// null at the closing parenthesis (signals end of list to the caller).
private STNode parseParameterRhs(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            // Unexpected token: recover, then retry with the recovered token kind.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.PARAM_END);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseParameterRhs(solution.tokenKind);
    }
}
/**
 * Parse a single parameter. A parameter can be a required parameter, a
 * defaultable parameter, or a rest parameter.
 *
 * @param leadingComma Comma that occurs before the param
 * @param prevParamKind Kind of the parameter that precedes current parameter
 * @param isParamNameOptional Whether the param names in the signature is optional or not
 * @return Parsed node
 */
private STNode parseParameter(STNode leadingComma, SyntaxKind prevParamKind, boolean isParamNameOptional) {
    SyntaxKind nextTokenKind = peek().kind;
    return parseParameter(nextTokenKind, prevParamKind, leadingComma, 1, isParamNameOptional);
}
// Convenience overload: peeks the next token kind and delegates.
private STNode parseParameter(SyntaxKind prevParamKind, STNode leadingComma, int nextTokenOffset,
                              boolean isParamNameOptional) {
    SyntaxKind nextTokenKind = peek().kind;
    return parseParameter(nextTokenKind, prevParamKind, leadingComma, nextTokenOffset, isParamNameOptional);
}
// Parses the optional annotations at the start of a parameter, then delegates
// to parseParamGivenAnnots for the rest of the parameter.
private STNode parseParameter(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                              int nextTokenOffset, boolean isParamNameOptional) {
    STNode annots;
    switch (nextTokenKind) {
        case AT_TOKEN:
            annots = parseAnnotations(nextTokenKind);
            // Re-peek: annotations consumed tokens.
            nextTokenKind = peek().kind;
            break;
        case PUBLIC_KEYWORD:
        case IDENTIFIER_TOKEN:
            // No annotations present.
            annots = STNodeFactory.createNodeList(new ArrayList<>());
            break;
        default:
            if (nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN && isTypeStartingToken(nextTokenKind)) {
                annots = STNodeFactory.createNodeList(new ArrayList<>());
                break;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.PARAMETER_START, prevParamKind, leadingComma,
                    nextTokenOffset, isParamNameOptional);
            if (solution.action == Action.KEEP) {
                annots = STNodeFactory.createNodeList(new ArrayList<>());
                break;
            }
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // After removal-style recovery the lookahead offset resets to 0.
            return parseParameter(solution.tokenKind, prevParamKind, leadingComma, 0, isParamNameOptional);
    }
    // NOTE(review): a literal 1 is passed here rather than nextTokenOffset —
    // looks intentional since annotations may have consumed tokens, but confirm.
    return parseParamGivenAnnots(nextTokenKind, prevParamKind, leadingComma, annots, 1, isParamNameOptional);
}
// Convenience overload: peeks the next token kind and delegates.
// Consistency fix: the last parameter was previously named `isFuncDef`, which
// was misleading — it is forwarded as the `isParamNameOptional` flag of the
// target overload. Renamed to match (private method; callers are unaffected).
private STNode parseParamGivenAnnots(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
                                     int nextNextTokenOffset, boolean isParamNameOptional) {
    return parseParamGivenAnnots(peek().kind, prevParamKind, leadingComma, annots, nextNextTokenOffset,
            isParamNameOptional);
}
// Parses the optional `public` qualifier of a parameter (after any annotations),
// then delegates to parse the type and the rest of the parameter.
private STNode parseParamGivenAnnots(SyntaxKind nextTokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                     STNode annots, int nextTokenOffset, boolean isParamNameOptional) {
    STNode qualifier;
    switch (nextTokenKind) {
        case PUBLIC_KEYWORD:
            qualifier = parseQualifier();
            break;
        case IDENTIFIER_TOKEN:
            qualifier = STNodeFactory.createEmptyNode();
            break;
        case AT_TOKEN:
            // Deliberate fall-through: a stray annotation token here is handled
            // by recovery in the default branch.
        default:
            if (isTypeStartingToken(nextTokenKind) && nextTokenKind != SyntaxKind.IDENTIFIER_TOKEN) {
                qualifier = STNodeFactory.createEmptyNode();
                break;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.PARAMETER_WITHOUT_ANNOTS, prevParamKind,
                    leadingComma, annots, nextTokenOffset, isParamNameOptional);
            if (solution.action == Action.KEEP) {
                qualifier = STNodeFactory.createEmptyNode();
                break;
            }
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // After removal-style recovery the lookahead offset resets to 0.
            return parseParamGivenAnnots(solution.tokenKind, prevParamKind, leadingComma, annots, 0,
                    isParamNameOptional);
    }
    return parseParamGivenAnnotsAndQualifier(prevParamKind, leadingComma, annots, qualifier, isParamNameOptional);
}
// With annotations and qualifier already parsed, parse the param's type and the
// remainder (name, default value, rest `...`), then pop the param context.
private STNode parseParamGivenAnnotsAndQualifier(SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
                                                 STNode qualifier, boolean isParamNameOptional) {
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode parsedParam =
            parseAfterParamType(prevParamKind, leadingComma, annots, qualifier, typeDesc, isParamNameOptional);
    endContext();
    return parsedParam;
}
// Convenience overload: peeks the next token kind and delegates.
private STNode parseAfterParamType(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,
                                   STNode type, boolean isParamNameOptional) {
    SyntaxKind nextTokenKind = peek().kind;
    return parseAfterParamType(nextTokenKind, prevParamKind, leadingComma, annots, qualifier, type,
            isParamNameOptional);
}
// Parses what follows a parameter's type: `...` makes it a rest param, an
// identifier names it, `=` (only when names are optional) starts a defaultable
// param with no name. Breaking out of the switch falls into error recovery.
private STNode parseAfterParamType(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma,
                                   STNode annots, STNode qualifier, STNode type, boolean isParamNameOptional) {
    STNode paramName;
    switch (tokenKind) {
        case ELLIPSIS_TOKEN:
            // Rest parameter; qualifiers are not allowed on rest params.
            switchContext(ParserRuleContext.REST_PARAM);
            reportInvalidQualifier(qualifier);
            STNode ellipsis = parseEllipsis();
            if (isParamNameOptional && peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
                paramName = STNodeFactory.createEmptyNode();
            } else {
                paramName = parseVariableName();
            }
            return STNodeFactory.createRestParameterNode(leadingComma, annots, type, ellipsis, paramName);
        case IDENTIFIER_TOKEN:
            paramName = parseVariableName();
            return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
        case EQUAL_TOKEN:
            if (!isParamNameOptional) {
                // Named param required: fall out of the switch into recovery.
                break;
            }
            // Defaultable param with an omitted name (function-type signatures).
            paramName = STNodeFactory.createEmptyNode();
            return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
        default:
            if (!isParamNameOptional) {
                break;
            }
            paramName = STNodeFactory.createEmptyNode();
            return parseParameterRhs(prevParamKind, leadingComma, annots, qualifier, type, paramName);
    }
    // Error recovery path: retry with the recovered token kind.
    STToken token = peek();
    Solution solution = recover(token, ParserRuleContext.AFTER_PARAMETER_TYPE, prevParamKind, leadingComma, annots,
            qualifier, type, isParamNameOptional);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseAfterParamType(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,
            isParamNameOptional);
}
/**
 * Parse an ellipsis (<code>...</code>) token.
 *
 * @return Parsed node
 */
private STNode parseEllipsis() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ELLIPSIS_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.ELLIPSIS);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse the right hand side of a required/defaultable parameter.
 * </p>
 * <code>parameter-rhs := [= expression]</code>
 *
 * @param prevParamKind Kind of the parameter that precedes current parameter
 * @param leadingComma Comma that precedes this parameter
 * @param annots Annotations attached to the parameter
 * @param qualifier Visibility qualifier
 * @param type Type descriptor
 * @param paramName Name of the parameter
 * @return Parsed parameter node
 */
private STNode parseParameterRhs(SyntaxKind prevParamKind, STNode leadingComma, STNode annots, STNode qualifier,
                                 STNode type, STNode paramName) {
    SyntaxKind nextTokenKind = peek().kind;
    return parseParameterRhs(nextTokenKind, prevParamKind, leadingComma, annots, qualifier, type, paramName);
}
// Finishes a parameter: no default value -> required param (error if it follows
// a defaultable one); `= expr` -> defaultable param (upgrading the context when
// this is the first defaultable param); otherwise recover and retry.
private STNode parseParameterRhs(SyntaxKind tokenKind, SyntaxKind prevParamKind, STNode leadingComma, STNode annots,
                                 STNode qualifier, STNode type, STNode paramName) {
    if (isEndOfParameter(tokenKind)) {
        if (prevParamKind == SyntaxKind.DEFAULTABLE_PARAM) {
            // Required params may not appear after defaultable params.
            this.errorHandler.reportInvalidNode(peek(),
                    "cannot have a required parameter after a defaultable parameter");
        }
        return STNodeFactory.createRequiredParameterNode(leadingComma, annots, qualifier, type, paramName);
    } else if (tokenKind == SyntaxKind.EQUAL_TOKEN) {
        // This is the first defaultable param: switch the parsing context.
        if (prevParamKind == SyntaxKind.REQUIRED_PARAM) {
            switchContext(ParserRuleContext.DEFAULTABLE_PARAM);
        }
        STNode equal = parseAssignOp();
        STNode expr = parseExpression();
        return STNodeFactory.createDefaultableParameterNode(leadingComma, annots, qualifier, type, paramName, equal,
                expr);
    } else {
        STToken token = peek();
        Solution solution = recover(token, ParserRuleContext.PARAMETER_NAME_RHS, prevParamKind, leadingComma,
                annots, qualifier, type, paramName);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        return parseParameterRhs(solution.tokenKind, prevParamKind, leadingComma, annots, qualifier, type,
                paramName);
    }
}
/**
 * Parse a comma token.
 *
 * @return Parsed node
 */
private STNode parseComma() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.COMMA_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.COMMA);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the return type descriptor of a function. A return type descriptor has
 * the following structure.
 *
 * <code>return-type-descriptor := [ returns annots type-descriptor ]</code>
 *
 * @return Parsed node
 */
private STNode parseFuncReturnTypeDescriptor() {
    SyntaxKind nextTokenKind = peek().kind;
    return parseFuncReturnTypeDescriptor(nextTokenKind);
}
// Parses an optional `returns [annots] type-desc`. A following `{` or `=` means
// the return type was omitted; a two-token lookahead tolerates one stray token
// before `returns`.
private STNode parseFuncReturnTypeDescriptor(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case OPEN_BRACE_TOKEN:
        case EQUAL_TOKEN:
            // Function body (or default value) starts here: no return type.
            return STNodeFactory.createEmptyNode();
        case RETURNS_KEYWORD:
            break;
        default:
            // Tolerate a single unexpected token if `returns` follows it.
            STToken nextNextToken = getNextNextToken(nextTokenKind);
            if (nextNextToken.kind == SyntaxKind.RETURNS_KEYWORD) {
                break;
            }
            return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = parseReturnsKeyword();
    STNode annot = parseAnnotations();
    STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
}
/**
 * Parse the <code>returns</code> keyword.
 *
 * @return Return-keyword node
 */
private STNode parseReturnsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RETURNS_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.RETURNS_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
* <p>
* Parse a type descriptor. A type descriptor has the following structure.
* </p>
* <code>type-descriptor :=
* simple-type-descriptor<br/>
* | structured-type-descriptor<br/>
* | behavioral-type-descriptor<br/>
* | singleton-type-descriptor<br/>
* | union-type-descriptor<br/>
* | optional-type-descriptor<br/>
* | any-type-descriptor<br/>
* | anydata-type-descriptor<br/>
* | byte-type-descriptor<br/>
* | json-type-descriptor<br/>
* | type-descriptor-reference<br/>
* | ( type-descriptor )
* <br/>
* type-descriptor-reference := qualified-identifier</code>
*
* @return Parsed node
*/
private STNode parseTypeDescriptor(ParserRuleContext context, boolean isTypedBindingPattern) {
startContext(context);
STNode typeDesc = parseTypeDescriptorInternal(context, isTypedBindingPattern);
endContext();
return typeDesc;
}
    // Convenience overload: parse a type descriptor outside a typed-binding-pattern.
    private STNode parseTypeDescriptor(ParserRuleContext context) {
        return parseTypeDescriptor(context, false);
    }
    // Convenience overload: internal type-descriptor parse outside a typed-binding-pattern.
    private STNode parseTypeDescriptorInternal(ParserRuleContext context) {
        return parseTypeDescriptorInternal(context, false);
    }
private STNode parseTypeDescriptorInternal(ParserRuleContext context, boolean isTypedBindingPattern) {
STToken token = peek();
STNode typeDesc = parseTypeDescriptorInternal(token.kind, context);
return parseComplexTypeDescriptor(typeDesc, context, isTypedBindingPattern);
}
/**
* This will handle the parsing of optional,array,union type desc to infinite length.
*
* @param typeDesc
*
* @return Parsed type descriptor node
*/
private STNode parseComplexTypeDescriptor(STNode typeDesc, ParserRuleContext context,
boolean isTypedBindingPattern) {
STToken nextToken = peek();
switch (nextToken.kind) {
case QUESTION_MARK_TOKEN:
return parseComplexTypeDescriptor(parseOptionalTypeDescriptor(typeDesc), context,
isTypedBindingPattern);
case OPEN_BRACKET_TOKEN:
if (isTypedBindingPattern) {
return typeDesc;
}
return parseComplexTypeDescriptor(parseArrayTypeDescriptor(typeDesc), context, isTypedBindingPattern);
case PIPE_TOKEN:
return parseUnionTypeDescriptor(typeDesc, context);
case BITWISE_AND_TOKEN:
return parseIntersectionTypeDescriptor(typeDesc, context);
default:
return typeDesc;
}
}
    /**
     * <p>
     * Parse a type descriptor, given the next token kind.
     * </p>
     * If the preceding token is <code>?</code> then it is an optional type descriptor
     *
     * @param tokenKind Next token kind
     * @param context Current context
     * @return Parsed node
     */
    private STNode parseTypeDescriptorInternal(SyntaxKind tokenKind, ParserRuleContext context) {
        switch (tokenKind) {
            case IDENTIFIER_TOKEN:
                // User-defined type reference: identifier or qualified identifier.
                return parseTypeReference();
            case RECORD_KEYWORD:
                return parseRecordTypeDescriptor();
            case OBJECT_KEYWORD:
            case ABSTRACT_KEYWORD:
            case CLIENT_KEYWORD:
                // `abstract`/`client` are object-type qualifiers, so they also start an object type.
                return parseObjectTypeDescriptor();
            case OPEN_PAREN_TOKEN:
                // Either the nil type `()` or a parenthesised type descriptor.
                return parseNilOrParenthesisedTypeDesc();
            case MAP_KEYWORD: // map type desc
            case FUTURE_KEYWORD: // future type desc
            case TYPEDESC_KEYWORD: // typedesc type desc
                return parseParameterizedTypeDescriptor();
            case ERROR_KEYWORD: // error type desc
                return parseErrorTypeDescriptor();
            case STREAM_KEYWORD: // stream type desc
                return parseStreamTypeDescriptor();
            case TABLE_KEYWORD: // table type desc
                return parseTableTypeDescriptor();
            case FUNCTION_KEYWORD:
                return parseFunctionTypeDesc();
            case OPEN_BRACKET_TOKEN:
                return parseTupleTypeDesc();
            default:
                // Singleton and simple built-in types are checked before falling back
                // to error recovery.
                if (isSingletonTypeDescStart(tokenKind, true)) {
                    return parseSingletonTypeDesc();
                }
                if (isSimpleType(tokenKind)) {
                    return parseSimpleTypeDescriptor();
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.TYPE_DESCRIPTOR, context);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Retry with the token kind suggested by the recovery.
                return parseTypeDescriptorInternal(solution.tokenKind, context);
        }
    }
private STNode parseNilOrParenthesisedTypeDesc() {
STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
return parseNilOrParenthesisedTypeDescRhs(openParen);
}
    // Dispatch on the token following the open-paren to decide nil vs parenthesised type.
    private STNode parseNilOrParenthesisedTypeDescRhs(STNode openParen) {
        return parseNilOrParenthesisedTypeDescRhs(peek().kind, openParen);
    }
    /**
     * Parse the portion after the open-paren: an immediate close-paren yields the nil
     * type `()`; a type-starting token yields a parenthesised type descriptor;
     * anything else triggers error recovery and a retry.
     *
     * @param nextTokenKind Kind of the token after the open parenthesis
     * @param openParen Already-parsed open parenthesis
     * @return Parsed node
     */
    private STNode parseNilOrParenthesisedTypeDescRhs(SyntaxKind nextTokenKind, STNode openParen) {
        STNode closeParen;
        switch (nextTokenKind) {
            case CLOSE_PAREN_TOKEN:
                closeParen = parseCloseParenthesis();
                return STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen);
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    STNode typedesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_PARENTHESIS);
                    closeParen = parseCloseParenthesis();
                    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typedesc, closeParen);
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.NIL_OR_PARENTHESISED_TYPE_DESC_RHS, openParen);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Retry with the token kind suggested by the recovery.
                return parseNilOrParenthesisedTypeDescRhs(solution.tokenKind, openParen);
        }
    }
/**
* Parse simple type descriptor.
*
* @return Parsed node
*/
private STNode parseSimpleTypeDescriptor() {
STToken node = peek();
if (isSimpleType(node.kind)) {
STToken token = consume();
SyntaxKind typeKind = getTypeSyntaxKind(token.kind);
return STNodeFactory.createBuiltinSimpleNameReferenceNode(typeKind, token);
} else {
Solution sol = recover(peek(), ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR);
return sol.recoveredNode;
}
}
/**
* <p>
* Parse function body. A function body has the following structure.
* </p>
* <code>
* function-body := function-body-block | external-function-body
* external-function-body := = annots external ;
* function-body-block := { [default-worker-init, named-worker-decl+] default-worker }
* </code>
*
* @param isObjectMethod Flag indicating whether this is an object-method
* @return Parsed node
*/
private STNode parseFunctionBody(boolean isObjectMethod) {
STToken token = peek();
return parseFunctionBody(token.kind, isObjectMethod);
}
    /**
     * Parse function body, given the next token kind.
     *
     * @param tokenKind Next token kind
     * @param isObjectMethod Flag indicating whether this is an object-method
     * @return Parsed node
     */
    protected STNode parseFunctionBody(SyntaxKind tokenKind, boolean isObjectMethod) {
        switch (tokenKind) {
            case EQUAL_TOKEN:
                // `= annots external ;`
                return parseExternalFunctionBody();
            case OPEN_BRACE_TOKEN:
                return parseFunctionBodyBlock(false);
            case RIGHT_DOUBLE_ARROW:
                // Expression-bodied function: `=> expr`
                return parseExpressionFuncBody(false);
            case SEMICOLON_TOKEN:
                // A bare semicolon is a valid body only for (abstract) object methods.
                // For non-object methods this deliberately falls through to recovery.
                if (isObjectMethod) {
                    return parseSemicolon();
                }
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.FUNC_BODY, isObjectMethod);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // NONE means the recovery could not suggest a token; emit a missing token
                // instead of recursing, to avoid an infinite retry loop.
                if (solution.tokenKind == SyntaxKind.NONE) {
                    return STNodeFactory.createMissingToken(solution.tokenKind);
                }
                return parseFunctionBody(solution.tokenKind, isObjectMethod);
        }
    }
    /**
     * <p>
     * Parse function body block. A function body block has the following structure.
     * </p>
     *
     * <code>
     * function-body-block := { [default-worker-init, named-worker-decl+] default-worker }<br/>
     * default-worker-init := sequence-stmt<br/>
     * default-worker := sequence-stmt<br/>
     * named-worker-decl := worker worker-name return-type-descriptor { sequence-stmt }<br/>
     * worker-name := identifier<br/>
     * </code>
     *
     * @param isAnonFunc Flag indicating whether the func body belongs to an anonymous function
     * @return Parsed node
     */
    private STNode parseFunctionBodyBlock(boolean isAnonFunc) {
        startContext(ParserRuleContext.FUNC_BODY_BLOCK);
        STNode openBrace = parseOpenBrace();
        STToken token = peek();
        // Statements are partitioned into three phases: statements before the first
        // named worker, the named workers themselves, and statements after them.
        ArrayList<STNode> firstStmtList = new ArrayList<>();
        ArrayList<STNode> workers = new ArrayList<>();
        ArrayList<STNode> secondStmtList = new ArrayList<>();
        ParserRuleContext currentCtx = ParserRuleContext.DEFAULT_WORKER_INIT;
        boolean hasNamedWorkers = false;
        while (!isEndOfFuncBodyBlock(token.kind, isAnonFunc)) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }
            // NOTE: the switch below uses deliberate fall-through to transition
            // between phases within the same iteration.
            switch (currentCtx) {
                case DEFAULT_WORKER_INIT:
                    if (stmt.kind != SyntaxKind.NAMED_WORKER_DECLARATION) {
                        firstStmtList.add(stmt);
                        break;
                    }
                    // First named worker seen: switch phase and fall through.
                    currentCtx = ParserRuleContext.NAMED_WORKERS;
                    hasNamedWorkers = true;
                case NAMED_WORKERS:
                    if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                        workers.add(stmt);
                        break;
                    }
                    // A non-worker statement ends the worker phase; fall through.
                    currentCtx = ParserRuleContext.DEFAULT_WORKER;
                case DEFAULT_WORKER:
                default:
                    // Named workers are not allowed after the default-worker begins.
                    if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
                        this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
                        break;
                    }
                    secondStmtList.add(stmt);
                    break;
            }
            token = peek();
        }
        STNode namedWorkersList;
        STNode statements;
        if (hasNamedWorkers) {
            STNode workerInitStatements = STNodeFactory.createNodeList(firstStmtList);
            STNode namedWorkers = STNodeFactory.createNodeList(workers);
            namedWorkersList = STNodeFactory.createNamedWorkerDeclarator(workerInitStatements, namedWorkers);
            statements = STNodeFactory.createNodeList(secondStmtList);
        } else {
            // No workers: everything collected so far is the default-worker body.
            namedWorkersList = STNodeFactory.createEmptyNode();
            statements = STNodeFactory.createNodeList(firstStmtList);
        }
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createFunctionBodyBlockNode(openBrace, namedWorkersList, statements, closeBrace);
    }
private boolean isEndOfFuncBodyBlock(SyntaxKind nextTokenKind, boolean isAnonFunc) {
if (isAnonFunc) {
switch (nextTokenKind) {
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case OPEN_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case COMMA_TOKEN:
case PUBLIC_KEYWORD:
case EOF_TOKEN:
case EQUAL_TOKEN:
case BACKTICK_TOKEN:
return true;
default:
break;
}
}
return isEndOfStatements();
}
private boolean isEndOfRecordTypeNode(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case TYPE_KEYWORD:
case PUBLIC_KEYWORD:
default:
return endOfModuleLevelNode(1);
}
}
    // An object type body ends wherever a module-level construct would end,
    // with object-specific handling enabled (see endOfModuleLevelNode).
    private boolean isEndOfObjectTypeNode() {
        return endOfModuleLevelNode(1, true);
    }
private boolean isEndOfStatements() {
switch (peek().kind) {
case RESOURCE_KEYWORD:
return true;
default:
return endOfModuleLevelNode(1);
}
}
    // Convenience overload: module-level end check without object-specific handling.
    private boolean endOfModuleLevelNode(int peekIndex) {
        return endOfModuleLevelNode(peekIndex, false);
    }
    /**
     * Check whether the token at the given lookahead position ends a module-level node.
     *
     * @param peekIndex 1-based lookahead index of the token to inspect
     * @param isObject Whether the check runs inside an object type body
     * @return <code>true</code> if a module-level construct ends here
     */
    private boolean endOfModuleLevelNode(int peekIndex, boolean isObject) {
        switch (peek(peekIndex).kind) {
            case EOF_TOKEN:
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACE_PIPE_TOKEN:
            case IMPORT_KEYWORD:
            case CONST_KEYWORD:
            case ANNOTATION_KEYWORD:
            case LISTENER_KEYWORD:
                return true;
            case SERVICE_KEYWORD:
                // NOTE(review): this passes a hard-coded 1 rather than peekIndex —
                // looks like it ignores the caller's lookahead offset; confirm intent.
                return isServiceDeclStart(ParserRuleContext.OBJECT_MEMBER, 1);
            case PUBLIC_KEYWORD:
                // Skip a visibility qualifier and re-check the following token.
                return endOfModuleLevelNode(peekIndex + 1, isObject);
            case FUNCTION_KEYWORD:
                if (isObject) {
                    return false;
                }
                // At module level, a `function <identifier>` starts a new function definition.
                return peek(peekIndex + 1).kind == SyntaxKind.IDENTIFIER_TOKEN;
            default:
                return false;
        }
    }
/**
* Check whether the given token is an end of a parameter.
*
* @param tokenKind Next token kind
* @return <code>true</code> if the token represents an end of a parameter. <code>false</code> otherwise
*/
private boolean isEndOfParameter(SyntaxKind tokenKind) {
switch (tokenKind) {
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case COMMA_TOKEN:
case RETURNS_KEYWORD:
case TYPE_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case AT_TOKEN:
return true;
default:
return endOfModuleLevelNode(1);
}
}
/**
* Check whether the given token is an end of a parameter-list.
*
* @param tokenKind Next token kind
* @return <code>true</code> if the token represents an end of a parameter-list. <code>false</code> otherwise
*/
private boolean isEndOfParametersList(SyntaxKind tokenKind) {
switch (tokenKind) {
case CLOSE_PAREN_TOKEN:
case SEMICOLON_TOKEN:
case RETURNS_KEYWORD:
case TYPE_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case OPEN_BRACE_TOKEN:
return true;
default:
return endOfModuleLevelNode(1);
}
}
    /**
     * Parse type reference or variable reference.
     * Delegates to the qualified-identifier parser with the statement-start context.
     *
     * @return Parsed node
     */
    private STNode parseStatementStartIdentifier() {
        return parseQualifiedIdentifier(ParserRuleContext.STATEMENT_START_IDENTIFIER);
    }
/**
* Parse variable name.
*
* @return Parsed node
*/
private STNode parseVariableName() {
STToken token = peek();
return parseVariableName(token.kind);
}
/**
* Parse variable name.
*
* @return Parsed node
*/
private STNode parseVariableName(SyntaxKind tokenKind) {
if (tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
Solution sol = recover(peek(), ParserRuleContext.VARIABLE_NAME);
return sol.recoveredNode;
}
}
/**
* Parse open brace.
*
* @return Parsed node
*/
private STNode parseOpenBrace() {
STToken token = peek();
if (token.kind == SyntaxKind.OPEN_BRACE_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.OPEN_BRACE);
return sol.recoveredNode;
}
}
/**
* Parse close brace.
*
* @return Parsed node
*/
private STNode parseCloseBrace() {
STToken token = peek();
if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CLOSE_BRACE);
return sol.recoveredNode;
}
}
/**
* <p>
* Parse external function body. An external function body has the following structure.
* </p>
* <code>
* external-function-body := = annots external ;
* </code>
*
* @return Parsed node
*/
private STNode parseExternalFunctionBody() {
startContext(ParserRuleContext.EXTERNAL_FUNC_BODY);
STNode assign = parseAssignOp();
STNode annotation = parseAnnotations();
STNode externalKeyword = parseExternalKeyword();
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createExternalFunctionBodyNode(assign, annotation, externalKeyword, semicolon);
}
/**
* Parse semicolon.
*
* @return Parsed node
*/
private STNode parseSemicolon() {
STToken token = peek();
if (token.kind == SyntaxKind.SEMICOLON_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.SEMICOLON);
return sol.recoveredNode;
}
}
/**
* Parse <code>external</code> keyword.
*
* @return Parsed node
*/
private STNode parseExternalKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.EXTERNAL_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.EXTERNAL_KEYWORD);
return sol.recoveredNode;
}
}
/*
* Operators
*/
/**
* Parse assign operator.
*
* @return Parsed node
*/
private STNode parseAssignOp() {
STToken token = peek();
if (token.kind == SyntaxKind.EQUAL_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.ASSIGN_OP);
return sol.recoveredNode;
}
}
/**
* Parse binary operator.
*
* @return Parsed node
*/
private STNode parseBinaryOperator() {
STToken token = peek();
if (isBinaryOperator(token.kind)) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.BINARY_OPERATOR);
return sol.recoveredNode;
}
}
    /**
     * Check whether the given token kind is a binary operator.
     *
     * @param kind STToken kind
     * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
     */
    private boolean isBinaryOperator(SyntaxKind kind) {
        switch (kind) {
            // Arithmetic
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case SLASH_TOKEN:
            case ASTERISK_TOKEN:
            // Comparison
            case GT_TOKEN:
            case LT_TOKEN:
            case DOUBLE_EQUAL_TOKEN:
            case TRIPPLE_EQUAL_TOKEN:
            case LT_EQUAL_TOKEN:
            case GT_EQUAL_TOKEN:
            case NOT_EQUAL_TOKEN:
            case NOT_DOUBLE_EQUAL_TOKEN:
            // Bitwise and logical
            case BITWISE_AND_TOKEN:
            case BITWISE_XOR_TOKEN:
            case PIPE_TOKEN:
            case LOGICAL_AND_TOKEN:
            case LOGICAL_OR_TOKEN:
            case PERCENT_TOKEN:
            // Shift
            case DOUBLE_LT_TOKEN:
            case DOUBLE_GT_TOKEN:
            case TRIPPLE_GT_TOKEN:
            // Range
            case ELLIPSIS_TOKEN:
            case DOUBLE_DOT_LT_TOKEN:
                return true;
            default:
                return false;
        }
    }
    /**
     * Get the precedence of a given operator.
     *
     * @param binaryOpKind Operator kind
     * @return Precedence of the given operator
     * @throws UnsupportedOperationException if the token kind has no defined precedence
     */
    private OperatorPrecedence getOpPrecedence(SyntaxKind binaryOpKind) {
        switch (binaryOpKind) {
            case ASTERISK_TOKEN: // multiplication
            case SLASH_TOKEN: // division
            case PERCENT_TOKEN: // remainder
                return OperatorPrecedence.MULTIPLICATIVE;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                return OperatorPrecedence.ADDITIVE;
            case GT_TOKEN:
            case LT_TOKEN:
            case GT_EQUAL_TOKEN:
            case LT_EQUAL_TOKEN:
            case IS_KEYWORD:
                return OperatorPrecedence.BINARY_COMPARE;
            case DOT_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case OPEN_PAREN_TOKEN:
                // Field access, member access, and function-call all bind tightest.
                return OperatorPrecedence.MEMBER_ACCESS;
            case DOUBLE_EQUAL_TOKEN:
            case TRIPPLE_EQUAL_TOKEN:
            case NOT_EQUAL_TOKEN:
            case NOT_DOUBLE_EQUAL_TOKEN:
                return OperatorPrecedence.EQUALITY;
            case BITWISE_AND_TOKEN:
                return OperatorPrecedence.BITWISE_AND;
            case BITWISE_XOR_TOKEN:
                return OperatorPrecedence.BITWISE_XOR;
            case PIPE_TOKEN:
                return OperatorPrecedence.BITWISE_OR;
            case LOGICAL_AND_TOKEN:
                return OperatorPrecedence.LOGICAL_AND;
            case LOGICAL_OR_TOKEN:
                return OperatorPrecedence.LOGICAL_OR;
            case RIGHT_ARROW_TOKEN:
                return OperatorPrecedence.REMOTE_CALL_ACTION;
            case RIGHT_DOUBLE_ARROW:
            case SYNC_SEND_TOKEN:
                return OperatorPrecedence.ACTION;
            case DOUBLE_LT_TOKEN:
            case DOUBLE_GT_TOKEN:
            case TRIPPLE_GT_TOKEN:
                return OperatorPrecedence.SHIFT;
            case ELLIPSIS_TOKEN:
            case DOUBLE_DOT_LT_TOKEN:
                return OperatorPrecedence.RANGE;
            default:
                throw new UnsupportedOperationException("Unsupported binary operator '" + binaryOpKind + "'");
        }
    }
    /**
     * <p>
     * Get the operator kind to insert during recovery, given the precedence level.
     * </p>
     * Each precedence level maps to one representative operator of that level, which
     * the recovery mechanism inserts as a placeholder token.
     *
     * @param opPrecedenceLevel Precedence of the given operator
     * @return Kind of the operator to insert
     * @throws UnsupportedOperationException if the precedence level has no representative
     */
    private SyntaxKind getBinaryOperatorKindToInsert(OperatorPrecedence opPrecedenceLevel) {
        switch (opPrecedenceLevel) {
            // Levels above MULTIPLICATIVE collapse to `*` as the inserted operator.
            case UNARY:
            case ACTION:
            case EXPRESSION_ACTION:
            case REMOTE_CALL_ACTION:
            case ANON_FUNC:
            case MULTIPLICATIVE:
                return SyntaxKind.ASTERISK_TOKEN;
            case ADDITIVE:
                return SyntaxKind.PLUS_TOKEN;
            case SHIFT:
                return SyntaxKind.DOUBLE_LT_TOKEN;
            case RANGE:
                return SyntaxKind.ELLIPSIS_TOKEN;
            case BINARY_COMPARE:
                return SyntaxKind.LT_TOKEN;
            case EQUALITY:
                return SyntaxKind.DOUBLE_EQUAL_TOKEN;
            case BITWISE_AND:
                return SyntaxKind.BITWISE_AND_TOKEN;
            case BITWISE_XOR:
                return SyntaxKind.BITWISE_XOR_TOKEN;
            case BITWISE_OR:
                return SyntaxKind.PIPE_TOKEN;
            case LOGICAL_AND:
                return SyntaxKind.LOGICAL_AND_TOKEN;
            case LOGICAL_OR:
                return SyntaxKind.LOGICAL_OR_TOKEN;
            default:
                throw new UnsupportedOperationException(
                        "Unsupported operator precedence level'" + opPrecedenceLevel + "'");
        }
    }
/**
* <p>
* Parse a module type definition.
* </p>
* <code>module-type-defn := metadata [public] type identifier type-descriptor ;</code>
*
* @param metadata Metadata
* @param qualifier Visibility qualifier
* @return Parsed node
*/
private STNode parseModuleTypeDefinition(STNode metadata, STNode qualifier) {
startContext(ParserRuleContext.MODULE_TYPE_DEFINITION);
STNode typeKeyword = parseTypeKeyword();
STNode typeName = parseTypeName();
STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createTypeDefinitionNode(metadata, qualifier, typeKeyword, typeName, typeDescriptor,
semicolon);
}
/**
* Parse type keyword.
*
* @return Parsed node
*/
private STNode parseTypeKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TYPE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.TYPE_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse type name.
*
* @return Parsed node
*/
private STNode parseTypeName() {
STToken token = peek();
if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.TYPE_NAME);
return sol.recoveredNode;
}
}
/**
* <p>
* Parse record type descriptor. A record type descriptor body has the following structure.
* </p>
*
* <code>record-type-descriptor := inclusive-record-type-descriptor | exclusive-record-type-descriptor
* <br/><br/>inclusive-record-type-descriptor := record { field-descriptor* }
* <br/><br/>exclusive-record-type-descriptor := record {| field-descriptor* [record-rest-descriptor] |}
* </code>
*
* @return Parsed node
*/
private STNode parseRecordTypeDescriptor() {
startContext(ParserRuleContext.RECORD_TYPE_DESCRIPTOR);
STNode recordKeyword = parseRecordKeyword();
STNode bodyStartDelimiter = parseRecordBodyStartDelimiter();
boolean isInclusive = bodyStartDelimiter.kind == SyntaxKind.OPEN_BRACE_TOKEN;
STNode fields = parseFieldDescriptors(isInclusive);
STNode bodyEndDelimiter = parseRecordBodyCloseDelimiter(bodyStartDelimiter.kind);
endContext();
return STNodeFactory.createRecordTypeDescriptorNode(recordKeyword, bodyStartDelimiter, fields,
bodyEndDelimiter);
}
/**
* Parse record body start delimiter.
*
* @return Parsed node
*/
private STNode parseRecordBodyStartDelimiter() {
STToken token = peek();
return parseRecordBodyStartDelimiter(token.kind);
}
private STNode parseRecordBodyStartDelimiter(SyntaxKind kind) {
switch (kind) {
case OPEN_BRACE_PIPE_TOKEN:
return parseClosedRecordBodyStart();
case OPEN_BRACE_TOKEN:
return parseOpenBrace();
default:
STToken token = peek();
Solution solution = recover(token, ParserRuleContext.RECORD_BODY_START);
if (solution.action == Action.REMOVE) {
return solution.recoveredNode;
}
return parseRecordBodyStartDelimiter(solution.tokenKind);
}
}
/**
* Parse closed-record body start delimiter.
*
* @return Parsed node
*/
private STNode parseClosedRecordBodyStart() {
STToken token = peek();
if (token.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_START);
return sol.recoveredNode;
}
}
/**
* Parse record body close delimiter.
*
* @return Parsed node
*/
private STNode parseRecordBodyCloseDelimiter(SyntaxKind startingDelimeter) {
switch (startingDelimeter) {
case OPEN_BRACE_PIPE_TOKEN:
return parseClosedRecordBodyEnd();
case OPEN_BRACE_TOKEN:
return parseCloseBrace();
default:
STToken token = peek();
Solution solution = recover(token, ParserRuleContext.RECORD_BODY_END);
if (solution.action == Action.REMOVE) {
return solution.recoveredNode;
}
return parseRecordBodyCloseDelimiter(solution.tokenKind);
}
}
/**
* Parse closed-record body end delimiter.
*
* @return Parsed node
*/
private STNode parseClosedRecordBodyEnd() {
STToken token = peek();
if (token.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.CLOSED_RECORD_BODY_END);
return sol.recoveredNode;
}
}
/**
* Parse record keyword.
*
* @return Parsed node
*/
private STNode parseRecordKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.RECORD_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.RECORD_KEYWORD);
return sol.recoveredNode;
}
}
    /**
     * <p>
     * Parse field descriptors.
     * </p>
     * A record-rest-descriptor must be the last item; anything parsed after it is
     * reported as invalid and discarded.
     *
     * @param isInclusive Whether the record body is inclusive (`{ }`) rather than exclusive (`{| |}`)
     * @return Parsed node
     */
    private STNode parseFieldDescriptors(boolean isInclusive) {
        ArrayList<STNode> recordFields = new ArrayList<>();
        STToken token = peek();
        boolean endOfFields = false;
        while (!isEndOfRecordTypeNode(token.kind)) {
            STNode field = parseFieldOrRestDescriptor(isInclusive);
            if (field == null) {
                endOfFields = true;
                break;
            }
            recordFields.add(field);
            token = peek();
            // A rest descriptor terminates the valid field list; subsequent
            // fields are consumed and reported below.
            if (field.kind == SyntaxKind.RECORD_REST_TYPE) {
                break;
            }
        }
        // Drain and flag any fields that appear after the rest descriptor.
        while (!endOfFields && !isEndOfRecordTypeNode(token.kind)) {
            parseFieldOrRestDescriptor(isInclusive);
            this.errorHandler.reportInvalidNode(token, "cannot have more fields after the rest type descriptor");
            token = peek();
        }
        return STNodeFactory.createNodeList(recordFields);
    }
/**
* <p>
* Parse field descriptor or rest descriptor.
* </p>
*
* <code>
* <br/><br/>field-descriptor := individual-field-descriptor | record-type-reference
* <br/><br/><br/>individual-field-descriptor := metadata type-descriptor field-name [? | default-value] ;
* <br/><br/>field-name := identifier
* <br/><br/>default-value := = expression
* <br/><br/>record-type-reference := * type-reference ;
* <br/><br/>record-rest-descriptor := type-descriptor ... ;
* </code>
*
* @return Parsed node
*/
private STNode parseFieldOrRestDescriptor(boolean isInclusive) {
return parseFieldOrRestDescriptor(peek().kind, isInclusive);
}
    /**
     * Parse a field descriptor, record-type-reference, or rest descriptor, given the
     * next token kind. Returns {@code null} at the end of the record body.
     *
     * @param nextTokenKind Kind of the next token
     * @param isInclusive Whether the record body is inclusive
     * @return Parsed node, or {@code null} when the record body ends
     */
    private STNode parseFieldOrRestDescriptor(SyntaxKind nextTokenKind, boolean isInclusive) {
        switch (nextTokenKind) {
            case CLOSE_BRACE_TOKEN:
            case CLOSE_BRACE_PIPE_TOKEN:
                // End of the record body; the caller stops iterating on null.
                return null;
            case ASTERISK_TOKEN:
                // record-type-reference: `* type-reference ;`
                startContext(ParserRuleContext.RECORD_FIELD);
                STNode asterisk = consume();
                STNode type = parseTypeReference();
                STNode semicolonToken = parseSemicolon();
                endContext();
                return STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
            case AT_TOKEN:
                // Field with leading annotations/metadata.
                startContext(ParserRuleContext.RECORD_FIELD);
                STNode metadata = parseMetaData(nextTokenKind);
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
                STNode fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
                endContext();
                return fieldOrRestDesc;
            default:
                if (isTypeStartingToken(nextTokenKind)) {
                    // Plain field without metadata.
                    startContext(ParserRuleContext.RECORD_FIELD);
                    metadata = createEmptyMetadata();
                    type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RECORD_FIELD);
                    fieldOrRestDesc = parseFieldDescriptor(isInclusive, type, metadata);
                    endContext();
                    return fieldOrRestDesc;
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.RECORD_FIELD_OR_RECORD_END, isInclusive);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Retry with the token kind suggested by the recovery.
                return parseFieldOrRestDescriptor(solution.tokenKind, isInclusive);
        }
    }
private STNode parseFieldDescriptor(boolean isInclusive, STNode type, STNode metadata) {
if (isInclusive) {
STNode fieldName = parseVariableName();
return parseFieldDescriptorRhs(metadata, type, fieldName);
} else {
return parseFieldOrRestDescriptorRhs(metadata, type);
}
}
    /**
     * Parse type reference.
     * <code>type-reference := identifier | qualified-identifier</code>
     *
     * @return Type reference node
     */
    private STNode parseTypeReference() {
        return parseQualifiedIdentifier(ParserRuleContext.TYPE_REFERENCE);
    }
/**
* Parse identifier or qualified identifier.
*
* @return Identifier node
*/
private STNode parseQualifiedIdentifier(ParserRuleContext currentCtx) {
STToken token = peek();
if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
STNode typeRefOrPkgRef = consume();
return parseQualifiedIdentifier(typeRefOrPkgRef);
} else {
Solution sol = recover(token, currentCtx);
return sol.recoveredNode;
}
}
/**
* Parse identifier or qualified identifier, given the starting identifier.
*
* @param identifier Starting identifier
* @return Parse node
*/
private STNode parseQualifiedIdentifier(STNode identifier) {
STToken nextToken = peek(1);
if (nextToken.kind != SyntaxKind.COLON_TOKEN) {
return STNodeFactory.createSimpleNameReferenceNode(identifier);
}
STToken nextNextToken = peek(2);
if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
STToken colon = consume();
STToken varOrFuncName = consume();
return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, varOrFuncName);
} else {
this.errorHandler.removeInvalidToken();
return parseQualifiedIdentifier(identifier);
}
}
/**
* Parse RHS of a field or rest type descriptor.
*
* @param metadata Metadata
* @param type Type descriptor
* @return Parsed node
*/
private STNode parseFieldOrRestDescriptorRhs(STNode metadata, STNode type) {
STToken token = peek();
return parseFieldOrRestDescriptorRhs(token.kind, metadata, type);
}
private STNode parseFieldOrRestDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type) {
switch (kind) {
case ELLIPSIS_TOKEN:
STNode ellipsis = parseEllipsis();
STNode semicolonToken = parseSemicolon();
return STNodeFactory.createRecordRestDescriptorNode(type, ellipsis, semicolonToken);
case IDENTIFIER_TOKEN:
STNode fieldName = parseVariableName();
return parseFieldDescriptorRhs(metadata, type, fieldName);
default:
STToken token = peek();
Solution solution = recover(token, ParserRuleContext.FIELD_OR_REST_DESCIPTOR_RHS, metadata, type);
if (solution.action == Action.REMOVE) {
return solution.recoveredNode;
}
return parseFieldOrRestDescriptorRhs(solution.tokenKind, metadata, type);
}
}
/**
* <p>
* Parse field descriptor rhs.
* </p>
*
* @param metadata Metadata
* @param type Type descriptor
* @param fieldName Field name
* @return Parsed node
*/
private STNode parseFieldDescriptorRhs(STNode metadata, STNode type, STNode fieldName) {
STToken token = peek();
return parseFieldDescriptorRhs(token.kind, metadata, type, fieldName);
}
    /**
     * <p>
     * Parse field descriptor rhs.
     * </p>
     *
     * <code>
     * field-descriptor := [? | default-value] ;
     * <br/>default-value := = expression
     * </code>
     *
     * @param kind Kind of the next token
     * @param metadata Metadata
     * @param type Type descriptor
     * @param fieldName Field name
     * @return Parsed node
     */
    private STNode parseFieldDescriptorRhs(SyntaxKind kind, STNode metadata, STNode type, STNode fieldName) {
        switch (kind) {
            case SEMICOLON_TOKEN:
                // Plain required field: `T name;`
                STNode questionMarkToken = STNodeFactory.createEmptyNode();
                STNode semicolonToken = parseSemicolon();
                return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                        semicolonToken);
            case QUESTION_MARK_TOKEN:
                // Optional field: `T name?;`
                questionMarkToken = parseQuestionMark();
                semicolonToken = parseSemicolon();
                return STNodeFactory.createRecordFieldNode(metadata, type, fieldName, questionMarkToken,
                        semicolonToken);
            case EQUAL_TOKEN:
                // Field with a default value: `T name = expr;`
                STNode equalsToken = parseAssignOp();
                STNode expression = parseExpression();
                semicolonToken = parseSemicolon();
                return STNodeFactory.createRecordFieldWithDefaultValueNode(metadata, type, fieldName, equalsToken,
                        expression, semicolonToken);
            default:
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.FIELD_DESCRIPTOR_RHS, metadata, type, fieldName);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Retry with the token kind suggested by the recovery.
                return parseFieldDescriptorRhs(solution.tokenKind, metadata, type, fieldName);
        }
    }
/**
* Parse question mark.
*
* @return Parsed node
*/
private STNode parseQuestionMark() {
STToken token = peek();
if (token.kind == SyntaxKind.QUESTION_MARK_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.QUESTION_MARK);
return sol.recoveredNode;
}
}
/*
* Statements
*/
/**
* Parse statements, until an end of a block is reached.
*
* @return Parsed node
*/
private STNode parseStatements() {
ArrayList<STNode> stmts = new ArrayList<>();
while (!isEndOfStatements()) {
STNode stmt = parseStatement();
if (stmt == null) {
break;
}
if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
this.errorHandler.reportInvalidNode(null, "named-workers are not allowed here");
break;
}
stmts.add(stmt);
}
return STNodeFactory.createNodeList(stmts);
}
/**
* Parse a single statement.
*
* @return Parsed node
*/
protected STNode parseStatement() {
STToken token = peek();
return parseStatement(token.kind, 1);
}
    /**
     * Parse a single statement, given the next token kind. Leading annotations are
     * consumed here; the actual statement dispatch happens in the annots-aware overload.
     *
     * @param tokenKind Kind of the next token
     * @param nextTokenIndex Lookahead index used by expression-start checks and recovery
     * @return Parsed node, or {@code null} at the end of the block
     */
    private STNode parseStatement(SyntaxKind tokenKind, int nextTokenIndex) {
        STNode annots = null;
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
                // Returning null marks the end of statements.
                return null;
            case SEMICOLON_TOKEN:
                // A stray semicolon is discarded and parsing restarts.
                this.errorHandler.removeInvalidToken();
                return parseStatement();
            case AT_TOKEN:
                // Consume the annotations, then dispatch on the token that follows them.
                annots = parseAnnotations(tokenKind);
                tokenKind = peek().kind;
                break;
            case FINAL_KEYWORD:
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case PANIC_KEYWORD:
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
            case RETURN_KEYWORD:
            case TYPE_KEYWORD:
            case LOCK_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case FORK_KEYWORD:
            case FOREACH_KEYWORD:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WORKER_KEYWORD:
                // Known statement-starting tokens: fall through to the dispatch below.
                break;
            default:
                // Type-starting tokens begin a local var decl; valid LHS expressions
                // begin assignment/call statements.
                if (isTypeStartingToken(tokenKind)) {
                    break;
                }
                if (isValidLHSExpression(tokenKind)) {
                    break;
                }
                STToken token = peek();
                Solution solution = recover(token, ParserRuleContext.STATEMENT, nextTokenIndex);
                if (solution.action == Action.KEEP) {
                    break;
                }
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                // Retry with the token kind suggested by the recovery.
                return parseStatement(solution.tokenKind, nextTokenIndex);
        }
        return parseStatement(tokenKind, annots, nextTokenIndex);
    }
private STNode getAnnotations(STNode nullbaleAnnot) {
if (nullbaleAnnot != null) {
return nullbaleAnnot;
}
return STNodeFactory.createNodeList(new ArrayList<>());
}
    // Convenience overload: dispatch a statement with already-parsed annotations,
    // using the current next-token kind and a lookahead index of 1.
    private STNode parseStatement(STNode annots) {
        return parseStatement(peek().kind, annots, 1);
    }
/**
 * Parse a single statement, given the next token kind and any already-parsed annotations.
 *
 * @param tokenKind Next token kind
 * @param annots Annotations attached to the statement, or {@code null} if there are none
 * @param nextTokenIndex Lookahead index of the token being inspected, used by error recovery
 * @return Parsed node, or {@code null} if a close-brace terminates the statement list
 */
private STNode parseStatement(SyntaxKind tokenKind, STNode annots, int nextTokenIndex) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
            // Block ended, but annotations were already consumed: they belong to nothing.
            this.errorHandler.reportInvalidNode(null, "invalid annotations");
            return null;
        case SEMICOLON_TOKEN:
            // Stray semicolon: drop it and retry, keeping the parsed annotations.
            this.errorHandler.removeInvalidToken();
            return parseStatement(annots);
        case FINAL_KEYWORD:
            STNode finalKeyword = parseFinalKeyword();
            return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
        case IF_KEYWORD:
            return parseIfElseBlock();
        case WHILE_KEYWORD:
            return parseWhileStatement();
        case PANIC_KEYWORD:
            return parsePanicStatement();
        case CONTINUE_KEYWORD:
            return parseContinueStatement();
        case BREAK_KEYWORD:
            return parseBreakStatement();
        case RETURN_KEYWORD:
            return parseReturnStatement();
        case TYPE_KEYWORD:
            return parseLocalTypeDefinitionStatement(getAnnotations(annots));
        case IDENTIFIER_TOKEN:
            // Ambiguous: could be a var-decl starting with a user-defined type, an
            // assignment, or a statement starting with an expression.
            return parseStatementStartsWithIdentifier(getAnnotations(annots));
        case LOCK_KEYWORD:
            return parseLockStatement();
        case OPEN_BRACE_TOKEN:
            return parseBlockNode();
        case WORKER_KEYWORD:
            return parseNamedWorkerDeclaration(getAnnotations(annots));
        case FORK_KEYWORD:
            return parseForkStatement();
        case FOREACH_KEYWORD:
            return parseForEachStatement();
        case START_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
            // Statements that begin with an action or expression.
            return parseExpressionStament(tokenKind, getAnnotations(annots));
        default:
            if (isTypeStartingToken(tokenKind)) {
                // Type-descriptor start: a local variable declaration without `final`.
                finalKeyword = STNodeFactory.createEmptyNode();
                return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
            }
            if (isValidExpressionStart(tokenKind, nextTokenIndex)) {
                return parseStamentStartWithExpr(tokenKind, getAnnotations(annots));
            }
            // Unknown token: run error recovery and act on its decision.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STATEMENT_WITHOUT_ANNOTS, annots, nextTokenIndex);
            if (solution.action == Action.KEEP) {
                // Keep the token and treat it as the start of a var-decl.
                finalKeyword = STNodeFactory.createEmptyNode();
                return parseVariableDecl(getAnnotations(annots), finalKeyword, false);
            }
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // A token was inserted ahead of the current one; step the lookahead back.
            return parseStatement(solution.tokenKind, annots, nextTokenIndex - 1);
    }
}
/**
 * <p>
 * Parse a variable declaration, either local or module level.
 * </p>
 *
 * <code>
 * local-var-decl-stmt := local-init-var-decl-stmt | local-no-init-var-decl-stmt
 * <br/><br/>
 * local-init-var-decl-stmt := [annots] [final] typed-binding-pattern = action-or-expr ;
 * <br/><br/>
 * local-no-init-var-decl-stmt := [annots] [final] type-descriptor variable-name ;
 * </code>
 *
 * @param annots Annotations or metadata
 * @param finalKeyword Final keyword
 * @param isModuleVar Whether this declaration is at module level
 * @return Parsed node
 */
private STNode parseVariableDecl(STNode annots, STNode finalKeyword, boolean isModuleVar) {
    startContext(ParserRuleContext.VAR_DECL_STMT);
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    STNode variableName = parseVariableName();
    STNode varDeclNode = parseVarDeclRhs(annots, finalKeyword, typeDesc, variableName, isModuleVar);
    endContext();
    return varDeclNode;
}
/**
 * Parse the <code>final</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseFinalKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FINAL_KEYWORD) {
        // Not the expected keyword: let error recovery supply/repair it.
        return recover(nextToken, ParserRuleContext.FINAL_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse the right hand side of a variable declaration statement.
 * </p>
 * <code>
 * var-decl-rhs := ; | = action-or-expr ;
 * </code>
 *
 * @param metadata metadata
 * @param finalKeyword Final keyword
 * @param type Type descriptor
 * @param varName Variable name
 * @param isModuleVar flag indicating whether the var is module level
 * @return Parsed node
 */
private STNode parseVarDeclRhs(STNode metadata, STNode finalKeyword, STNode type, STNode varName,
                               boolean isModuleVar) {
    STToken token = peek();
    return parseVarDeclRhs(token.kind, metadata, finalKeyword, type, varName, isModuleVar);
}

/**
 * Parse the right hand side of a variable declaration statement, given the
 * next token kind.
 *
 * @param tokenKind Next token kind
 * @param metadata Metadata
 * @param finalKeyword Final keyword
 * @param type Type descriptor
 * @param varName Variable name
 * @param isModuleVar flag indicating whether the var is module level
 * @return Parsed node
 */
private STNode parseVarDeclRhs(SyntaxKind tokenKind, STNode metadata, STNode finalKeyword, STNode type,
                               STNode varName, boolean isModuleVar) {
    STNode assign;
    STNode expr;
    STNode semicolon;
    switch (tokenKind) {
        case EQUAL_TOKEN:
            assign = parseAssignOp();
            if (isModuleVar) {
                // Module-level initializers are plain expressions; actions are not allowed.
                expr = parseExpression();
            } else {
                expr = parseActionOrExpression();
            }
            semicolon = parseSemicolon();
            break;
        case SEMICOLON_TOKEN:
            if (isModuleVar) {
                // Module-level variables must be initialized; locals may omit the initializer.
                this.errorHandler.reportMissingTokenError("assignment required");
            }
            assign = STNodeFactory.createEmptyNode();
            expr = STNodeFactory.createEmptyNode();
            semicolon = parseSemicolon();
            break;
        default:
            // Unexpected token: run error recovery and act on its decision.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.VAR_DECL_STMT_RHS, metadata, finalKeyword, type,
                    varName, isModuleVar);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseVarDeclRhs(solution.tokenKind, metadata, finalKeyword, type, varName, isModuleVar);
    }
    if (isModuleVar) {
        return STNodeFactory.createModuleVariableDeclarationNode(metadata, finalKeyword, type, varName, assign,
                expr, semicolon);
    }
    return STNodeFactory.createVariableDeclarationNode(metadata, finalKeyword, type, varName, assign, expr,
            semicolon);
}
/**
 * <p>
 * Parse the RHS portion of an assignment statement, validating the LHS first.
 * </p>
 * <code>assignment-stmt-rhs := = action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Parsed node
 */
private STNode parseAssignmentStmtRhs(STNode lvExpr) {
    validateLVExpr(lvExpr);
    STNode assignOp = parseAssignOp();
    STNode rhsExpr = parseActionOrExpression();
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createAssignmentStatementNode(lvExpr, assignOp, rhsExpr, semicolonToken);
}
/*
* Expressions
*/
/**
 * Parse an expression, starting from the lowest level of precedence.
 *
 * @return Parsed node
 */
protected STNode parseExpression() {
    final boolean isRhsExpr = true;
    final boolean allowActions = false;
    return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, allowActions);
}
/**
 * Parse an action or expression, starting from the lowest level of precedence.
 *
 * @return Parsed node
 */
private STNode parseActionOrExpression() {
    final boolean isRhsExpr = true;
    final boolean allowActions = true;
    return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, allowActions);
}

/**
 * Parse an action or expression appearing on the LHS of a statement, given the next token kind.
 *
 * @param tokenKind Next token kind
 * @return Parsed node
 */
private STNode parseActionOrExpressionInLhs(SyntaxKind tokenKind) {
    final boolean isRhsExpr = false;
    final boolean allowActions = true;
    return parseExpression(tokenKind, DEFAULT_OP_PRECEDENCE, isRhsExpr, allowActions);
}

/**
 * Parse an action or expression, indicating whether it is a RHS expression.
 *
 * @param isRhsExpr Flag indicating whether this is a rhs expression
 * @return Parsed node
 */
private STNode parseActionOrExpression(boolean isRhsExpr) {
    final boolean allowActions = true;
    return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, allowActions);
}

/**
 * Parse an expression (actions not allowed), indicating whether it is a RHS expression.
 *
 * @param isRhsExpr Flag indicating whether this is a rhs expression
 * @return Parsed node
 */
private STNode parseExpression(boolean isRhsExpr) {
    final boolean allowActions = false;
    return parseExpression(DEFAULT_OP_PRECEDENCE, isRhsExpr, allowActions);
}
/**
 * Validate that the given expression is a legal assignment LHS; log an error otherwise.
 *
 * @param expression Expression to validate
 */
private void validateLVExpr(STNode expression) {
    if (!isValidLVExpr(expression)) {
        this.errorHandler.reportInvalidNode(null, "invalid expression for assignment lhs");
    }
}

/**
 * Check whether the expression is valid as the LHS of an assignment.
 *
 * @param expression Expression to check
 * @return {@code true} if the expression can be assigned to
 */
private boolean isValidLVExpr(STNode expression) {
    switch (expression.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
            return true;
        case FIELD_ACCESS:
            // Valid only if the accessed container is itself a valid lvalue.
            return isValidLVExpr(((STFieldAccessExpressionNode) expression).expression);
        case INDEXED_EXPRESSION:
            return isValidLVExpr(((STIndexedExpressionNode) expression).containerExpression);
        default:
            // A missing token produced by recovery is tolerated to avoid cascading errors.
            return expression instanceof STMissingToken;
    }
}
/**
 * Parse an expression that has an equal or higher precedence than a given level.
 *
 * @param precedenceLevel Precedence level of expression to be parsed
 * @param isRhsExpr Flag indicating whether this is a rhs expression
 * @param allowActions Flag indicating whether the current context supports actions
 * @return Parsed node
 */
private STNode parseExpression(OperatorPrecedence precedenceLevel, boolean isRhsExpr, boolean allowActions) {
    return parseExpression(peek().kind, precedenceLevel, isRhsExpr, allowActions);
}

private STNode parseExpression(SyntaxKind kind, OperatorPrecedence precedenceLevel, boolean isRhsExpr,
                               boolean allowActions) {
    // Parse the leaf first, then fold in any operators of acceptable precedence.
    STNode terminalExpr = parseTerminalExpression(kind, isRhsExpr, allowActions);
    return parseExpressionRhs(precedenceLevel, terminalExpr, isRhsExpr, allowActions);
}
/**
 * Parse a terminal expression. Terminal expressions have the highest precedence
 * level of all expressions and form the leaves of an expression tree.
 *
 * @param isRhsExpr Is a rhs expression
 * @param allowActions Allow actions
 * @return Parsed node
 */
private STNode parseTerminalExpression(boolean isRhsExpr, boolean allowActions) {
    STToken nextToken = peek();
    return parseTerminalExpression(nextToken.kind, isRhsExpr, allowActions);
}
/**
 * Parse a terminal expression, given the next token kind. On an unexpected token,
 * runs error recovery and either keeps, replaces, or re-dispatches on the recovered token.
 *
 * @param kind Next token kind
 * @param isRhsExpr Is a rhs expression
 * @param allowActions Allow actions
 * @return Parsed node
 */
private STNode parseTerminalExpression(SyntaxKind kind, boolean isRhsExpr, boolean allowActions) {
    switch (kind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            return parseBasicLiteral();
        case IDENTIFIER_TOKEN:
            return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
        case OPEN_PAREN_TOKEN:
            // "()" is the nil literal; anything else parenthesized is a braced expression.
            STToken nextNextToken = getNextNextToken(kind);
            if (nextNextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
                return parseNilLiteral();
            }
            return parseBracedExpression(isRhsExpr, allowActions);
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
            return parseCheckExpression(isRhsExpr, allowActions);
        case OPEN_BRACE_TOKEN:
            return parseMappingConstructorExpr();
        case TYPEOF_KEYWORD:
            return parseTypeofExpression(isRhsExpr);
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
            return parseUnaryExpression(isRhsExpr);
        case TRAP_KEYWORD:
            return parseTrapExpression(isRhsExpr, allowActions);
        case OPEN_BRACKET_TOKEN:
            return parseListConstructorExpr();
        case LT_TOKEN:
            return parseTypeCastExpr(isRhsExpr);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
        case FROM_KEYWORD:
            return parseTableConstructorOrQuery(isRhsExpr);
        case ERROR_KEYWORD:
            return parseErrorConstructorExpr();
        case LET_KEYWORD:
            return parseLetExpression(isRhsExpr);
        case BACKTICK_TOKEN:
            return parseTemplateExpression();
        case XML_KEYWORD:
            // `xml` starts a template expression only when followed by a backtick;
            // otherwise fall through to error recovery below.
            nextNextToken = getNextNextToken(kind);
            if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseXMLTemplateExpression();
            }
            break;
        case STRING_KEYWORD:
            // Same lookahead rule as `xml` above, for string templates.
            nextNextToken = getNextNextToken(kind);
            if (nextNextToken.kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseStringTemplateExpression();
            }
            break;
        case FUNCTION_KEYWORD:
            return parseExplicitFunctionExpression(null);
        case AT_TOKEN:
            break;
        case NEW_KEYWORD:
            return parseNewExpression();
        case START_KEYWORD:
            return parseStartAction(null);
        case FLUSH_KEYWORD:
            return parseFlushAction();
        case LEFT_ARROW_TOKEN:
            return parseReceiveAction();
        default:
            break;
    }
    // Error recovery: the token could not start a terminal expression.
    Solution solution = recover(peek(), ParserRuleContext.TERMINAL_EXPRESSION, isRhsExpr, allowActions);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    if (solution.action == Action.KEEP) {
        // KEEP is only reachable from the xml/string template fall-throughs above.
        if (kind == SyntaxKind.XML_KEYWORD) {
            return parseXMLTemplateExpression();
        }
        return parseStringTemplateExpression();
    }
    switch (solution.tokenKind) {
        case IDENTIFIER_TOKEN:
            this.errorHandler.reportMissingTokenError("missing " + solution.ctx);
            return parseQualifiedIdentifier(solution.recoveredNode);
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            this.errorHandler.reportMissingTokenError("missing " + solution.ctx);
            return solution.recoveredNode;
        default:
            // Re-dispatch on the token kind the recovery inserted.
            return parseTerminalExpression(solution.tokenKind, isRhsExpr, allowActions);
    }
}
/**
 * Continue parsing the RHS of an LHS action/expression, starting from the given terminal.
 *
 * @param nextTokenKind Kind of the next token
 * @param lhsExpr Already-parsed LHS expression
 * @return Parsed node
 */
private STNode parseActionOrExpressionInLhs(SyntaxKind nextTokenKind, STNode lhsExpr) {
    final boolean isRhsExpr = false;
    final boolean allowActions = true;
    return parseExpressionRhs(nextTokenKind, DEFAULT_OP_PRECEDENCE, lhsExpr, isRhsExpr, allowActions);
}
/**
 * Check whether the given token kind can start an expression.
 *
 * @param tokenKind Token kind to check
 * @return {@code true} if an expression may begin with this token
 */
private boolean isValidExprStart(SyntaxKind tokenKind) {
    switch (tokenKind) {
        // literals
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case STRING_LITERAL:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        // references and grouping
        case IDENTIFIER_TOKEN:
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case OPEN_BRACKET_TOKEN:
        // unary / prefix forms
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TYPEOF_KEYWORD:
        case TRAP_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        // constructors, templates, and other keyword-led expressions
        case LT_TOKEN:
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
        case FROM_KEYWORD:
        case ERROR_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case XML_KEYWORD:
        case STRING_KEYWORD:
        case FUNCTION_KEYWORD:
        case AT_TOKEN:
        case NEW_KEYWORD:
        // actions
        case START_KEYWORD:
        case FLUSH_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * <p>
 * Parse a new expression.
 * </p>
 * <code>
 * new-expr := explicit-new-expr | implicit-new-expr
 * <br/>
 * explicit-new-expr := new type-descriptor ( arg-list )
 * <br/>
 * implicit-new-expr := new [( arg-list )]
 * </code>
 *
 * @return Parsed NewExpression node.
 */
private STNode parseNewExpression() {
    return parseNewKeywordRhs(parseNewKeyword());
}
/**
 * Parse the <code>new</code> keyword.
 *
 * @return Parsed NEW_KEYWORD Token.
 */
private STNode parseNewKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.NEW_KEYWORD) {
        // Not the expected keyword: let error recovery supply/repair it.
        return recover(nextToken, ParserRuleContext.NEW_KEYWORD).recoveredNode;
    }
    return consume();
}
private STNode parseNewKeywordRhs(STNode newKeyword) {
    return parseNewKeywordRhs(peek().kind, newKeyword);
}

/**
 * <p>
 * Parse what follows the <code>new</code> keyword: either an explicit or an
 * implicit new-expression.
 * </p>
 *
 * @param kind next token kind.
 * @param newKeyword parsed node for `new` keyword.
 * @return Parsed new-expression node.
 */
private STNode parseNewKeywordRhs(SyntaxKind kind, STNode newKeyword) {
    switch (kind) {
        case OPEN_PAREN_TOKEN:
            // `new (...)` — implicit new with an argument list.
            return parseImplicitNewRhs(newKeyword);
        case IDENTIFIER_TOKEN:
        case OBJECT_KEYWORD:
            // `new T(...)` — explicit new with a type descriptor.
            return parseTypeDescriptorInNewExpr(newKeyword);
        default:
            // Including `new;` — an implicit new with no argument list.
            return STNodeFactory.createImplicitNewExpressionNode(newKeyword, STNodeFactory.createEmptyNode());
    }
}
/**
 * <p>
 * Parse an explicit new expression.
 * </p>
 * <code>
 * explicit-new-expr := new type-descriptor ( arg-list )
 * </code>
 *
 * @param newKeyword Parsed `new` keyword.
 * @return the Parsed Explicit New Expression.
 */
private STNode parseTypeDescriptorInNewExpr(STNode newKeyword) {
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_NEW_EXPR);
    STNode argList = parseParenthesizedArgList();
    return STNodeFactory.createExplicitNewExpressionNode(newKeyword, typeDesc, argList);
}
/**
 * <p>
 * Parse an <code>implicit-new-expr</code> that carries an argument list.
 * </p>
 *
 * @param newKeyword Parsed `new` keyword.
 * @return Parsed implicit-new-expr.
 */
private STNode parseImplicitNewRhs(STNode newKeyword) {
    STNode argList = parseParenthesizedArgList();
    return STNodeFactory.createImplicitNewExpressionNode(newKeyword, argList);
}
/**
 * <p>
 * Parse the parenthesized argument list of a <code>new-expr</code>.
 * </p>
 *
 * @return Parsed parenthesized rhs of <code>new-expr</code>.
 */
private STNode parseParenthesizedArgList() {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode arguments = parseArgsList();
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createParenthesizedArgList(openParen, arguments, closeParen);
}
/**
 * <p>
 * Parse the right-hand-side of an expression.
 * </p>
 * <code>expr-rhs := (binary-op expression
 * | dot identifier
 * | open-bracket expression close-bracket
 * )*</code>
 *
 * @param precedenceLevel Precedence level of the expression that is being parsed currently
 * @param lhsExpr LHS expression of the expression
 * @param isLVExpr Flag indicating whether this is on a lhsExpr of a statement
 * @param allowActions Flag indicating whether the current context support actions
 * @return Parsed node
 */
private STNode parseExpressionRhs(OperatorPrecedence precedenceLevel, STNode lhsExpr, boolean isLVExpr,
                                  boolean allowActions) {
    STToken nextToken = peek();
    return parseExpressionRhs(nextToken.kind, precedenceLevel, lhsExpr, isLVExpr, allowActions);
}
/**
 * Parse the right hand side of an expression given the next token kind, folding
 * in operators of equal-or-higher precedence (precedence-climbing).
 *
 * @param tokenKind Next token kind
 * @param currentPrecedenceLevel Precedence level of the expression that is being parsed currently
 * @param lhsExpr LHS expression
 * @param isRhsExpr Flag indicating whether this is a rhs expr or not
 * @param allowActions Flag indicating whether to allow actions or not
 * @return Parsed node
 */
private STNode parseExpressionRhs(SyntaxKind tokenKind, OperatorPrecedence currentPrecedenceLevel, STNode lhsExpr,
                                  boolean isRhsExpr, boolean allowActions) {
    if (isEndOfExpression(tokenKind, isRhsExpr)) {
        return lhsExpr;
    }
    if (lhsExpr.kind == SyntaxKind.ASYNC_SEND_ACTION) {
        // An async-send must be the last element of an action chain; stop here.
        return lhsExpr;
    }
    if (!isValidExprRhsStart(tokenKind)) {
        // Not an operator: run error recovery and act on its decision.
        STToken token = peek();
        Solution solution = recover(token, ParserRuleContext.EXPRESSION_RHS, currentPrecedenceLevel, lhsExpr,
                isRhsExpr, allowActions);
        if (solution.action == Action.REMOVE) {
            return solution.recoveredNode;
        }
        if (solution.ctx == ParserRuleContext.BINARY_OPERATOR) {
            // Recovery decided a binary operator is missing; insert one matching the
            // current precedence so the climb continues consistently.
            SyntaxKind binaryOpKind = getBinaryOperatorKindToInsert(currentPrecedenceLevel);
            return parseExpressionRhs(binaryOpKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions);
        } else {
            return parseExpressionRhs(solution.tokenKind, currentPrecedenceLevel, lhsExpr, isRhsExpr, allowActions);
        }
    }
    if (tokenKind == SyntaxKind.GT_TOKEN && peek(2).kind == SyntaxKind.GT_TOKEN) {
        // The lexer emits `>` `>` (`>`) separately; merge them into shift operators here.
        if (peek(3).kind == SyntaxKind.GT_TOKEN) {
            tokenKind = SyntaxKind.TRIPPLE_GT_TOKEN;
        } else {
            tokenKind = SyntaxKind.DOUBLE_GT_TOKEN;
        }
    }
    OperatorPrecedence nextOperatorPrecedence = getOpPrecedence(tokenKind);
    if (currentPrecedenceLevel.isHigherThan(nextOperatorPrecedence, allowActions)) {
        // Next operator binds less tightly: let the caller handle it.
        return lhsExpr;
    }
    STNode newLhsExpr;
    STNode operator;
    switch (tokenKind) {
        case OPEN_PAREN_TOKEN:
            newLhsExpr = parseFuncCall(lhsExpr);
            break;
        case OPEN_BRACKET_TOKEN:
            newLhsExpr = parseMemberAccessExpr(lhsExpr);
            break;
        case DOT_TOKEN:
            newLhsExpr = parseFieldAccessOrMethodCall(lhsExpr);
            break;
        case IS_KEYWORD:
            newLhsExpr = parseTypeTestExpression(lhsExpr);
            break;
        case RIGHT_ARROW_TOKEN:
            newLhsExpr = parseRemoteMethodCallOrAsyncSendAction(lhsExpr, isRhsExpr);
            if (!allowActions) {
                this.errorHandler.reportInvalidNode(null, "actions are not allowed here");
            }
            break;
        case SYNC_SEND_TOKEN:
            newLhsExpr = parseSyncSendAction(lhsExpr);
            if (!allowActions) {
                this.errorHandler.reportInvalidNode(null, "actions are not allowed here");
            }
            break;
        case RIGHT_DOUBLE_ARROW:
            newLhsExpr = parseImplicitAnonFunc(lhsExpr);
            break;
        default:
            // Binary operator: parse the operator token, then its RHS operand at the
            // operator's own precedence level.
            if (tokenKind == SyntaxKind.DOUBLE_GT_TOKEN) {
                operator = parseDoubleGTToken();
            } else if (tokenKind == SyntaxKind.TRIPPLE_GT_TOKEN) {
                operator = parseTrippleGTToken();
            } else {
                operator = parseBinaryOperator();
            }
            STNode rhsExpr = parseExpression(nextOperatorPrecedence, isRhsExpr, false);
            newLhsExpr = STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, operator,
                    rhsExpr);
            break;
    }
    // Continue folding further operators into the grown LHS.
    return parseExpressionRhs(currentPrecedenceLevel, newLhsExpr, isRhsExpr, allowActions);
}
/**
 * Check whether the given token kind can continue an expression (operator position).
 *
 * @param tokenKind Token kind to check
 * @return {@code true} if the token can start an expression RHS
 */
private boolean isValidExprRhsStart(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case OPEN_PAREN_TOKEN:      // function/method call
        case DOT_TOKEN:             // field access or method call
        case OPEN_BRACKET_TOKEN:    // member access
        case IS_KEYWORD:            // type-test expression
        case RIGHT_ARROW_TOKEN:     // remote call / async send
        case RIGHT_DOUBLE_ARROW:    // implicit anonymous function
        case SYNC_SEND_TOKEN:       // sync send action
            return true;
        default:
            return isBinaryOperator(tokenKind);
    }
}
/**
 * Parse a member access (indexed) expression.
 *
 * @param lhsExpr Container expression
 * @return Member access expression
 */
private STNode parseMemberAccessExpr(STNode lhsExpr) {
    STNode openBracket = parseOpenBracket();
    STNode keyExpr;
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        // Empty key: `expr[]`.
        keyExpr = STNodeFactory.createEmptyNode();
    } else if (nextTokenKind == SyntaxKind.ASTERISK_TOKEN) {
        // Wildcard key: `expr[*]`.
        keyExpr = consume();
    } else {
        keyExpr = parseExpression();
    }
    STNode closeBracket = parseCloseBracket();
    return STNodeFactory.createIndexedExpressionNode(lhsExpr, openBracket, keyExpr, closeBracket);
}
/**
 * Parse a close bracket token.
 *
 * @return Parsed node
 */
private STNode parseCloseBracket() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CLOSE_BRACKET_TOKEN) {
        // Not the expected token: let error recovery supply/repair it.
        return recover(nextToken, ParserRuleContext.CLOSE_BRACKET).recoveredNode;
    }
    return consume();
}
/**
 * Parse a field access expression or a method call expression.
 *
 * @param lhsExpr Preceding expression of the field access or method call
 * @return One of <code>field-access-expression</code> or <code>method-call-expression</code>.
 */
private STNode parseFieldAccessOrMethodCall(STNode lhsExpr) {
    STNode dotToken = parseDotToken();
    STNode fieldOrMethodName = parseIdentifier(ParserRuleContext.FIELD_OR_FUNC_NAME);
    if (peek().kind != SyntaxKind.OPEN_PAREN_TOKEN) {
        // No argument list follows: this is a plain field access.
        return STNodeFactory.createFieldAccessExpressionNode(lhsExpr, dotToken, fieldOrMethodName);
    }
    STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode args = parseArgsList();
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createMethodCallExpressionNode(lhsExpr, dotToken, fieldOrMethodName, openParen, args,
            closeParen);
}
/**
 * <p>
 * Parse braced expression.
 * </p>
 * <code>braced-expr := ( expression )</code>
 *
 * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
 * @param allowActions Allow actions
 * @return Parsed node
 */
private STNode parseBracedExpression(boolean isRhsExpr, boolean allowActions) {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    // Context is opened AFTER the paren: until proven otherwise this could also be the
    // parameter list of an implicit anonymous function, e.g. `(a, b) => ...`.
    startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
    STNode expr;
    if (allowActions) {
        expr = parseActionOrExpression(isRhsExpr);
    } else {
        expr = parseExpression(isRhsExpr);
    }
    if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // Still ambiguous (could be an anon-func param); the delegate decides and is
        // responsible for ending the context opened above.
        return parseBracedExprOrAnonFuncParamRhs(peek().kind, openParen, expr);
    }
    STNode closeParen = parseCloseParenthesis();
    endContext();
    if (isAction(expr)) {
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, expr, closeParen);
    }
    return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION, openParen, expr, closeParen);
}
/**
 * Disambiguate between a braced expression and the parameter list of an implicit
 * anonymous function, after a single simple-name was seen inside the parentheses.
 * Ends the BRACED_EXPR_OR_ANON_FUNC_PARAMS context opened by the caller on the
 * braced-expression path (the anon-func path manages contexts itself).
 *
 * @param nextTokenKind Kind of the next token
 * @param openParen Already-parsed open parenthesis
 * @param expr The simple-name expression parsed so far
 * @return Braced expression/action or an implicit anonymous function
 */
private STNode parseBracedExprOrAnonFuncParamRhs(SyntaxKind nextTokenKind, STNode openParen, STNode expr) {
    switch (nextTokenKind) {
        case CLOSE_PAREN_TOKEN:
            // `( name )` — a braced expression (or braced action).
            STNode closeParen = parseCloseParenthesis();
            STNode bracedEXprOrAnonFuncParam;
            if (isAction(expr)) {
                bracedEXprOrAnonFuncParam = STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION,
                        openParen, expr, closeParen);
            } else {
                bracedEXprOrAnonFuncParam = STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_EXPRESSION,
                        openParen, expr, closeParen);
            }
            endContext();
            return bracedEXprOrAnonFuncParam;
        case COMMA_TOKEN:
            // `( name , ...` — must be an implicit anon-func parameter list.
            return parseImplicitAnonFunc(openParen, expr);
        default:
            // Unexpected token: run error recovery and act on its decision.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAM_RHS);
            if (solution.action == Action.REMOVE) {
                endContext();
                return solution.recoveredNode;
            }
            return parseBracedExprOrAnonFuncParamRhs(solution.tokenKind, openParen, expr);
    }
}
/**
 * Check whether a given node is an action node.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is an action node. <code>false</code> otherwise
 */
private boolean isAction(STNode node) {
    SyntaxKind kind = node.kind;
    return kind == SyntaxKind.REMOTE_METHOD_CALL_ACTION
            || kind == SyntaxKind.BRACED_ACTION
            || kind == SyntaxKind.CHECK_ACTION
            || kind == SyntaxKind.START_ACTION
            || kind == SyntaxKind.TRAP_ACTION;
}
/**
 * Check whether the given token ends an expression.
 *
 * @param tokenKind Token to check
 * @param isRhsExpr Flag indicating whether this is on a rhsExpr of a statement
 * @return <code>true</code> if the token represents an end of a block. <code>false</code> otherwise
 */
private boolean isEndOfExpression(SyntaxKind tokenKind, boolean isRhsExpr) {
    if (!isRhsExpr) {
        // An LHS expression ends at a compound-assignment operator, or at any token
        // that cannot continue an expression.
        return isCompoundBinaryOperator(tokenKind) || !isValidExprRhsStart(tokenKind);
    }
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case OPEN_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case SEMICOLON_TOKEN:
        case COMMA_TOKEN:
        case PUBLIC_KEYWORD:
        case EOF_TOKEN:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case EQUAL_TOKEN:
        case AT_TOKEN:
        case DOCUMENTATION_LINE:
        case AS_KEYWORD:
        case IN_KEYWORD:
        case BACKTICK_TOKEN:
        case FROM_KEYWORD:
        case WHERE_KEYWORD:
        case LET_KEYWORD:
        case SELECT_KEYWORD:
            return true;
        default:
            // A type-starting token also terminates the expression (start of a new decl).
            return isSimpleType(tokenKind);
    }
}
/**
 * Parse a basic literal. Assumes the current token was already validated as a literal.
 *
 * @return Parsed node
 */
private STNode parseBasicLiteral() {
    STToken literal = consume();
    return STNodeFactory.createBasicLiteralNode(literal.kind, literal);
}
/**
 * Parse a function call expression.
 * <code>function-call-expr := function-reference ( arg-list )
 * function-reference := variable-reference</code>
 *
 * @param identifier Function name
 * @return Function call expression
 */
private STNode parseFuncCall(STNode identifier) {
    STNode openParenToken = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode argList = parseArgsList();
    STNode closeParenToken = parseCloseParenthesis();
    return STNodeFactory.createFunctionCallExpressionNode(identifier, openParenToken, argList, closeParenToken);
}
/**
 * <p>
 * Parse an error constructor expression.
 * </p>
 * <code>
 * error-constructor-expr := error ( arg-list )
 * </code>
 *
 * @return Error constructor expression
 */
private STNode parseErrorConstructorExpr() {
    STNode errorKeyword = parseErrorKeyWord();
    // Syntactically an error constructor is a call on the `error` keyword.
    return parseFuncCall(errorKeyword);
}
/**
 * Parse a function-call argument list. Enforces the positional/named/rest ordering
 * for the first argument; subsequent arguments are handled by parseFollowUpArgs.
 *
 * @return Parsed args list
 */
private STNode parseArgsList() {
    startContext(ParserRuleContext.ARG_LIST);
    ArrayList<STNode> argsList = new ArrayList<>();
    STToken token = peek();
    if (isEndOfParametersList(token.kind)) {
        // Empty argument list: `()`.
        STNode args = STNodeFactory.createNodeList(argsList);
        endContext();
        return args;
    }
    STNode leadingComma = STNodeFactory.createEmptyNode();
    STNode arg = parseArg(leadingComma);
    if (arg == null) {
        STNode args = STNodeFactory.createNodeList(argsList);
        endContext();
        return args;
    }
    SyntaxKind lastProcessedArgKind;
    // NOTE(review): this ordering check relies on the SyntaxKind enum declaring
    // POSITIONAL_ARG before NAMED_ARG before REST_ARG — confirm against SyntaxKind.
    if (SyntaxKind.POSITIONAL_ARG.ordinal() <= arg.kind.ordinal()) {
        argsList.add(arg);
        lastProcessedArgKind = arg.kind;
    } else {
        // First arg is of an invalid kind: report it and drop it from the list.
        reportInvalidOrderOfArgs(peek(), SyntaxKind.POSITIONAL_ARG, arg.kind);
        lastProcessedArgKind = SyntaxKind.POSITIONAL_ARG;
    }
    parseFollowUpArgs(argsList, lastProcessedArgKind);
    STNode args = STNodeFactory.createNodeList(argsList);
    endContext();
    return args;
}
/**
 * Parse follow-up arguments (everything after the first argument), enforcing the
 * positional &lt; named &lt; rest ordering and rejecting a second rest argument.
 * <p>
 * Fix: the duplicate-rest-arg diagnostic previously read "cannot more than one
 * rest arg"; corrected to grammatical English.
 *
 * @param argsList Arguments list to which the parsed argument must be added
 * @param lastProcessedArgKind Kind of the argument processed prior to this
 */
private void parseFollowUpArgs(ArrayList<STNode> argsList, SyntaxKind lastProcessedArgKind) {
    STToken nextToken = peek();
    while (!isEndOfParametersList(nextToken.kind)) {
        // Parse the argument separator; null signals the end of the list.
        STNode argEnd = parseArgEnd(nextToken.kind);
        if (argEnd == null) {
            break;
        }
        nextToken = peek();
        if (isEndOfParametersList(nextToken.kind)) {
            // Trailing separator right before the closing paren.
            this.errorHandler.reportInvalidNode((STToken) argEnd, "invalid token " + argEnd);
            break;
        }
        STNode arg = parseArg(nextToken.kind, argEnd);
        // Ordering relies on kind ordinals: positional < named < rest.
        if (lastProcessedArgKind.ordinal() <= arg.kind.ordinal()) {
            if (lastProcessedArgKind == SyntaxKind.REST_ARG && arg.kind == SyntaxKind.REST_ARG) {
                this.errorHandler.reportInvalidNode(nextToken, "cannot have more than one rest arg");
            } else {
                argsList.add(arg);
                lastProcessedArgKind = arg.kind;
            }
        } else {
            // Out-of-order argument: report it and drop it from the list.
            reportInvalidOrderOfArgs(nextToken, lastProcessedArgKind, arg.kind);
        }
        nextToken = peek();
    }
}
private STNode parseArgEnd() {
    return parseArgEnd(peek().kind);
}

/**
 * Parse the separator after an argument.
 *
 * @param nextTokenKind Kind of the next token
 * @return The comma separator, or {@code null} when the argument list has ended
 */
private STNode parseArgEnd(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // End of the argument list.
        return null;
    }
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    // Unexpected token: run error recovery and act on its decision.
    Solution solution = recover(peek(), ParserRuleContext.ARG_END);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseArgEnd(solution.tokenKind);
}
/**
 * Report an argument that appears out of order.
 *
 * @param token Starting token of the arg.
 * @param lastArgKind Kind of the previously processed arg
 * @param argKind Current arg
 */
private void reportInvalidOrderOfArgs(STToken token, SyntaxKind lastArgKind, SyntaxKind argKind) {
    String message = "cannot have a " + argKind + " after the " + lastArgKind;
    this.errorHandler.reportInvalidNode(token, message);
}
/**
 * Parse a single function-call argument.
 *
 * @param leadingComma Comma that occurs before the param
 * @return Parsed argument node, or {@code null} when the arg list has ended
 */
private STNode parseArg(STNode leadingComma) {
    STToken token = peek();
    return parseArg(token.kind, leadingComma);
}

private STNode parseArg(SyntaxKind kind, STNode leadingComma) {
    STNode arg;
    switch (kind) {
        case ELLIPSIS_TOKEN:
            // `...expr` — rest argument.
            STToken ellipsis = consume();
            STNode expr = parseExpression();
            arg = STNodeFactory.createRestArgumentNode(leadingComma, ellipsis, expr);
            break;
        case IDENTIFIER_TOKEN:
            // Could be `name = expr` (named arg) or a plain expression (positional arg).
            arg = parseNamedOrPositionalArg(leadingComma, kind);
            break;
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            if (isValidExprStart(kind)) {
                expr = parseExpression();
                arg = STNodeFactory.createPositionalArgumentNode(leadingComma, expr);
                break;
            }
            // Unexpected token: run error recovery and act on its decision.
            Solution solution = recover(peek(), ParserRuleContext.ARG_START_OR_ARG_LIST_END, leadingComma);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseArg(solution.tokenKind, leadingComma);
    }
    return arg;
}
/**
 * Parse a positional or named arg. This method assumes peek()/peek(1)
 * is always an identifier.
 *
 * @param leadingComma Comma that occurs before the param
 * @param nextTokenKind Kind of the next token (currently unused; kept for the
 *        caller's signature — removing it would break the call site)
 * @return Parsed argument node
 */
private STNode parseNamedOrPositionalArg(STNode leadingComma, SyntaxKind nextTokenKind) {
    STNode argNameOrExpr = parseTerminalExpression(peek().kind, true, false);
    STToken secondToken = peek();
    switch (secondToken.kind) {
        case EQUAL_TOKEN:
            // `name = expr` — a named argument.
            STNode equal = parseAssignOp();
            STNode valExpr = parseExpression();
            return STNodeFactory.createNamedArgumentNode(leadingComma, argNameOrExpr, equal, valExpr);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
            // The identifier alone is the whole (positional) argument.
            return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrExpr);
        default:
            // The identifier starts a larger expression; finish parsing it.
            argNameOrExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, argNameOrExpr, false, false);
            return STNodeFactory.createPositionalArgumentNode(leadingComma, argNameOrExpr);
    }
}
/**
 * Parse an object type descriptor:
 * <code>[client | abstract] object { object-members }</code>.
 *
 * @return Parsed node
 */
private STNode parseObjectTypeDescriptor() {
    startContext(ParserRuleContext.OBJECT_TYPE_DESCRIPTOR);
    STNode qualifiers = parseObjectTypeQualifiers();
    STNode objectKeyword = parseObjectKeyword();
    STNode openBraceToken = parseOpenBrace();
    STNode members = parseObjectMembers();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    return STNodeFactory.createObjectTypeDescriptorNode(qualifiers, objectKeyword, openBraceToken,
            members, closeBraceToken);
}
/**
 * Parse object type qualifiers (<code>client</code>, <code>abstract</code>),
 * of which there may be zero, one, or two.
 *
 * @return Parsed node (a node list of qualifiers)
 */
private STNode parseObjectTypeQualifiers() {
    STToken nextToken = peek();
    return parseObjectTypeQualifiers(nextToken.kind);
}

private STNode parseObjectTypeQualifiers(SyntaxKind kind) {
    List<STNode> qualifiers = new ArrayList<>();
    STNode firstQualifier;
    switch (kind) {
        case CLIENT_KEYWORD:
            STNode clientKeyword = parseClientKeyword();
            firstQualifier = clientKeyword;
            break;
        case ABSTRACT_KEYWORD:
            STNode abstractKeyword = parseAbstractKeyword();
            firstQualifier = abstractKeyword;
            break;
        case OBJECT_KEYWORD:
            // No qualifiers present.
            return STNodeFactory.createNodeList(qualifiers);
        default:
            // Unexpected token: run error recovery and act on its decision.
            Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseObjectTypeQualifiers(solution.tokenKind);
    }
    // There may be a second, different qualifier (e.g. `client abstract object`).
    STNode secondQualifier = parseObjectTypeSecondQualifier(firstQualifier);
    qualifiers.add(firstQualifier);
    if (secondQualifier != null) {
        qualifiers.add(secondQualifier);
    }
    return STNodeFactory.createNodeList(qualifiers);
}
private STNode parseObjectTypeSecondQualifier(STNode firstQualifier) {
STToken nextToken = peek();
return parseObjectTypeSecondQualifier(nextToken.kind, firstQualifier);
}
private STNode parseObjectTypeSecondQualifier(SyntaxKind kind, STNode firstQualifier) {
    // A second qualifier is accepted only if it differs from the first one.
    // A repeated qualifier (firstQualifier.kind == kind) deliberately falls
    // through to the error recovery below.
    if (firstQualifier.kind != kind) {
        switch (kind) {
            case CLIENT_KEYWORD:
                return parseClientKeyword();
            case ABSTRACT_KEYWORD:
                return parseAbstractKeyword();
            case OBJECT_KEYWORD:
                // No second qualifier present.
                return null;
            default:
                break;
        }
    }
    Solution solution = recover(peek(), ParserRuleContext.OBJECT_TYPE_SECOND_QUALIFIER, firstQualifier);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseObjectTypeSecondQualifier(solution.tokenKind, firstQualifier);
}
/**
 * Parse the <code>client</code> keyword.
 *
 * @return Parsed client-keyword token
 */
private STNode parseClientKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CLIENT_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.CLIENT_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>abstract</code> keyword.
 *
 * @return Parsed abstract-keyword token
 */
private STNode parseAbstractKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ABSTRACT_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.ABSTRACT_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>object</code> keyword.
 *
 * @return Parsed object-keyword token
 */
private STNode parseObjectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OBJECT_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.OBJECT_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the members of an object type descriptor body.
 *
 * @return Node list of object members
 */
private STNode parseObjectMembers() {
    ArrayList<STNode> members = new ArrayList<>();
    while (!isEndOfObjectTypeNode()) {
        startContext(ParserRuleContext.OBJECT_MEMBER);
        STNode member = parseObjectMember(peek().kind);
        endContext();
        // A null member signals the end of the member list.
        if (member == null) {
            break;
        }
        members.add(member);
    }
    return STNodeFactory.createNodeList(members);
}
/**
 * Parse a single object member, starting from the next token.
 *
 * @return Parsed object member, or <code>null</code> at end of the object body
 */
private STNode parseObjectMember() {
    return parseObjectMember(peek().kind);
}
/**
 * Parse a single object member, handling its optional metadata first.
 *
 * @param nextTokenKind Kind of the next token
 * @return Parsed object member, or <code>null</code> at end of the object body
 */
private STNode parseObjectMember(SyntaxKind nextTokenKind) {
    STNode metadata;
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // End of the object body.
            return null;
        case ASTERISK_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
            // Member starts directly, with no metadata.
            metadata = createEmptyMetadata();
            break;
        case DOCUMENTATION_LINE:
        case AT_TOKEN:
            // Member is preceded by documentation and/or annotations.
            metadata = parseMetaData(nextTokenKind);
            nextTokenKind = peek().kind;
            break;
        default:
            if (isTypeStartingToken(nextTokenKind)) {
                // A field starting with its type descriptor.
                metadata = createEmptyMetadata();
                break;
            }
            Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_START);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseObjectMember(solution.tokenKind);
    }
    return parseObjectMember(nextTokenKind, metadata);
}
/**
 * Parse a single object member, given its already-parsed metadata.
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata of the member (may be an empty-metadata node)
 * @return Parsed object member, or <code>null</code> at end of the object body
 */
private STNode parseObjectMember(SyntaxKind nextTokenKind, STNode metadata) {
    STNode member;
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // End of the object body.
            return null;
        case ASTERISK_TOKEN:
            // Type-reference member: '* TypeName ;'
            STNode asterisk = consume();
            STNode type = parseTypeReference();
            STNode semicolonToken = parseSemicolon();
            member = STNodeFactory.createTypeReferenceNode(asterisk, type, semicolonToken);
            break;
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
            STNode visibilityQualifier = parseObjectMemberVisibility();
            member = parseObjectMethodOrField(metadata, visibilityQualifier);
            break;
        case REMOTE_KEYWORD:
            member = parseObjectMethodOrField(metadata, STNodeFactory.createEmptyNode());
            break;
        case FUNCTION_KEYWORD:
            member = parseObjectMethod(metadata, STNodeFactory.createEmptyNode());
            break;
        default:
            if (isTypeStartingToken(nextTokenKind)) {
                member = parseObjectField(metadata, STNodeFactory.createEmptyNode());
                break;
            }
            Solution solution = recover(peek(), ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // Retry with the recovered token kind, keeping the metadata that
            // was already parsed. Calling the single-arg overload here would
            // discard that metadata and attempt to parse it a second time.
            return parseObjectMember(solution.tokenKind, metadata);
    }
    return member;
}
/**
 * Parse an object method or field, dispatching on the next two token kinds.
 *
 * @param metadata Metadata of the member
 * @param methodQualifiers Qualifiers preceding the member
 * @return Parsed object member node
 */
private STNode parseObjectMethodOrField(STNode metadata, STNode methodQualifiers) {
    return parseObjectMethodOrField(peek(1).kind, peek(2).kind, metadata, methodQualifiers);
}
/**
 * Parse an object member, given the visibility modifier. Object member can have
 * only one visibility qualifier. This means the methodQualifiers list can have
 * one qualifier at-most.
 *
 * @param nextTokenKind Next token kind
 * @param nextNextTokenKind Kind of the token after the next token
 * @param metadata Metadata
 * @param visibilityQualifiers Visibility qualifiers. A modifier can be
 *            a syntax node with either 'PUBLIC' or 'PRIVATE'.
 * @return Parse object member node
 */
private STNode parseObjectMethodOrField(SyntaxKind nextTokenKind, SyntaxKind nextNextTokenKind, STNode metadata,
        STNode visibilityQualifiers) {
    switch (nextTokenKind) {
        case REMOTE_KEYWORD:
            // 'remote' method: combine the visibility qualifier (if any) with
            // the remote keyword into a single qualifier list.
            STNode remoteKeyword = parseRemoteKeyword();
            ArrayList<STNode> methodQualifiers = new ArrayList<>();
            if (!isEmpty(visibilityQualifiers)) {
                methodQualifiers.add(visibilityQualifiers);
            }
            methodQualifiers.add(remoteKeyword);
            return parseObjectMethod(metadata, STNodeFactory.createNodeList(methodQualifiers));
        case FUNCTION_KEYWORD:
            return parseObjectMethod(metadata, visibilityQualifiers);
        case IDENTIFIER_TOKEN:
            if (nextNextTokenKind != SyntaxKind.OPEN_PAREN_TOKEN) {
                // An identifier not followed by '(' starts a field whose type
                // is a type reference.
                return parseObjectField(metadata, visibilityQualifiers);
            }
            break;
        default:
            if (isTypeStartingToken(nextTokenKind)) {
                return parseObjectField(metadata, visibilityQualifiers);
            }
            break;
    }
    Solution solution = recover(peek(), ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY, metadata,
            visibilityQualifiers);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    // NOTE(review): on retry, the previous nextTokenKind is passed as
    // nextNextTokenKind — confirm this is the intended lookahead after recovery.
    return parseObjectMethodOrField(solution.tokenKind, nextTokenKind, metadata, visibilityQualifiers);
}
/**
 * Parse an object member visibility qualifier: <code>public</code> or <code>private</code>.
 *
 * @return Parsed visibility-qualifier token
 */
private STNode parseObjectMemberVisibility() {
    STToken nextToken = peek();
    boolean isVisibilityKeyword =
            nextToken.kind == SyntaxKind.PUBLIC_KEYWORD || nextToken.kind == SyntaxKind.PRIVATE_KEYWORD;
    if (!isVisibilityKeyword) {
        // Not a visibility keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.PUBLIC_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>remote</code> keyword.
 *
 * @return Parsed remote-keyword token
 */
private STNode parseRemoteKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.REMOTE_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.REMOTE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse an object field: type descriptor, field name, and the optional
 * initializer handled by the rhs parser.
 *
 * @param metadata Metadata of the field
 * @param methodQualifiers Qualifiers preceding the field
 * @return Parsed object field node
 */
private STNode parseObjectField(STNode metadata, STNode methodQualifiers) {
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode fieldName = parseVariableName();
    return parseObjectFieldRhs(metadata, methodQualifiers, typeDesc, fieldName);
}
/**
 * Complete the parsing of an object field, starting from the token after the
 * field name.
 *
 * @param metadata Metadata
 * @param visibilityQualifier Visibility qualifier
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed object field node
 */
private STNode parseObjectFieldRhs(STNode metadata, STNode visibilityQualifier, STNode type, STNode fieldName) {
    return parseObjectFieldRhs(peek().kind, metadata, visibilityQualifier, type, fieldName);
}
/**
 * Parse object field rhs, and complete the object field parsing. Returns the parsed object field.
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata
 * @param visibilityQualifier Visibility qualifier
 * @param type Type descriptor
 * @param fieldName Field name
 * @return Parsed object field
 */
private STNode parseObjectFieldRhs(SyntaxKind nextTokenKind, STNode metadata, STNode visibilityQualifier,
        STNode type, STNode fieldName) {
    STNode equalsToken;
    STNode expression;
    STNode semicolonToken;
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
            // Field without an initializer: 'T name;'
            equalsToken = STNodeFactory.createEmptyNode();
            expression = STNodeFactory.createEmptyNode();
            semicolonToken = parseSemicolon();
            break;
        case EQUAL_TOKEN:
            // Field with an initializer: 'T name = expr;'
            equalsToken = parseAssignOp();
            expression = parseExpression();
            semicolonToken = parseSemicolon();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.OBJECT_FIELD_RHS, metadata, visibilityQualifier,
                    type, fieldName);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseObjectFieldRhs(solution.tokenKind, metadata, visibilityQualifier, type, fieldName);
    }
    return STNodeFactory.createObjectFieldNode(metadata, visibilityQualifier, type, fieldName, equalsToken,
            expression, semicolonToken);
}
/**
 * Parse an object method definition.
 *
 * @param metadata Metadata of the method
 * @param methodQualifiers Qualifiers preceding the method
 * @return Parsed object method node
 */
private STNode parseObjectMethod(STNode metadata, STNode methodQualifiers) {
    return parseFuncDefOrFuncTypeDesc(metadata, methodQualifiers, true);
}
/**
 * Parse an if-else statement.
 * <code>
 * if-else-stmt := if expression block-stmt [else-block]
 * </code>
 *
 * @return If-else statement node
 */
private STNode parseIfElseBlock() {
    startContext(ParserRuleContext.IF_BLOCK);
    STNode ifKeyword = parseIfKeyword();
    STNode conditionExpr = parseExpression();
    STNode ifBody = parseBlockNode();
    endContext();
    // The else block (if any) is parsed outside the if-block context.
    STNode elseBody = parseElseBlock();
    return STNodeFactory.createIfElseStatementNode(ifKeyword, conditionExpr, ifBody, elseBody);
}
/**
 * Parse the <code>if</code> keyword.
 *
 * @return Parsed if-keyword token
 */
private STNode parseIfKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IF_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.IF_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the <code>else</code> keyword.
 *
 * @return Parsed else-keyword token
 */
private STNode parseElseKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ELSE_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.ELSE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a block statement.
 * <code>
 * block-stmt := { sequence-stmt }
 * sequence-stmt := statement*
 * </code>
 *
 * @return Block statement node
 */
private STNode parseBlockNode() {
    startContext(ParserRuleContext.BLOCK_STMT);
    STNode openBraceToken = parseOpenBrace();
    STNode statements = parseStatements();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    return STNodeFactory.createBlockStatementNode(openBraceToken, statements, closeBraceToken);
}
/**
 * Parse an optional else block.
 * <code>else-block := else (if-else-stmt | block-stmt)</code>
 *
 * @return Else block node, or an empty node if there is no else block
 */
private STNode parseElseBlock() {
    // No 'else' keyword means no else block at all.
    if (peek().kind != SyntaxKind.ELSE_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    STNode elseKeyword = parseElseKeyword();
    STNode elseBody = parseElseBody();
    return STNodeFactory.createElseBlockNode(elseKeyword, elseBody);
}
/**
 * Parse the body of an else block.
 * <code>else-body := if-else-stmt | block-stmt</code>
 *
 * @return Else body node
 */
private STNode parseElseBody() {
    return parseElseBody(peek().kind);
}
/**
 * Parse the body of an else block, dispatching on the next token kind.
 *
 * @param nextTokenKind Kind of the next token
 * @return Else body node (an 'else if' chain or a plain block)
 */
private STNode parseElseBody(SyntaxKind nextTokenKind) {
    // 'else if ...' chains into another if-else statement.
    if (nextTokenKind == SyntaxKind.IF_KEYWORD) {
        return parseIfElseBlock();
    }
    // A plain '{ ... }' else body.
    if (nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseBlockNode();
    }
    Solution solution = recover(peek(), ParserRuleContext.ELSE_BODY);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseElseBody(solution.tokenKind);
}
/**
 * Parse a while statement.
 * <code>while-stmt := while expression block-stmt</code>
 *
 * @return While statement node
 */
private STNode parseWhileStatement() {
    startContext(ParserRuleContext.WHILE_BLOCK);
    STNode whileKeyword = parseWhileKeyword();
    STNode conditionExpr = parseExpression();
    STNode whileBody = parseBlockNode();
    endContext();
    return STNodeFactory.createWhileStatementNode(whileKeyword, conditionExpr, whileBody);
}
/**
 * Parse the <code>while</code> keyword.
 *
 * @return Parsed while-keyword token
 */
private STNode parseWhileKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WHILE_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.WHILE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a panic statement.
 * <code>panic-stmt := panic expression ;</code>
 *
 * @return Panic statement node
 */
private STNode parsePanicStatement() {
    startContext(ParserRuleContext.PANIC_STMT);
    STNode panicKeyword = parsePanicKeyword();
    STNode panicExpr = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createPanicStatementNode(panicKeyword, panicExpr, semicolonToken);
}
/**
 * Parse the <code>panic</code> keyword.
 *
 * @return Parsed panic-keyword token
 */
private STNode parsePanicKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.PANIC_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.PANIC_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a check expression or a check action.
 * <p>
 * <code>
 * checking-expr := checking-keyword expression
 * checking-action := checking-keyword action
 * </code>
 *
 * @param isRhsExpr Whether this is parsed as a rhs expression
 * @param allowActions Whether actions are allowed after the checking keyword
 * @return Check expression or check action node
 */
private STNode parseCheckExpression(boolean isRhsExpr, boolean allowActions) {
    STNode checkingKeyword = parseCheckingKeyword();
    STNode expr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions);
    // The resulting node kind depends on whether the operand is an action.
    SyntaxKind nodeKind = isAction(expr) ? SyntaxKind.CHECK_ACTION : SyntaxKind.CHECK_EXPRESSION;
    return STNodeFactory.createCheckExpressionNode(nodeKind, checkingKeyword, expr);
}
/**
 * Parse a checking keyword.
 * <p>
 * <code>
 * checking-keyword := check | checkpanic
 * </code>
 *
 * @return Parsed checking-keyword token
 */
private STNode parseCheckingKeyword() {
    STToken nextToken = peek();
    boolean isCheckingKeyword =
            nextToken.kind == SyntaxKind.CHECK_KEYWORD || nextToken.kind == SyntaxKind.CHECKPANIC_KEYWORD;
    if (!isCheckingKeyword) {
        // Not a checking keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.CHECKING_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a continue statement.
 * <code>continue-stmt := continue ;</code>
 *
 * @return Continue statement node
 */
private STNode parseContinueStatement() {
    startContext(ParserRuleContext.CONTINUE_STATEMENT);
    STNode continueKeyword = parseContinueKeyword();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createContinueStatementNode(continueKeyword, semicolonToken);
}
/**
 * Parse the <code>continue</code> keyword.
 *
 * @return Parsed continue-keyword token
 */
private STNode parseContinueKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONTINUE_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.CONTINUE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a return statement.
 * <code>return-stmt := return [ action-or-expr ] ;</code>
 *
 * @return Return statement node
 */
private STNode parseReturnStatement() {
    startContext(ParserRuleContext.RETURN_STMT);
    STNode returnKeyword = parseReturnKeyword();
    // The rhs parser handles the optional expression and the semicolon.
    STNode returnStmt = parseReturnStatementRhs(returnKeyword);
    endContext();
    return returnStmt;
}
/**
 * Parse the <code>return</code> keyword.
 *
 * @return Parsed return-keyword token
 */
private STNode parseReturnKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RETURN_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.RETURN_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a break statement.
 * <code>break-stmt := break ;</code>
 *
 * @return Break statement node
 */
private STNode parseBreakStatement() {
    startContext(ParserRuleContext.BREAK_STATEMENT);
    STNode breakKeyword = parseBreakKeyword();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createBreakStatementNode(breakKeyword, semicolonToken);
}
/**
 * Parse the <code>break</code> keyword.
 *
 * @return Parsed break-keyword token
 */
private STNode parseBreakKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BREAK_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.BREAK_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse the right hand side of a return statement.
 * </p>
 * <code>
 * return-stmt-rhs := ; | action-or-expr ;
 * </code>
 *
 * @param returnKeyword The already-parsed return keyword
 * @return Return statement node
 */
private STNode parseReturnStatementRhs(STNode returnKeyword) {
    STNode expr;
    // A bare 'return;' has no expression.
    if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
        expr = STNodeFactory.createEmptyNode();
    } else {
        expr = parseActionOrExpression();
    }
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createReturnStatementNode(returnKeyword, expr, semicolonToken);
}
/**
 * Parse a mapping constructor expression.
 * <p>
 * <code>mapping-constructor-expr := { [field (, field)*] }</code>
 *
 * @return Mapping constructor expression node
 */
private STNode parseMappingConstructorExpr() {
    startContext(ParserRuleContext.MAPPING_CONSTRUCTOR);
    STNode openBraceToken = parseOpenBrace();
    STNode fieldList = parseMappingConstructorFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    return STNodeFactory.createMappingConstructorExpressionNode(openBraceToken, fieldList, closeBraceToken);
}
/**
 * Parse the comma-separated field list of a mapping constructor.
 *
 * @return Node list of mapping fields (possibly empty)
 */
private STNode parseMappingConstructorFields() {
    List<STNode> fieldList = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfMappingConstructor(nextToken.kind)) {
        return STNodeFactory.createNodeList(fieldList);
    }
    // The first field carries no leading delimiter.
    STNode fieldEnd = STNodeFactory.createEmptyNode();
    STNode currentField = parseMappingField(ParserRuleContext.FIRST_MAPPING_FIELD, fieldEnd);
    fieldList.add(currentField);
    nextToken = peek();
    while (!isEndOfMappingConstructor(nextToken.kind)) {
        fieldEnd = parseMappingFieldEnd(nextToken.kind);
        // A null delimiter marks the end of the field list.
        if (fieldEnd == null) {
            break;
        }
        currentField = parseMappingField(ParserRuleContext.MAPPING_FIELD, fieldEnd);
        fieldList.add(currentField);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(fieldList);
}
/**
 * Parse the delimiter between two mapping constructor fields.
 *
 * @return Delimiter token, or <code>null</code> at the end of the field list
 */
private STNode parseMappingFieldEnd() {
    STToken nextToken = peek();
    return parseMappingFieldEnd(nextToken.kind);
}
/**
 * Parse the delimiter between two mapping constructor fields, dispatching on
 * the next token kind.
 *
 * @param nextTokenKind Kind of the next token
 * @return Comma token, or <code>null</code> when the close-brace ends the list
 */
private STNode parseMappingFieldEnd(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        // End of the mapping constructor.
        return null;
    }
    Solution solution = recover(peek(), ParserRuleContext.MAPPING_FIELD_END);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseMappingFieldEnd(solution.tokenKind);
}
/**
 * Check whether the given token kind terminates a mapping constructor.
 *
 * @param tokenKind Kind of the token to check
 * @return <code>true</code> if the token ends the mapping constructor
 */
private boolean isEndOfMappingConstructor(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case IDENTIFIER_TOKEN:
            // An identifier always starts another field.
            return false;
        case EOF_TOKEN:
        case AT_TOKEN:
        case DOCUMENTATION_LINE:
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case FUNCTION_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case RESOURCE_KEYWORD:
            // Tokens that can only belong to the surrounding construct,
            // i.e. the constructor must have ended (possibly with a missing brace).
            return true;
        default:
            // Any simple-type-starting token also means we've left the constructor.
            return isSimpleType(tokenKind);
    }
}
/**
 * Parse a single mapping constructor field.
 * <p>
 * <code>field := specific-field | computed-name-field | spread-field</code>
 *
 * @param fieldContext Context of the mapping field
 * @param leadingComma Leading comma
 * @return Parsed mapping field node
 */
private STNode parseMappingField(ParserRuleContext fieldContext, STNode leadingComma) {
    return parseMappingField(peek().kind, fieldContext, leadingComma);
}
private STNode parseMappingField(SyntaxKind tokenKind, ParserRuleContext fieldContext, STNode leadingComma) {
    switch (tokenKind) {
        case IDENTIFIER_TOKEN:
            // specific-field: 'key' or 'key: expr'
            return parseSpecificFieldWithOptionValue(leadingComma);
        case STRING_LITERAL:
            // specific-field with a string-literal key: '"key": expr'
            STNode key = parseStringLiteral();
            STNode colon = parseColon();
            STNode valueExpr = parseExpression();
            return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr);
        case OPEN_BRACKET_TOKEN:
            // computed-name-field: '[expr]: expr'
            return parseComputedField(leadingComma);
        case ELLIPSIS_TOKEN:
            // spread-field: '...expr'
            STNode ellipsis = parseEllipsis();
            STNode expr = parseExpression();
            return STNodeFactory.createSpreadFieldNode(leadingComma, ellipsis, expr);
        case CLOSE_BRACE_TOKEN:
            if (fieldContext == ParserRuleContext.FIRST_MAPPING_FIELD) {
                // Empty mapping constructor: no first field.
                return null;
            }
            // Intentional fall-through: a close-brace right after a delimiter
            // means a field is missing, so run error recovery.
        default:
            STToken token = peek();
            Solution solution = recover(token, fieldContext, fieldContext, leadingComma);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseMappingField(solution.tokenKind, fieldContext, leadingComma);
    }
}
/**
 * Parse a mapping constructor specific-field whose value expression is optional.
 *
 * @param leadingComma Leading comma of the field
 * @return Parsed specific-field node
 */
private STNode parseSpecificFieldWithOptionValue(STNode leadingComma) {
    STNode fieldKey = parseIdentifier(ParserRuleContext.MAPPING_FIELD_NAME);
    return parseSpecificFieldRhs(leadingComma, fieldKey);
}
/**
 * Parse the part of a specific-field that follows the key.
 *
 * @param leadingComma Leading comma of the field
 * @param key Already-parsed field key
 * @return Parsed specific-field node
 */
private STNode parseSpecificFieldRhs(STNode leadingComma, STNode key) {
    return parseSpecificFieldRhs(peek().kind, leadingComma, key);
}
private STNode parseSpecificFieldRhs(SyntaxKind tokenKind, STNode leadingComma, STNode key) {
    STNode colon;
    STNode valueExpr;
    switch (tokenKind) {
        case COLON_TOKEN:
            // 'key: expr' — explicit value expression.
            colon = parseColon();
            valueExpr = parseExpression();
            break;
        case COMMA_TOKEN:
            // 'key,' — shorthand field with no explicit value.
            colon = STNodeFactory.createEmptyNode();
            valueExpr = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isEndOfMappingConstructor(tokenKind)) {
                // Shorthand field at the end of the constructor.
                colon = STNodeFactory.createEmptyNode();
                valueExpr = STNodeFactory.createEmptyNode();
                break;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.SPECIFIC_FIELD_RHS, leadingComma, key);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseSpecificFieldRhs(solution.tokenKind, leadingComma, key);
    }
    return STNodeFactory.createSpecificFieldNode(leadingComma, key, colon, valueExpr);
}
/**
 * Parse a string literal token.
 *
 * @return Parsed string-literal token
 */
private STNode parseStringLiteral() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.STRING_LITERAL) {
        // Not a string literal: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.STRING_LITERAL);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a colon token.
 *
 * @return Parsed colon token
 */
private STNode parseColon() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.COLON_TOKEN) {
        // Not a colon: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.COLON);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a computed-name-field of a mapping constructor expression.
 * <p>
 * <code>computed-name-field := [ field-name-expr ] : value-expr</code>
 *
 * @param leadingComma Leading comma of the field
 * @return Parsed computed-name-field node
 */
private STNode parseComputedField(STNode leadingComma) {
    // The bracketed key expression gets its own parsing context.
    startContext(ParserRuleContext.COMPUTED_FIELD_NAME);
    STNode openBracketToken = parseOpenBracket();
    STNode fieldNameExpr = parseExpression();
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    STNode colonToken = parseColon();
    STNode valueExpr = parseExpression();
    return STNodeFactory.createComputedNameFieldNode(leadingComma, openBracketToken, fieldNameExpr,
            closeBracketToken, colonToken, valueExpr);
}
/**
 * Parse an open bracket token.
 *
 * @return Parsed open-bracket token
 */
private STNode parseOpenBracket() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN) {
        // Not an open bracket: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.OPEN_BRACKET);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * <p>
 * Parse a compound assignment statement.
 * </p>
 * <code>assignment-stmt := lvexpr CompoundAssignmentOperator action-or-expr ;</code>
 *
 * @return Compound assignment statement node
 */
private STNode parseCompoundAssignmentStmt() {
    startContext(ParserRuleContext.COMPOUND_ASSIGNMENT_STMT);
    STNode lhsVarName = parseVariableName();
    STNode stmt = parseCompoundAssignmentStmtRhs(lhsVarName);
    endContext();
    return stmt;
}
/**
 * <p>
 * Parse the RHS portion of a compound assignment.
 * </p>
 * <code>compound-assignment-stmt-rhs := CompoundAssignmentOperator action-or-expr ;</code>
 *
 * @param lvExpr LHS expression
 * @return Compound assignment statement node
 */
private STNode parseCompoundAssignmentStmtRhs(STNode lvExpr) {
    // The LHS must be a valid lvalue expression.
    validateLVExpr(lvExpr);
    STNode binaryOp = parseCompoundBinaryOperator();
    STNode equalsToken = parseAssignOp();
    STNode rhsExpr = parseActionOrExpression();
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createCompoundAssignmentStatementNode(lvExpr, binaryOp, equalsToken, rhsExpr,
            semicolonToken);
}
/**
 * Parse the binary operator part of a compound assignment operator.
 *
 * @return Parsed binary-operator token
 */
private STNode parseCompoundBinaryOperator() {
    STToken nextToken = peek();
    if (!isCompoundBinaryOperator(nextToken.kind)) {
        // Not a compound binary operator: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.COMPOUND_BINARY_OPERATOR);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse a service declaration.
 * <p>
 * <code>
 * service-decl := metadata service [variable-name] on expression-list service-body-block
 * <br/>
 * expression-list := expression (, expression)*
 * </code>
 *
 * @param metadata Metadata of the declaration
 * @return Service declaration node
 */
private STNode parseServiceDecl(STNode metadata) {
    startContext(ParserRuleContext.SERVICE_DECL);
    STNode serviceKeyword = parseServiceKeyword();
    STNode serviceDecl = parseServiceRhs(metadata, serviceKeyword);
    endContext();
    return serviceDecl;
}
/**
 * Parse the part of a service declaration that follows the service keyword.
 * <p>
 * <code>
 * service-rhs := [variable-name] on expression-list service-body-block
 * </code>
 *
 * @param metadata Metadata of the declaration
 * @param serviceKeyword Already-parsed service keyword
 * @return Service declaration node
 */
private STNode parseServiceRhs(STNode metadata, STNode serviceKeyword) {
    STNode serviceName = parseServiceName();
    STNode onKeyword = parseOnKeyword();
    STNode listenerList = parseListeners();
    STNode serviceBody = parseServiceBody();
    return STNodeFactory.createServiceDeclarationNode(metadata, serviceKeyword, serviceName, onKeyword,
            listenerList, serviceBody);
}
/**
 * Parse the optional name of a service declaration.
 *
 * @return Service name node, or an empty node if the name is omitted
 */
private STNode parseServiceName() {
    return parseServiceName(peek().kind);
}
/**
 * Parse the optional name of a service declaration, dispatching on the next
 * token kind.
 *
 * @param kind Kind of the next token
 * @return Service name node, or an empty node if the name is omitted
 */
private STNode parseServiceName(SyntaxKind kind) {
    if (kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseIdentifier(ParserRuleContext.SERVICE_NAME);
    }
    if (kind == SyntaxKind.ON_KEYWORD) {
        // The name is optional: 'on' means it was omitted.
        return STNodeFactory.createEmptyNode();
    }
    Solution solution = recover(peek(), ParserRuleContext.OPTIONAL_SERVICE_NAME);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseServiceName(solution.tokenKind);
}
/**
 * Parse the <code>service</code> keyword.
 *
 * @return Parsed service-keyword token
 */
private STNode parseServiceKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SERVICE_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.SERVICE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Check whether the given token kind is a compound binary operator.
 * <p>
 * <code>compound-binary-operator := + | - | * | / | & | | | ^ | << | >> | >>></code>
 * <p>
 * NOTE(review): the grammar above lists shift operators, but the switch below
 * handles only the arithmetic and bitwise cases — confirm whether shift
 * compound-assignments are handled elsewhere or are missing here.
 *
 * @param tokenKind STToken kind
 * @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
 */
private boolean isCompoundBinaryOperator(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case SLASH_TOKEN:
        case ASTERISK_TOKEN:
        case BITWISE_AND_TOKEN:
        case BITWISE_XOR_TOKEN:
        case PIPE_TOKEN:
            // Only a compound operator when immediately followed by '='.
            return getNextNextToken(tokenKind).kind == SyntaxKind.EQUAL_TOKEN;
        default:
            return false;
    }
}
/**
 * Parse the <code>on</code> keyword.
 *
 * @return Parsed on-keyword token
 */
private STNode parseOnKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ON_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.ON_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the listener references of a service declaration.
 * <p>
 * <code>expression-list := expression (, expression)*</code>
 *
 * @return Node list of listener expressions
 */
private STNode parseListeners() {
    startContext(ParserRuleContext.LISTENERS_LIST);
    List<STNode> expressions = new ArrayList<>();
    STToken nextToken = peek();
    // At least one listener expression is required.
    if (isEndOfExpressionsList(nextToken.kind)) {
        endContext();
        this.errorHandler.reportMissingTokenError("missing expression");
        return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    }
    // The first item carries no leading comma.
    STNode delimiter = STNodeFactory.createEmptyNode();
    STNode exprListItem = parseExpressionListItem(delimiter);
    expressions.add(exprListItem);
    nextToken = peek();
    while (!isEndOfExpressionsList(nextToken.kind)) {
        delimiter = parseComma();
        exprListItem = parseExpressionListItem(delimiter);
        expressions.add(exprListItem);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(expressions);
}
/**
 * Check whether the given token kind terminates an expression list.
 *
 * @param tokenKind Kind of the token to check
 * @return <code>true</code> if the token ends the expression list
 */
private boolean isEndOfExpressionsList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
            // A comma always continues the list.
            return false;
        case EOF_TOKEN:
        case SEMICOLON_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
            // Closing delimiters (or the service body's open brace) end the list.
            return true;
        default:
            // Otherwise the list continues only if another expression can start here.
            return !isValidExprStart(tokenKind);
    }
}
/**
 * Parse a single item of an expression list.
 *
 * @param leadingComma Leading comma of the item
 * @return Expression list item node
 */
private STNode parseExpressionListItem(STNode leadingComma) {
    STNode itemExpr = parseExpression();
    return STNodeFactory.createExpressionListItemNode(leadingComma, itemExpr);
}
/**
 * Parse a service body block.
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * </code>
 *
 * @return Service body node
 */
private STNode parseServiceBody() {
    STNode openBraceToken = parseOpenBrace();
    STNode resourceList = parseResources();
    STNode closeBraceToken = parseCloseBrace();
    return STNodeFactory.createServiceBodyNode(openBraceToken, resourceList, closeBraceToken);
}
/**
 * Parse the resource definitions inside a service body.
 *
 * @return Node list of resource definitions
 */
private STNode parseResources() {
    List<STNode> resourceDefs = new ArrayList<>();
    STToken nextToken = peek();
    while (!isEndOfServiceDecl(nextToken.kind)) {
        STNode resourceDef = parseResource();
        // A null definition signals the end of the service body.
        if (resourceDef == null) {
            break;
        }
        resourceDefs.add(resourceDef);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(resourceDefs);
}
/**
 * Check whether the given token kind terminates a service declaration body.
 *
 * @param tokenKind Kind of the token to check
 * @return <code>true</code> if the token ends the service declaration
 */
private boolean isEndOfServiceDecl(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case EOF_TOKEN:
        case CLOSE_BRACE_PIPE_TOKEN:
        // 'type' and 'service' can only start a new top-level construct,
        // so reaching one implies the current body ended (perhaps with a
        // missing close-brace).
        case TYPE_KEYWORD:
        case SERVICE_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Parse a resource definition (i.e. service-method-defn).
 * <p>
 * <code>
 * service-body-block := { service-method-defn* }
 * <br/>
 * service-method-defn := metadata [resource] function identifier function-signature method-defn-body
 * </code>
 *
 * @return Parsed resource definition, or <code>null</code> at end of the body
 */
private STNode parseResource() {
    return parseResource(peek().kind);
}
/**
 * Parse a resource definition, handling its optional metadata first.
 *
 * @param nextTokenKind Kind of the next token
 * @return Parsed resource definition, or <code>null</code> at end of the body
 */
private STNode parseResource(SyntaxKind nextTokenKind) {
    STNode metadata;
    switch (nextTokenKind) {
        case RESOURCE_KEYWORD:
        case FUNCTION_KEYWORD:
            // Definition starts directly, without metadata.
            metadata = createEmptyMetadata();
            break;
        case DOCUMENTATION_LINE:
        case AT_TOKEN:
            // Definition is preceded by documentation and/or annotations.
            metadata = parseMetaData(nextTokenKind);
            nextTokenKind = peek().kind;
            break;
        default:
            if (isEndOfServiceDecl(nextTokenKind)) {
                return null;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.RESOURCE_DEF);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseResource(solution.tokenKind);
    }
    return parseResource(nextTokenKind, metadata);
}
/**
 * Parse a resource definition, given its already-parsed metadata.
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata of the definition
 * @return Parsed resource definition node
 */
private STNode parseResource(SyntaxKind nextTokenKind, STNode metadata) {
    if (nextTokenKind == SyntaxKind.RESOURCE_KEYWORD) {
        STNode resourceKeyword = parseResourceKeyword();
        return parseFuncDefinition(metadata, resourceKeyword, false);
    }
    if (nextTokenKind == SyntaxKind.FUNCTION_KEYWORD) {
        // A plain method definition without the 'resource' qualifier.
        return parseFuncDefinition(metadata, STNodeFactory.createEmptyNode(), false);
    }
    Solution solution = recover(peek(), ParserRuleContext.RESOURCE_DEF, metadata);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseResource(solution.tokenKind, metadata);
}
/**
 * Parse the <code>resource</code> keyword.
 *
 * @return Parsed resource-keyword token
 */
private STNode parseResourceKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RESOURCE_KEYWORD) {
        // Not the expected keyword: let the error handler recover.
        Solution sol = recover(nextToken, ParserRuleContext.RESOURCE_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Check whether the next construct is a service declaration. This is used to
 * decide whether an end-of-block has been reached when the next token is a
 * service-keyword, since 'service' can appear both in statements and as a
 * top-level declaration: reaching a service-decl here may mean the current
 * block is missing its close-brace.
 *
 * @param currentContext Current parser context (unused in the decision)
 * @param lookahead Lookahead offset at which the service keyword was seen
 * @return <code>true</code> if the next construct is a service declaration,
 *         <code>false</code> otherwise
 */
private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
    switch (peek(lookahead + 1).kind) {
        case IDENTIFIER_TOKEN:
            // 'service name on ...' or 'service name { ...' is a declaration;
            // anything else (e.g. '=', ';', '?') is a service value/variable.
            SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
            switch (tokenAfterIdentifier) {
                case ON_KEYWORD:
                case OPEN_BRACE_TOKEN:
                    return true;
                default:
                    return false;
            }
        case ON_KEYWORD:
            // Anonymous service declaration: 'service on ...'.
            return true;
        default:
            return false;
    }
}
/**
 * Parse listener declaration, given the qualifier.
 * <p>
 * <code>listener-decl := metadata [public] listener type-descriptor variable-name = expression ;</code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @return Parsed node
 */
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.LISTENER_DECL);
    // Components are consumed strictly in source order.
    STNode listenerKeyword = parseListenerKeyword();
    STNode listenerTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode listenerName = parseVariableName();
    STNode assignToken = parseAssignOp();
    STNode initExpr = parseExpression();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, listenerTypeDesc,
            listenerName, assignToken, initExpr, semicolon);
}
/**
 * Parse the <code>listener</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseListenerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LISTENER_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.LISTENER_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse constant declaration, given the qualifier.
 * <p>
 * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant declaration
 * @return Parsed node
 */
private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.CONSTANT_DECL);
    STNode constKeyword = parseConstantKeyword();
    STNode declaration = parseConstDecl(metadata, qualifier, constKeyword);
    endContext();
    return declaration;
}

/**
 * Parse the components that follow the const keyword of a constant declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
    // Dispatch on the kind of the token that follows 'const'.
    return parseConstDeclFromType(peek().kind, metadata, qualifier, constKeyword);
}
/**
 * Parse the part of a constant declaration that follows the const keyword, dispatching on
 * the next token: an annotation declaration, an identifier (optional type), or a
 * type-descriptor-starting token.
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the const keyword
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseConstDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                      STNode constKeyword) {
    switch (nextTokenKind) {
        case ANNOTATION_KEYWORD:
            // "const annotation ..." is actually an annotation declaration.
            switchContext(ParserRuleContext.ANNOTATION_DECL);
            return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
        case IDENTIFIER_TOKEN:
            // Identifier may be either the type name or the constant name.
            return parseConstantDeclWithOptionalType(metadata, qualifier, constKeyword);
        default:
            if (isTypeStartingToken(nextTokenKind)) {
                // Explicit type descriptor follows: fall out of the switch.
                break;
            }
            STToken token = peek();
            Solution solution =
                    recover(token, ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseConstDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword);
    }
    // const <type-descriptor> <identifier> = <expr> ;
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse a constant declaration where the token after <code>const</code> is an identifier,
 * which is either the user-defined type name or the constant name itself.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the const keyword
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseConstantDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword) {
    STNode varNameOrTypeName = parseStatementStartIdentifier();
    STNode constDecl = parseConstantDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName);
    return constDecl;
}

/**
 * Parse the component that follows the first identifier in a const decl. The identifier
 * can be either the type-name (a user defined type) or the var-name where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @param typeOrVarName Identifier that follows the const-keyword
 * @return Parsed node
 */
private STNode parseConstantDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword, STNode typeOrVarName) {
    STToken token = peek();
    return parseConstantDeclRhs(token.kind, metadata, qualifier, constKeyword, typeOrVarName);
}
/**
 * Disambiguate the first identifier of a const decl by the token that follows it:
 * another identifier means the first was the type; an equal token means it was the
 * constant name (type omitted).
 *
 * @param nextTokenKind Kind of the next token
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @param typeOrVarName Identifier that followed the const keyword
 * @return Parsed node
 */
private STNode parseConstantDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                    STNode constKeyword, STNode typeOrVarName) {
    STNode type;
    STNode variableName;
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
            // const <type> <name> ... : first identifier was the type.
            type = typeOrVarName;
            variableName = parseVariableName();
            break;
        case EQUAL_TOKEN:
            // const <name> = ... : type descriptor is absent.
            variableName = typeOrVarName;
            type = STNodeFactory.createEmptyNode();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, constKeyword,
                    typeOrVarName);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseConstantDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, typeOrVarName);
    }
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, type, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse the <code>const</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseConstantKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONST_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.CONST_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse nil type descriptor.
 * <p>
 * <code>nil-type-descriptor := ( ) </code>
 * </p>
 *
 * @return Parsed node
 */
private STNode parseNilTypeDescriptor() {
    startContext(ParserRuleContext.NIL_TYPE_DESCRIPTOR);
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilTypeDescriptorNode(openParen, closeParen);
}
/**
 * Parse typeof expression.
 * <p>
 * <code>typeof-expr := typeof expression</code>
 *
 * @param isRhsExpr Whether the expression is being parsed on the RHS of a statement
 * @return Typeof expression node
 */
private STNode parseTypeofExpression(boolean isRhsExpr) {
    STNode typeofKeyword = parseTypeofKeyword();
    // The operand binds with unary precedence.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createTypeofExpressionNode(typeofKeyword, operand);
}
/**
 * Parse the <code>typeof</code> keyword.
 *
 * @return Typeof-keyword node
 */
private STNode parseTypeofKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TYPEOF_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.TYPEOF_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse optional type descriptor.
 * <p>
 * <code>optional-type-descriptor := type-descriptor ? </code>
 * </p>
 *
 * @param typeDescriptorNode Type descriptor that precedes the question mark
 * @return Parsed node
 */
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
    STNode questionMark = parseQuestionMark();
    endContext();
    return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMark);
}
/**
 * Parse unary expression.
 * <p>
 * <code>unary-expr := + expression | - expression | ~ expression | ! expression</code>
 *
 * @param isRhsExpr Whether the expression is being parsed on the RHS of a statement
 * @return Unary expression node
 */
private STNode parseUnaryExpression(boolean isRhsExpr) {
    STNode operator = parseUnaryOperator();
    // Operand binds with unary precedence.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createUnaryExpressionNode(operator, operand);
}
/**
 * Parse unary operator.
 * <code>UnaryOperator := + | - | ~ | !</code>
 *
 * @return Parsed node
 */
private STNode parseUnaryOperator() {
    STToken nextToken = peek();
    if (!isUnaryOperator(nextToken.kind)) {
        Solution sol = recover(nextToken, ParserRuleContext.UNARY_OPERATOR);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Check whether the given token kind is a unary operator.
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
 */
private boolean isUnaryOperator(SyntaxKind kind) {
    return kind == SyntaxKind.PLUS_TOKEN
            || kind == SyntaxKind.MINUS_TOKEN
            || kind == SyntaxKind.NEGATION_TOKEN
            || kind == SyntaxKind.EXCLAMATION_MARK_TOKEN;
}
/**
 * Parse array type descriptor.
 * <p>
 * <code>
 * array-type-descriptor := member-type-descriptor [ [ array-length ] ]
 * member-type-descriptor := type-descriptor
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * inferred-array-length := *
 * </code>
 * </p>
 *
 * @param typeDescriptorNode Member type descriptor that precedes the brackets
 *
 * @return Parsed Node
 */
private STNode parseArrayTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracketToken = parseOpenBracket();
    // May be an empty node when the length is omitted (e.g. "int[]").
    STNode arrayLengthNode = parseArrayLength();
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    // NOTE(review): this intentionally builds an indexed-expression node rather than a
    // dedicated array-type node; presumably disambiguated later — confirm with callers.
    return STNodeFactory.createIndexedExpressionNode(typeDescriptorNode, openBracketToken, arrayLengthNode,
            closeBracketToken);
}
/**
 * Parse array length.
 * <p>
 * <code>
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * constant-reference-expr := variable-reference-expr
 * </code>
 * </p>
 *
 * @return Parsed array length, or an empty node when the length is omitted
 */
private STNode parseArrayLength() {
    STToken token = peek();
    switch (token.kind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case ASTERISK_TOKEN:
            // Literal length, or '*' for an inferred length.
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            // "[]" — no length given.
            return STNodeFactory.createEmptyNode();
        case IDENTIFIER_TOKEN:
            // Constant reference, possibly module-qualified.
            return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
        default:
            Solution sol = recover(token, ParserRuleContext.ARRAY_LENGTH);
            return sol.recoveredNode;
    }
}
/**
 * Parse annotations.
 * <p>
 * <i>Note: In the Ballerina spec, annotations-list is specified as one-or-more annotations,
 * and the usage is marked as an optional annotations-list. However, for the consistency of
 * the tree, here we make the annotation-list zero-or-more annotations, and the usage is
 * not-optional.</i>
 * <p>
 * <code>annots := annotation*</code>
 *
 * @return Parsed node
 */
private STNode parseAnnotations() {
    return parseAnnotations(peek().kind);
}

private STNode parseAnnotations(SyntaxKind nextTokenKind) {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    // Keep consuming annotations as long as the next token is '@'.
    for (SyntaxKind kind = nextTokenKind; kind == SyntaxKind.AT_TOKEN; kind = peek().kind) {
        annotations.add(parseAnnotation());
    }
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
 * Parse annotation attachment.
 * <p>
 * <code>annotation := @ annot-tag-reference annot-value</code>
 *
 * @return Parsed node
 */
private STNode parseAnnotation() {
    STNode atToken = parseAtToken();
    // The reference is mandatory: substitute a missing token when absent.
    STNode annotReference = peek().kind == SyntaxKind.IDENTIFIER_TOKEN
            ? parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE)
            : STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    // The value (a mapping constructor) is optional.
    STNode annotValue = peek().kind == SyntaxKind.OPEN_BRACE_TOKEN
            ? parseMappingConstructorExpr()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}
/**
 * Parse '@' token.
 *
 * @return Parsed node
 */
private STNode parseAtToken() {
    STToken token = peek();
    if (token.kind != SyntaxKind.AT_TOKEN) {
        Solution sol = recover(token, ParserRuleContext.AT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse metadata. Meta data consist of optional doc string and
 * an annotations list.
 * <p>
 * <code>metadata := [DocumentationString] annots</code>
 *
 * @param nextTokenKind Kind of the next token
 * @return Parse node
 */
private STNode parseMetaData(SyntaxKind nextTokenKind) {
    STNode docString;
    STNode annotations;
    switch (nextTokenKind) {
        case DOCUMENTATION_LINE:
            // Doc string first, then any annotations that follow it.
            docString = parseDocumentationString();
            annotations = parseAnnotations();
            break;
        case AT_TOKEN:
            // Annotations without a preceding doc string.
            docString = STNodeFactory.createEmptyNode();
            annotations = parseAnnotations(nextTokenKind);
            break;
        default:
            // No metadata present at all.
            return createEmptyMetadata();
    }
    return STNodeFactory.createMetadataNode(docString, annotations);
}
/**
 * Create empty metadata node.
 *
 * @return A metadata node with no doc string and no annotations
 */
private STNode createEmptyMetadata() {
    STNode emptyDocString = STNodeFactory.createEmptyNode();
    STNode emptyAnnotList = STNodeFactory.createNodeList(new ArrayList<>());
    return STNodeFactory.createMetadataNode(emptyDocString, emptyAnnotList);
}
/**
 * Parse is expression.
 * <code>is-expr := expression is type-descriptor</code>
 *
 * @param lhsExpr Preceding expression of the is expression
 * @return Is expression node
 */
private STNode parseTypeTestExpression(STNode lhsExpr) {
    STNode isKeyword = parseIsKeyword();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_EXPRESSION);
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDesc);
}
/**
 * Parse the <code>is</code> keyword.
 *
 * @return Is-keyword node
 */
private STNode parseIsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IS_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.IS_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse local type definition statement.
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @param annots Annotations preceding the statement
 * @return Local type definition statement
 */
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
            semicolon);
}
/**
 * Parse statements that start with an identifier.
 *
 * @param annots Annotations preceding the statement
 * @return Parsed node
 */
private STNode parseStatementStartsWithIdentifier(STNode annots) {
    startContext(ParserRuleContext.STMT_START_WITH_IDENTIFIER);
    STNode identifier = parseStatementStartIdentifier();
    SyntaxKind nextTokenKind = peek().kind;
    STNode statement = parseStatementStartsWithIdentifier(nextTokenKind, annots, identifier);
    endContext();
    return statement;
}

private STNode parseStatementStartsWithIdentifier(STNode annots, STNode identifier) {
    return parseStatementStartsWithIdentifier(peek().kind, annots, identifier);
}
/**
 * Disambiguate a statement whose first token was an identifier, by the token that follows:
 * it may be a var-decl (identifier was a type), an assignment/expression statement, or a
 * compound assignment.
 *
 * @param nextTokenKind Kind of the token after the identifier
 * @param annots Annotations preceding the statement
 * @param identifier The already-parsed leading identifier
 * @return Parsed node
 */
private STNode parseStatementStartsWithIdentifier(SyntaxKind nextTokenKind, STNode annots, STNode identifier) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // "T x ..." or "T? ..." — the identifier was a type name.
            return parseTypeDescStartsWithIdentifier(identifier, annots);
        case EQUAL_TOKEN:
        case SEMICOLON_TOKEN:
            // "x = ..." or "x;" — the identifier is an expression.
            return parseStamentStartWithExpr(nextTokenKind, annots, identifier);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // "T|..." or "T&..." is a union/intersection type unless the next-next token
            // is '=', in which case it is a compound assignment ("x |= ...").
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind != SyntaxKind.EQUAL_TOKEN) {
                return parseTypeDescStartsWithIdentifier(identifier, annots);
            }
            // Intentional fall-through: "|=" / "&=" are handled as compound assignment below.
        default:
            if (isCompoundBinaryOperator(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(identifier);
            }
            if (isValidExprRhsStart(nextTokenKind)) {
                // The identifier begins a larger expression/action.
                STNode expression = parseActionOrExpressionInLhs(nextTokenKind, identifier);
                return parseStamentStartWithExpr(annots, expression);
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_IDENTIFIER, annots, identifier);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseStatementStartsWithIdentifier(solution.tokenKind, annots, identifier);
    }
}
/**
 * Continue parsing a variable declaration whose type descriptor started with an identifier.
 *
 * @param typeDesc The leading identifier, treated as the start of a type descriptor
 * @param annots Annotations preceding the statement
 * @return Parsed var-decl statement node
 */
private STNode parseTypeDescStartsWithIdentifier(STNode typeDesc, STNode annots) {
    // We now know this is a var-decl, so rewrite the current context.
    switchContext(ParserRuleContext.VAR_DECL_STMT);
    startContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    // Extend the identifier into the full (possibly union/optional/array) type descriptor.
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
    endContext();
    STNode varName = parseVariableName();
    // No 'final' qualifier on this path.
    STNode finalKeyword = STNodeFactory.createEmptyNode();
    return parseVarDeclRhs(annots, finalKeyword, typeDesc, varName, false);
}
/**
 * Parse a statement that consists only of an action or expression.
 *
 * @param nextTokenKind Next token kind
 * @param annots Annotations (currently unused on this path)
 * @return Parsed node
 */
private STNode parseExpressionStament(SyntaxKind nextTokenKind, STNode annots) {
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    STNode expr = parseActionOrExpressionInLhs(nextTokenKind);
    STNode exprStatement = getExpressionAsStatement(expr);
    endContext();
    return exprStatement;
}

private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode annots) {
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    STNode expr = parseActionOrExpressionInLhs(nextTokenKind);
    STNode statement = parseStamentStartWithExpr(annots, expr);
    endContext();
    return statement;
}
/**
 * Parse statements that start with an expression.
 *
 * @param annots Annotations preceding the statement
 * @param expression Already-parsed leading expression
 * @return Parsed node
 */
private STNode parseStamentStartWithExpr(STNode annots, STNode expression) {
    return parseStamentStartWithExpr(peek().kind, annots, expression);
}
/**
 * Parse the component followed by the expression, at the beginning of a statement.
 *
 * @param nextTokenKind Kind of the next token
 * @param annots Annotations
 * @param expression Already-parsed leading expression
 * @return Parsed node
 */
private STNode parseStamentStartWithExpr(SyntaxKind nextTokenKind, STNode annots, STNode expression) {
    switch (nextTokenKind) {
        case EQUAL_TOKEN:
            // "expr = ..." — assignment statement.
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(expression);
        case SEMICOLON_TOKEN:
            // "expr;" — call/action statement.
            return getExpressionAsStatement(expression);
        case IDENTIFIER_TOKEN:
            // "T[...] x ..." — what we parsed as an indexed expression may actually be
            // an array type in a var-decl.
            if (isPossibleArrayType(expression)) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                STNode varName = parseVariableName();
                STNode finalKeyword = STNodeFactory.createEmptyNode();
                return parseVarDeclRhs(annots, finalKeyword, expression, varName, false);
            }
            // Intentional fall-through: not an array type, so treat like any other token.
        default:
            if (isCompoundBinaryOperator(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(expression);
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.STMT_START_WITH_EXPR_RHS, annots, expression);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseStamentStartWithExpr(solution.tokenKind, annots, expression);
    }
}
/**
 * Wrap a standalone expression/action into the corresponding statement node, validating
 * that the expression kind is allowed to stand alone as a statement.
 *
 * @param expression Expression or action to convert
 * @return Statement node (an INVALID expression-statement when the kind is not allowed)
 */
private STNode getExpressionAsStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
            // Only calls (possibly check-wrapped) may stand alone as statements.
            return parseCallStatement(expression);
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_SEND_ACTION:
            // Actions are always valid statements.
            return parseActionStatement(expression);
        default:
            // Anything else (e.g. "a + b;") is invalid; report and wrap it so parsing
            // can continue.
            this.errorHandler.reportInvalidNode(null,
                    "left hand side of an assignment must be a variable reference");
            STNode semicolon = parseSemicolon();
            return STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID, expression, semicolon);
    }
}
/**
 * <p>
 * Parse call statement, given the call expression.
 * <p>
 * <code>
 * call-stmt := call-expr ;
 * <br/>
 * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
 * </code>
 *
 * @param expression Call expression associated with the call statement
 * @return Call statement node
 */
private STNode parseCallStatement(STNode expression) {
    // Report (but do not reject) non-call expressions.
    validateExprInCallStmt(expression);
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolonToken);
}
/**
 * Validate (recursively, unwrapping check/braced expressions) that the expression of a
 * call statement is a function or method call. Reports an error for anything else; does
 * not reject the node.
 *
 * @param expression Expression to validate
 */
private void validateExprInCallStmt(STNode expression) {
    switch (expression.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
            break;
        case CHECK_EXPRESSION:
            // Unwrap "check expr" and validate the inner expression.
            validateExprInCallStmt(((STCheckExpressionNode) expression).expression);
            break;
        case REMOTE_METHOD_CALL_ACTION:
            break;
        case BRACED_EXPRESSION:
            // Unwrap "(expr)" and validate the inner expression.
            validateExprInCallStmt(((STBracedExpressionNode) expression).expression);
            break;
        default:
            if (isMissingNode(expression)) {
                // Already reported during recovery; don't double-report.
                break;
            }
            this.errorHandler.reportInvalidNode(null, "expression followed by the checking keyword must be a " +
                    "func-call, a method-call or a check-expr");
            break;
    }
}
/**
 * Check whether a node is a missing node.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is a missing node. <code>false</code> otherwise
 */
private boolean isMissingNode(STNode node) {
    // Unwrap simple-name-reference wrappers down to the underlying token.
    STNode target = node;
    while (target.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        target = ((STSimpleNameReferenceNode) target).name;
    }
    return target instanceof STMissingToken;
}
/**
 * Wrap a standalone action into an action-statement node, consuming the trailing semicolon.
 *
 * @param action Action associated with the statement
 * @return Action statement node
 */
private STNode parseActionStatement(STNode action) {
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolonToken);
}
/**
 * Parse remote method call action, given the starting expression.
 * <p>
 * <code>
 * remote-method-call-action := expression -> method-name ( arg-list )
 * <br/>
 * async-send-action := expression -> peer-worker ;
 * </code>
 *
 * @param expression LHS expression
 * @param isRhsExpr Is this an RHS action
 * @return Parsed node
 */
private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
    STNode rightArrow = parseRightArrow();
    SyntaxKind nextTokenKind = peek().kind;
    return parseRemoteCallOrAsyncSendActionRhs(nextTokenKind, expression, isRhsExpr, rightArrow);
}
/**
 * Parse the target that follows the right arrow: either the <code>default</code> peer
 * worker (async send) or a method/worker name, which is further disambiguated by the
 * token that comes after it.
 *
 * @param nextTokenKind Kind of the token after the arrow
 * @param expression LHS expression
 * @param isRhsExpr Is this an RHS action
 * @param rightArrow Already-parsed right-arrow token
 * @return Parsed node
 */
private STNode parseRemoteCallOrAsyncSendActionRhs(SyntaxKind nextTokenKind, STNode expression, boolean isRhsExpr,
                                                   STNode rightArrow) {
    STNode name;
    switch (nextTokenKind) {
        case DEFAULT_KEYWORD:
            // "expr -> default" can only be an async send.
            name = parseDefaultKeyword();
            return parseAsyncSendAction(expression, rightArrow, name);
        case IDENTIFIER_TOKEN:
            name = parseFunctionName();
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression,
                    isRhsExpr, rightArrow);
            if (solution.action == Action.REMOVE) {
                // Use the recovered token as the name and continue disambiguating.
                name = solution.recoveredNode;
                break;
            }
            return parseRemoteCallOrAsyncSendActionRhs(solution.tokenKind, expression, isRhsExpr, rightArrow);
    }
    return parseRemoteCallOrAsyncSendEnd(peek().kind, expression, rightArrow, name);
}
/**
 * Disambiguate "expr -> name" by the next token: an open-paren makes it a remote method
 * call; a semicolon makes it an async send.
 *
 * @param nextTokenKind Kind of the token after the name
 * @param expression LHS expression
 * @param rightArrow Right-arrow token
 * @param name Method or peer-worker name
 * @return Parsed node
 */
private STNode parseRemoteCallOrAsyncSendEnd(SyntaxKind nextTokenKind, STNode expression, STNode rightArrow,
                                             STNode name) {
    switch (nextTokenKind) {
        case OPEN_PAREN_TOKEN:
            return parseRemoteMethodCallAction(expression, rightArrow, name);
        case SEMICOLON_TOKEN:
            return parseAsyncSendAction(expression, rightArrow, name);
        default:
            STToken token = peek();
            Solution solution =
                    recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseRemoteCallOrAsyncSendEnd(solution.tokenKind, expression, rightArrow, name);
    }
}
/**
 * Parse the <code>default</code> keyword.
 *
 * @return Default keyword node
 */
private STNode parseDefaultKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DEFAULT_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.DEFAULT_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Create an async-send action node from its already-parsed components.
 *
 * @param expression LHS expression
 * @param rightArrow Right-arrow token
 * @param peerWorker Peer-worker name
 * @return Async-send action node
 */
private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
    return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
}

/**
 * Parse the argument list of a remote method call and assemble the action node.
 *
 * @param expression LHS expression
 * @param rightArrow Right-arrow token
 * @param name Remote method name
 * @return Remote method call action node
 */
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
    STNode openParen = parseOpenParenthesis(ParserRuleContext.ARG_LIST_START);
    STNode args = parseArgsList();
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParen, args,
            closeParen);
}
/**
 * Parse right arrow (<code>-></code>) token.
 *
 * @return Parsed node
 */
private STNode parseRightArrow() {
    STToken token = peek();
    if (token.kind != SyntaxKind.RIGHT_ARROW_TOKEN) {
        Solution sol = recover(token, ParserRuleContext.RIGHT_ARROW);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Check whether this is a valid lhs expression.
 *
 * @param tokenKind Kind of the next token
 * @return <code>true</code>if this is a start of an expression. <code>false</code> otherwise
 */
private boolean isValidLHSExpression(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case STRING_LITERAL:
        case IDENTIFIER_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Unary plus/minus is a valid LHS start, but "+=" / "-=" (compound
            // assignment operators) are not.
            return !isCompoundBinaryOperator(tokenKind);
        case OPEN_PAREN_TOKEN:
            // Intentional fall-through: an open paren is treated as not a valid LHS here.
        default:
            return false;
    }
}
/**
 * Parse parameterized type descriptor.
 * <p>
 * <code>parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter</code>
 *
 * @return Parsed node
 */
private STNode parseParameterizedTypeDescriptor() {
    STNode keyword = parseParameterizedTypeKeyword();
    STNode ltToken = parseLTToken();
    STNode typeParam = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createParameterizedTypeDescriptorNode(keyword, ltToken, typeParam, gtToken);
}
/**
 * Parse <code>map</code> or <code>future</code> or <code>typedesc</code> keyword token.
 *
 * @return Parsed node
 */
private STNode parseParameterizedTypeKeyword() {
    STToken nextToken = peek();
    SyntaxKind kind = nextToken.kind;
    if (kind == SyntaxKind.MAP_KEYWORD || kind == SyntaxKind.FUTURE_KEYWORD
            || kind == SyntaxKind.TYPEDESC_KEYWORD) {
        return consume();
    }
    Solution sol = recover(nextToken, ParserRuleContext.PARAMETERIZED_TYPE);
    return sol.recoveredNode;
}
/**
 * Parse <code> &gt; </code> token.
 * (Previous javadoc said '&lt;'; this method consumes the greater-than token.)
 *
 * @return Parsed node
 */
private STNode parseGTToken() {
    STToken token = peek();
    if (token.kind != SyntaxKind.GT_TOKEN) {
        Solution sol = recover(token, ParserRuleContext.GT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse <code> &lt; </code> token.
 * (Previous javadoc said '&gt;'; this method consumes the less-than token.)
 *
 * @return Parsed node
 */
private STNode parseLTToken() {
    STToken token = peek();
    if (token.kind != SyntaxKind.LT_TOKEN) {
        Solution sol = recover(token, ParserRuleContext.LT);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse nil literal. Here nil literal is only referred to ( ).
 *
 * @return Parsed node
 */
private STNode parseNilLiteral() {
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse annotation declaration, given the qualifier.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword (may be empty)
 * @return Parsed node
 */
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
    startContext(ParserRuleContext.ANNOTATION_DECL);
    STNode annotationKeyword = parseAnnotationKeyword();
    STNode declaration = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    endContext();
    return declaration;
}
/**
 * Parse the <code>annotation</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseAnnotationKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ANNOTATION_KEYWORD) {
        Solution sol = recover(nextToken, ParserRuleContext.ANNOTATION_KEYWORD);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse the components that follows after the annotation keyword of a annotation declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                           STNode annotationKeyword) {
    STToken nextToken = peek();
    return parseAnnotationDeclFromType(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword);
}

private STNode parseAnnotationDeclFromType(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                           STNode constKeyword, STNode annotationKeyword) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
            // Identifier may be the type name or the annotation tag.
            return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
        default:
            if (isTypeStartingToken(nextTokenKind)) {
                // Explicit type descriptor follows: fall out of the switch.
                break;
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier,
                    constKeyword, annotationKeyword);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseAnnotationDeclFromType(solution.tokenKind, metadata, qualifier, constKeyword,
                    annotationKeyword);
    }
    // annotation <type-descriptor> <annot-tag> [on ...] ;
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
    STNode annotTag = parseAnnotationTag();
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse annotation tag.
 * <p>
 * <code>annot-tag := identifier</code>
 *
 * @return Annotation tag node
 */
private STNode parseAnnotationTag() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        // Pass the already-peeked token instead of peeking again, consistent with every
        // other single-token parse method in this class. Behavior is unchanged: peek()
        // would have returned the same token.
        Solution sol = recover(token, ParserRuleContext.ANNOTATION_TAG);
        return sol.recoveredNode;
    }
}
/**
 * Parse an annotation declaration where the token after the annotation keyword is an
 * identifier: it may be either the type name or the annotation tag.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword) {
    STNode typeDescOrAnnotTag = parseAnnotationTag();
    // NOTE(review): parseAnnotationTag() consumes a single identifier token, which never
    // has kind QUALIFIED_NAME_REFERENCE — this branch looks unreachable as written.
    // Possibly a qualified-identifier parse was intended here; confirm.
    if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag, annotTag);
    }
    return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
}
/**
 * Parse the component that follows the first identifier in an annotation decl. The identifier
 * can be either the type-name (a user defined type) or the annot-tag, where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                      STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STToken token = peek();
    return parseAnnotationDeclRhs(token.kind, metadata, qualifier, constKeyword, annotationKeyword,
            typeDescOrAnnotTag);
}

private STNode parseAnnotationDeclRhs(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                      STNode constKeyword, STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STNode typeDesc;
    STNode annotTag;
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
            // "annotation T tag ..." — the first identifier was the type.
            typeDesc = typeDescOrAnnotTag;
            annotTag = parseAnnotationTag();
            break;
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
            // "annotation tag;" or "annotation tag on ..." — type descriptor absent.
            typeDesc = STNodeFactory.createEmptyNode();
            annotTag = typeDescOrAnnotTag;
            break;
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword,
                    annotationKeyword, typeDescOrAnnotTag);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseAnnotationDeclRhs(solution.tokenKind, metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag);
    }
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
STToken nextToken = peek();
return parseAnnotationDeclAttachPoints(nextToken.kind, metadata, qualifier, constKeyword, annotationKeyword,
typeDesc, annotTag);
}
private STNode parseAnnotationDeclAttachPoints(SyntaxKind nextTokenKind, STNode metadata, STNode qualifier,
                                               STNode constKeyword, STNode annotationKeyword, STNode typeDesc,
                                               STNode annotTag) {
    // Parse the optional "on attach-points" portion and the terminating semicolon
    // of an annotation declaration, then assemble the full declaration node.
    STNode onKeyword;
    STNode attachPoints;
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
            // No attach points declared: both parts are empty.
            onKeyword = STNodeFactory.createEmptyNode();
            attachPoints = STNodeFactory.createEmptyNode();
            break;
        case ON_KEYWORD:
            onKeyword = parseOnKeyword();
            attachPoints = parseAnnotationAttachPoints();
            break;
        default:
            // Unexpected token: recover and retry with the recovered token kind.
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier,
                    constKeyword, annotationKeyword, typeDesc, annotTag);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseAnnotationDeclAttachPoints(solution.tokenKind, metadata, qualifier, constKeyword,
                    annotationKeyword, typeDesc, annotTag);
    }
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
            typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
}
/**
* Parse annotation attach points.
* <p>
* <code>
* annot-attach-points := annot-attach-point (, annot-attach-point)*
* <br/><br/>
* annot-attach-point := dual-attach-point | source-only-attach-point
* <br/><br/>
* dual-attach-point := [source] dual-attach-point-ident
* <br/><br/>
* dual-attach-point-ident :=
* [object] type
* | [object|resource] function
* | parameter
* | return
* | service
* | [object|record] field
* <br/><br/>
* source-only-attach-point := source source-only-attach-point-ident
* <br/><br/>
* source-only-attach-point-ident :=
* annotation
* | external
* | var
* | const
* | listener
* | worker
* </code>
*
* @return Parsed node
*/
private STNode parseAnnotationAttachPoints() {
    startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
    List<STNode> attachPoints = new ArrayList<>();
    STToken nextToken = peek();
    // An 'on' keyword must be followed by at least one attach point; if the list
    // ends immediately, report it and substitute a missing identifier token.
    if (isEndAnnotAttachPointList(nextToken.kind)) {
        endContext();
        this.errorHandler.reportMissingTokenError("missing attach point");
        return STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    }
    // Parse the first attach point, then alternate (separator, attach-point) pairs.
    // Separators are stored in the same flat node list as the attach points.
    STNode attachPoint = parseAnnotationAttachPoint();
    attachPoints.add(attachPoint);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndAnnotAttachPointList(nextToken.kind)) {
        leadingComma = parseAttachPointEnd();
        if (leadingComma == null) {
            // Separator parsing decided the list has ended.
            break;
        }
        attachPoints.add(leadingComma);
        attachPoint = parseAnnotationAttachPoint();
        if (attachPoint == null) {
            // Trailing separator with no attach point after it: report and insert
            // a missing identifier token so the tree stays well-formed.
            this.errorHandler.reportMissingTokenError("missing attach point");
            attachPoint = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            attachPoints.add(attachPoint);
            break;
        }
        attachPoints.add(attachPoint);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(attachPoints);
}
/**
* Parse annotation attach point end.
*
* @return Parsed node
*/
private STNode parseAttachPointEnd() {
    // Look ahead and let the kind-aware overload decide.
    return parseAttachPointEnd(peek().kind);
}
private STNode parseAttachPointEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case SEMICOLON_TOKEN:
            // null signals the end of the attach-point list to the caller.
            return null;
        case COMMA_TOKEN:
            // A comma separator: more attach points follow.
            return consume();
        default:
            Solution sol = recover(peek(), ParserRuleContext.ATTACH_POINT_END);
            if (sol.action == Action.REMOVE) {
                return sol.recoveredNode;
            }
            // If recovery produced a comma the list continues; otherwise it ends.
            return sol.tokenKind == SyntaxKind.COMMA_TOKEN ? sol.recoveredNode : null;
    }
}
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    // The attach-point list ends at the declaration's semicolon or at end of file.
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
* Parse annotation attach point.
*
* @return Parsed node
*/
private STNode parseAnnotationAttachPoint() {
    // Peek the next token and delegate to the kind-aware overload.
    STToken nextToken = peek();
    return parseAnnotationAttachPoint(nextToken.kind);
}
private STNode parseAnnotationAttachPoint(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
            return null;
        // Source-only attach-point idents: the 'source' keyword is mandatory.
        // Falling through to SOURCE_KEYWORD lets parseSourceKeyword() recover
        // (insert the missing 'source') when the ident appears without it.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);
        // Dual attach-point idents: 'source' is optional and absent here.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case RESOURCE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT);
            return solution.recoveredNode;
    }
}
/**
* Parse source keyword.
*
* @return Parsed node
*/
private STNode parseSourceKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SOURCE_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.SOURCE_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
 * Parse attach point ident, given the preceding source keyword.
* <p>
* <code>
* source-only-attach-point-ident := annotation | external | var | const | listener | worker
* <br/><br/>
* dual-attach-point-ident := [object] type | [object|resource] function | parameter
* | return | service | [object|record] field
* </code>
*
* @param sourceKeyword Source keyword
* @return Parsed node
*/
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    // Peek the next token and delegate to the kind-aware overload.
    STToken nextToken = peek();
    return parseAttachPointIdent(nextToken.kind, sourceKeyword);
}
private STNode parseAttachPointIdent(SyntaxKind nextTokenKind, STNode sourceKeyword) {
    switch (nextTokenKind) {
        // Source-only idents: a single ident completes the attach point.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode secondIdent = STNodeFactory.createEmptyNode();
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);
        // Dual idents: a second ident may follow (e.g. "object function").
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            Solution solution = recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            // Treat the recovered token as the first ident and continue parsing.
            firstIdent = solution.recoveredNode;
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
    }
}
/**
* Parse dual-attach-point ident.
*
* @param sourceKeyword Source keyword
* @param firstIdent first part of the dual attach-point
* @return Parsed node
*/
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    // Only object/resource/record idents take a second ident; the rest stand alone.
    STNode secondIdent;
    if (firstIdent.kind == SyntaxKind.OBJECT_KEYWORD) {
        secondIdent = parseIdentAfterObjectIdent();
    } else if (firstIdent.kind == SyntaxKind.RESOURCE_KEYWORD) {
        secondIdent = parseFunctionIdent();
    } else if (firstIdent.kind == SyntaxKind.RECORD_KEYWORD) {
        secondIdent = parseFieldIdent();
    } else {
        // type, function, parameter, return, service, field: single-ident attach point.
        secondIdent = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, firstIdent, secondIdent);
}
/**
* Parse the idents that are supported after object-ident.
*
* @return Parsed node
*/
private STNode parseIdentAfterObjectIdent() {
    STToken nextToken = peek();
    SyntaxKind kind = nextToken.kind;
    // After 'object', only type/function/field idents are valid.
    if (kind == SyntaxKind.TYPE_KEYWORD || kind == SyntaxKind.FUNCTION_KEYWORD
            || kind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    }
    return recover(nextToken, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT).recoveredNode;
}
/**
* Parse function ident.
*
* @return Parsed node
*/
private STNode parseFunctionIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FUNCTION_KEYWORD) {
        // Not the expected ident: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.FUNCTION_IDENT).recoveredNode;
    }
    return consume();
}
/**
* Parse field ident.
*
* @return Parsed node
*/
private STNode parseFieldIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FIELD_KEYWORD) {
        // Not the expected ident: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.FIELD_IDENT).recoveredNode;
    }
    return consume();
}
/**
* Parse XML namespace declaration.
* <p>
* <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
* <br/>
* xml-namespace-uri := simple-const-expr
* <br/>
* xml-namespace-prefix := identifier
* </code>
*
 * @return Parsed node
*/
private STNode parseXMLNamepsaceDeclaration() {
    // xmlns-decl := xmlns xml-namespace-uri [as xml-namespace-prefix] ;
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode keyword = parseXMLNSKeyword();
    STNode uri = parseXMLNamespaceUri();
    STNode decl = parseXMLDeclRhs(keyword, uri);
    endContext();
    return decl;
}
/**
* Parse xmlns keyword.
*
* @return Parsed node
*/
private STNode parseXMLNSKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XMLNS_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.XMLNS_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse namespace uri.
*
* @return Parsed node
*/
private STNode parseXMLNamespaceUri() {
    STNode expr = parseConstExpr();
    // Only string-valued constant expressions are valid namespace URIs.
    SyntaxKind kind = expr.kind;
    boolean stringLike = kind == SyntaxKind.STRING_LITERAL || kind == SyntaxKind.IDENTIFIER_TOKEN
            || kind == SyntaxKind.QUALIFIED_NAME_REFERENCE;
    if (!stringLike) {
        this.errorHandler.reportInvalidNode(null, "namespace uri must be a subtype of string");
    }
    return expr;
}
private STNode parseConstExpr() {
    // Constant expressions get their own context for error recovery.
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STNode constExpr = parseConstExprInternal();
    endContext();
    return constExpr;
}
private STNode parseConstExprInternal() {
    // Peek the next token and delegate to the kind-aware overload.
    return parseConstExprInternal(peek().kind);
}
/**
* Parse constants expr.
*
* @return Parsed node
*/
private STNode parseConstExprInternal(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        // Literals that may start a constant expression.
        case STRING_LITERAL:
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case DECIMAL_FLOATING_POINT_LITERAL:
        case HEX_FLOATING_POINT_LITERAL:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return parseBasicLiteral();
        case IDENTIFIER_TOKEN:
            // A (possibly module-qualified) constant reference.
            return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Signed numeric literal.
            return parseSignedIntOrFloat();
        case OPEN_BRACE_TOKEN:
            // NOTE(review): an open-brace leading to parseNilLiteral() is surprising —
            // nil is usually spelled "()"; confirm this token kind is intended.
            return parseNilLiteral();
        default:
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.CONSTANT_EXPRESSION_START);
            return solution.recoveredNode;
    }
}
/**
 * Parse the portion after the namespace-uri of an XML declaration.
*
* @param xmlnsKeyword XMLNS keyword
* @param namespaceUri Namespace URI
* @return Parsed node
*/
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri) {
    // Peek the next token and delegate to the kind-aware overload.
    STToken nextToken = peek();
    return parseXMLDeclRhs(nextToken.kind, xmlnsKeyword, namespaceUri);
}
private STNode parseXMLDeclRhs(SyntaxKind nextTokenKind, STNode xmlnsKeyword, STNode namespaceUri) {
    // Parse the optional "as prefix" portion and the terminating semicolon.
    STNode asKeyword = STNodeFactory.createEmptyNode();
    STNode namespacePrefix = STNodeFactory.createEmptyNode();
    switch (nextTokenKind) {
        case AS_KEYWORD:
            asKeyword = parseAsKeyword();
            namespacePrefix = parseNamespacePrefix();
            break;
        case SEMICOLON_TOKEN:
            // No prefix declared; keep both optional parts empty.
            break;
        default:
            // Unexpected token: recover and retry with the recovered token kind.
            STToken token = peek();
            Solution solution =
                    recover(token, ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseXMLDeclRhs(solution.tokenKind, xmlnsKeyword, namespaceUri);
    }
    STNode semicolon = parseSemicolon();
    return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
            semicolon);
}
/**
 * Parse namespace prefix.
*
* @return Parsed node
*/
private STNode parseNamespacePrefix() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        // Not an identifier: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.NAMESPACE_PREFIX).recoveredNode;
    }
    return consume();
}
/**
* Parse named worker declaration.
* <p>
* <code>named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }</code>
*
* @param annots Annotations attached to the worker decl
* @return Parsed node
*/
private STNode parseNamedWorkerDeclaration(STNode annots) {
    // named-worker-decl := [annots] worker worker-name return-type-descriptor { sequence-stmt }
    startContext(ParserRuleContext.NAMED_WORKER_DECL);
    STNode workerKeyword = parseWorkerKeyword();
    STNode name = parseWorkerName();
    STNode returnTypeDesc = parseReturnTypeDescriptor();
    STNode body = parseBlockNode();
    endContext();
    return STNodeFactory.createNamedWorkerDeclarationNode(annots, workerKeyword, name, returnTypeDesc, body);
}
private STNode parseReturnTypeDescriptor() {
    // The return-type descriptor is optional; absence is an empty node.
    if (peek().kind != SyntaxKind.RETURNS_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = consume();
    STNode annotations = parseAnnotations();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annotations, typeDesc);
}
/**
* Parse worker keyword.
*
* @return Parsed node
*/
private STNode parseWorkerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WORKER_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.WORKER_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse worker name.
* <p>
* <code>worker-name := identifier</code>
*
* @return Parsed node
*/
private STNode parseWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        // Not an identifier: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.WORKER_NAME).recoveredNode;
    }
    return consume();
}
/**
* Parse documentation string.
* <p>
* <code>DocumentationString := DocumentationLine +</code>
* <p>
 * Refer {@link BallerinaLexer}.
*
* @return Parsed node
*/
private STNode parseDocumentationString() {
    // Greedily collect consecutive documentation lines into a single node.
    List<STNode> docLines = new ArrayList<>();
    while (peek().kind == SyntaxKind.DOCUMENTATION_LINE) {
        docLines.add(consume());
    }
    return STNodeFactory.createDocumentationStringNode(STNodeFactory.createNodeList(docLines));
}
/**
* Parse lock statement.
* <code>lock-stmt := lock block-stmt ;</code>
*
* @return Lock statement
*/
private STNode parseLockStatement() {
    // lock-stmt := lock block-stmt
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKeyword = parseLockKeyword();
    STNode body = parseBlockNode();
    endContext();
    return STNodeFactory.createLockStatementNode(lockKeyword, body);
}
/**
* Parse lock-keyword.
*
* @return lock-keyword node
*/
private STNode parseLockKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LOCK_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.LOCK_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Checks whether the given expression is a possible array-type-desc.
* <br/>
* i.e.: a member-access-expr, where its container is also a member-access.
* <code>a[b][]</code>
*
 * @param expression Expression to check
* @return <code>true</code> if the expression provided is a possible array-type desc. <code>false</code> otherwise
*/
private boolean isPossibleArrayType(STNode expression) {
    // Walk down through member-access containers; the innermost container must be
    // a plain (possibly qualified) name reference for this to be an array type.
    STNode current = expression;
    while (current.kind == SyntaxKind.INDEXED_EXPRESSION) {
        current = ((STIndexedExpressionNode) current).containerExpression;
    }
    return current.kind == SyntaxKind.SIMPLE_NAME_REFERENCE
            || current.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE;
}
/**
* Parse union type descriptor.
* union-type-descriptor := type-descriptor | type-descriptor
*
 * @param leftTypeDesc Type desc in the LHS of the union type desc.
* @param context Current context.
* @return parsed union type desc node
*/
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context) {
    // union-type-descriptor := type-descriptor | type-descriptor
    STNode pipe = parsePipeToken();
    STNode rhsTypeDesc = parseTypeDescriptor(context);
    return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipe, rhsTypeDesc);
}
/**
* Parse pipe token.
*
* @return parsed pipe token node
*/
private STNode parsePipeToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.PIPE_TOKEN) {
        // Not a '|': delegate to error recovery.
        return recover(nextToken, ParserRuleContext.PIPE).recoveredNode;
    }
    return consume();
}
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        // Tokens that unambiguously begin a type descriptor.
        case IDENTIFIER_TOKEN:
        case SERVICE_KEYWORD:
        case RECORD_KEYWORD:
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case OPEN_PAREN_TOKEN:
        case MAP_KEYWORD:
        case FUTURE_KEYWORD:
        case TYPEDESC_KEYWORD:
        case ERROR_KEYWORD:
        case STREAM_KEYWORD:
        case TABLE_KEYWORD:
        case FUNCTION_KEYWORD:
        case OPEN_BRACKET_TOKEN:
            return true;
        default:
            // Singleton type descriptors (literal-starting) and built-in simple types.
            return isSingletonTypeDescStart(nodeKind, false) || isSimpleType(nodeKind);
    }
}
/*
 * Fix: the original had a separate, redundant branch for TYPE_DESC with its own
 * identical "return true"; it is merged into the single true-returning group.
 */
static boolean isSimpleType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case INT_KEYWORD:
        case FLOAT_KEYWORD:
        case DECIMAL_KEYWORD:
        case BOOLEAN_KEYWORD:
        case STRING_KEYWORD:
        case BYTE_KEYWORD:
        case XML_KEYWORD:
        case JSON_KEYWORD:
        case HANDLE_KEYWORD:
        case ANY_KEYWORD:
        case ANYDATA_KEYWORD:
        case NEVER_KEYWORD:
        case SERVICE_KEYWORD:
        case VAR_KEYWORD:
        case ERROR_KEYWORD:
        case STREAM_KEYWORD:
        case READONLY_KEYWORD:
        case DISTINCT_KEYWORD:
        case TYPE_DESC: // already-materialized type descriptor node kind
            return true;
        default:
            return false;
    }
}
private SyntaxKind getTypeSyntaxKind(SyntaxKind typeKeyword) {
    // Map a built-in type keyword to its type-descriptor syntax kind;
    // anything else falls back to the generic TYPE_DESC kind.
    SyntaxKind typeDescKind;
    switch (typeKeyword) {
        case INT_KEYWORD:
            typeDescKind = SyntaxKind.INT_TYPE_DESC;
            break;
        case FLOAT_KEYWORD:
            typeDescKind = SyntaxKind.FLOAT_TYPE_DESC;
            break;
        case DECIMAL_KEYWORD:
            typeDescKind = SyntaxKind.DECIMAL_TYPE_DESC;
            break;
        case BOOLEAN_KEYWORD:
            typeDescKind = SyntaxKind.BOOLEAN_TYPE_DESC;
            break;
        case STRING_KEYWORD:
            typeDescKind = SyntaxKind.STRING_TYPE_DESC;
            break;
        case BYTE_KEYWORD:
            typeDescKind = SyntaxKind.BYTE_TYPE_DESC;
            break;
        case XML_KEYWORD:
            typeDescKind = SyntaxKind.XML_TYPE_DESC;
            break;
        case JSON_KEYWORD:
            typeDescKind = SyntaxKind.JSON_TYPE_DESC;
            break;
        case HANDLE_KEYWORD:
            typeDescKind = SyntaxKind.HANDLE_TYPE_DESC;
            break;
        case ANY_KEYWORD:
            typeDescKind = SyntaxKind.ANY_TYPE_DESC;
            break;
        case ANYDATA_KEYWORD:
            typeDescKind = SyntaxKind.ANYDATA_TYPE_DESC;
            break;
        case NEVER_KEYWORD:
            typeDescKind = SyntaxKind.NEVER_TYPE_DESC;
            break;
        case SERVICE_KEYWORD:
            typeDescKind = SyntaxKind.SERVICE_TYPE_DESC;
            break;
        case VAR_KEYWORD:
            typeDescKind = SyntaxKind.VAR_TYPE_DESC;
            break;
        default:
            typeDescKind = SyntaxKind.TYPE_DESC;
            break;
    }
    return typeDescKind;
}
/**
* Parse fork-keyword.
*
* @return Fork-keyword node
*/
private STNode parseForkKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FORK_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.FORK_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse multiple named worker declarations.
*
* @return named-worker-declarations node array
*/
/*
 * Fixes: program to the List interface instead of ArrayList; a single-case switch
 * replaced with a plain if/else; typo "atleast" corrected in the user-facing
 * error message; redundant local removed.
 */
private STNode parseMultipleNamedWorkerDeclarations() {
    // Collect named-worker declarations; any other statement inside a fork is invalid
    // and is reported but not added to the list.
    List<STNode> workers = new ArrayList<>();
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        if (stmt.kind == SyntaxKind.NAMED_WORKER_DECLARATION) {
            workers.add(stmt);
        } else {
            this.errorHandler.reportInvalidNode(null, "Only named-workers are allowed here");
        }
    }
    if (workers.isEmpty()) {
        this.errorHandler.reportInvalidNode(null, "Fork Statement must contain at least one named-worker");
    }
    return STNodeFactory.createNodeList(workers);
}
/**
* Parse fork statement.
* <code>fork-stmt := fork { named-worker-decl+ }</code>
*
* @return Fork statement
*/
private STNode parseForkStatement() {
    // fork-stmt := fork { named-worker-decl+ }
    startContext(ParserRuleContext.FORK_STMT);
    STNode forkKeyword = parseForkKeyword();
    STNode openBrace = parseOpenBrace();
    STNode workers = parseMultipleNamedWorkerDeclarations();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createForkStatementNode(forkKeyword, openBrace, workers, closeBrace);
}
/**
* Parse decimal floating point literal.
*
* @return Parsed node
*/
private STNode parseDecimalFloatingPointLiteral() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) {
        // Not the expected literal: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.DECIMAL_FLOATING_POINT_LITERAL).recoveredNode;
    }
    return consume();
}
/**
* Parse hex floating point literal.
*
* @return Parsed node
*/
private STNode parseHexFloatingPointLiteral() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.HEX_FLOATING_POINT_LITERAL) {
        // Not the expected literal: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.HEX_FLOATING_POINT_LITERAL).recoveredNode;
    }
    return consume();
}
/**
* Parse trap expression.
* <p>
* <code>
* trap-expr := trap expression
* </code>
*
* @param allowActions Allow actions
* @param isRhsExpr Whether this is a RHS expression or not
* @return Trap expression node
*/
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions) {
    // trap-expr := trap expression
    STNode trapKeyword = parseTrapKeyword();
    STNode trappedExpr = parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions);
    // Trapping an action yields a trap-action node; otherwise a trap-expression node.
    SyntaxKind nodeKind = isAction(trappedExpr) ? SyntaxKind.TRAP_ACTION : SyntaxKind.TRAP_EXPRESSION;
    return STNodeFactory.createTrapExpressionNode(nodeKind, trapKeyword, trappedExpr);
}
/**
* Parse trap-keyword.
*
* @return Trap-keyword node
*/
private STNode parseTrapKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TRAP_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.TRAP_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse list constructor expression.
* <p>
* <code>
* list-constructor-expr := [ [ expr-list ] ]
* <br/>
* expr-list := expression (, expression)*
* </code>
*
* @return Parsed node
*/
private STNode parseListConstructorExpr() {
    // list-constructor-expr := [ [expr-list] ]
    startContext(ParserRuleContext.LIST_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode members = parseOptionalExpressionsList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createListConstructorExpressionNode(openBracket, members, closeBracket);
}
/**
* Parse optional expression list.
*
* @return Parsed node
*/
/*
 * Fix: the empty-list case allocated a second ArrayList although `expressions`
 * was still empty; the existing list is reused instead.
 */
private STNode parseOptionalExpressionsList() {
    // expr-list := expression (, expression)* — the whole list is optional.
    List<STNode> expressions = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfExpressionsList(nextToken.kind)) {
        return STNodeFactory.createNodeList(expressions);
    }
    // First member, then alternating (separator, member) pairs; separators are
    // stored in the same flat node list.
    expressions.add(parseExpression());
    nextToken = peek();
    while (!isEndOfExpressionsList(nextToken.kind)) {
        STNode memberEnd = parseListConstructorMemberEnd(nextToken.kind);
        if (memberEnd == null) {
            break;
        }
        expressions.add(memberEnd);
        expressions.add(parseExpression());
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(expressions);
}
private STNode parseListConstructorMemberEnd() {
    // Peek the next token and delegate to the kind-aware overload.
    STToken nextToken = peek();
    return parseListConstructorMemberEnd(nextToken.kind);
}
private STNode parseListConstructorMemberEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case COMMA_TOKEN:
            // A comma separator: more members follow.
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            // null signals the end of the member list to the caller.
            return null;
        default:
            // Unexpected token: recover and retry with the recovered token kind.
            Solution solution = recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseListConstructorMemberEnd(solution.tokenKind);
    }
}
/**
* Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt</code>
*
* @return foreach statement
*/
private STNode parseForEachStatement() {
    // foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt
    startContext(ParserRuleContext.FOREACH_STMT);
    STNode forEachKeyword = parseForEachKeyword();
    STNode bindingPattern = parseTypedBindingPattern();
    STNode inKeyword = parseInKeyword();
    STNode iterable = parseActionOrExpression();
    STNode body = parseBlockNode();
    endContext();
    return STNodeFactory.createForEachStatementNode(forEachKeyword, bindingPattern, inKeyword, iterable, body);
}
/**
* Parse foreach-keyword.
*
* @return ForEach-keyword node
*/
private STNode parseForEachKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FOREACH_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.FOREACH_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse in-keyword.
*
* @return In-keyword node
*/
private STNode parseInKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IN_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.IN_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse type cast expression.
* <p>
* <code>
* type-cast-expr := < type-cast-param > expression
* <br/>
* type-cast-param := [annots] type-descriptor | annots
* </code>
*
* @return Parsed node
*/
private STNode parseTypeCastExpr(boolean isRhsExpr) {
    // type-cast-expr := < type-cast-param > expression
    startContext(ParserRuleContext.TYPE_CAST);
    STNode ltToken = parseLTToken();
    STNode castParam = parseTypeCastParam();
    STNode gtToken = parseGTToken();
    endContext();
    // The operand is parsed at unary precedence.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
    return STNodeFactory.createTypeCastExpressionNode(ltToken, castParam, gtToken, operand);
}
private STNode parseTypeCastParam() {
    // type-cast-param := [annots] type-descriptor | annots
    STNode annot;
    STNode type;
    if (peek().kind == SyntaxKind.AT_TOKEN) {
        annot = parseAnnotations();
        if (isTypeStartingToken(peek().kind)) {
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
        } else {
            // Annotations alone are allowed; the type is optional in that case.
            type = STNodeFactory.createEmptyNode();
        }
    } else {
        annot = STNodeFactory.createEmptyNode();
        type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    }
    return STNodeFactory.createTypeCastParamNode(annot, type);
}
/**
* Parse table constructor expression.
* <p>
* <code>
* table-constructor-expr-rhs := [ [row-list] ]
* </code>
*
* @param tableKeyword tableKeyword that precedes this rhs
* @param keySpecifier keySpecifier that precedes this rhs
* @return Parsed node
*/
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
    // table-constructor-expr-rhs := [ [row-list] ]
    switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode rows = parseRowList();
    STNode closeBracket = parseCloseBracket();
    return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rows,
            closeBracket);
}
/**
* Parse table-keyword.
*
* @return Table-keyword node
*/
private STNode parseTableKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TABLE_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.TABLE_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse table rows.
* <p>
* <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
*
* @return Parsed node
*/
/*
 * Fix: the empty-list case allocated a second ArrayList although `mappings`
 * was still empty; the existing list is reused instead.
 */
private STNode parseRowList() {
    // row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]
    List<STNode> mappings = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfTableRowList(nextToken.kind)) {
        return STNodeFactory.createNodeList(mappings);
    }
    // First row, then alternating (comma, row) pairs stored in one flat list.
    mappings.add(parseMappingConstructorExpr());
    nextToken = peek();
    while (!isEndOfTableRowList(nextToken.kind)) {
        mappings.add(parseComma());
        mappings.add(parseMappingConstructorExpr());
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(mappings);
}
private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
    // Closing bracket or EOF terminates the row list; commas and '{' (a new row)
    // explicitly continue it; anything else defers to the mapping-constructor check.
    if (tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return true;
    }
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return false;
    }
    return isEndOfMappingConstructor(tokenKind);
}
/**
* Parse key specifier.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier() {
    // key-specifier := key ( [field-name (, field-name)*] )
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode keyKeyword = parseKeyKeyword();
    STNode openParen = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
    STNode fieldNames = parseFieldNames();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
}
/**
* Parse key-keyword.
*
* @return Key-keyword node
*/
private STNode parseKeyKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.KEY_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.KEY_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse field names.
* <p>
* <code>field-name-list := [ field-name (, field-name)* ]</code>
*
* @return Parsed node
*/
/*
 * Fix: the empty-list case allocated a second ArrayList although `fieldNames`
 * was still empty; the existing list is reused instead.
 */
private STNode parseFieldNames() {
    // field-name-list := [ field-name (, field-name)* ]
    List<STNode> fieldNames = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfFieldNamesList(nextToken.kind)) {
        return STNodeFactory.createNodeList(fieldNames);
    }
    // First name, then alternating (comma, name) pairs stored in one flat list.
    fieldNames.add(parseVariableName());
    nextToken = peek();
    while (!isEndOfFieldNamesList(nextToken.kind)) {
        fieldNames.add(parseComma());
        fieldNames.add(parseVariableName());
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(fieldNames);
}
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    // Only commas and identifiers continue a field-name list.
    return tokenKind != SyntaxKind.COMMA_TOKEN && tokenKind != SyntaxKind.IDENTIFIER_TOKEN;
}
/**
* Parse error type descriptor.
* <p>
* error-type-descriptor := error [error-type-param]
* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
* detail-type-descriptor := type-descriptor
* inferred-type-descriptor := *
* </p>
*
* @return Parsed node
*/
private STNode parseErrorTypeDescriptor() {
    // error-type-descriptor := error [error-type-param]
    STNode errorKeywordToken = parseErrorKeyWord();
    STNode errorTypeParamsNode;
    STToken nextToken = peek();
    STToken nextNextToken = peek(2);
    // Parse the optional type-param when '<' follows, or when '>' appears one token
    // later. NOTE(review): the '||' on nextNextToken presumably recovers a missing
    // '<' (e.g. "error T>") — confirm this is intentional and not meant to be '&&'.
    if (nextToken.kind == SyntaxKind.LT_TOKEN || nextNextToken.kind == SyntaxKind.GT_TOKEN) {
        errorTypeParamsNode = parseErrorTypeParamsNode();
    } else {
        errorTypeParamsNode = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);
}
/**
* Parse error type param node.
* <p>
* error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
* detail-type-descriptor := type-descriptor
* inferred-type-descriptor := *
* </p>
*
* @return Parsed node
*/
private STNode parseErrorTypeParamsNode() {
    // error-type-param := < (detail-type-descriptor | inferred-type-descriptor) >
    STNode ltToken = parseLTToken();
    // '*' denotes the inferred detail type; otherwise a full type descriptor follows.
    STNode parameter = peek().kind == SyntaxKind.ASTERISK_TOKEN
            ? consume()
            : parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createErrorTypeParamsNode(ltToken, parameter, gtToken);
}
/**
* Parse error-keyword.
*
* @return Parsed error-keyword node
*/
private STNode parseErrorKeyWord() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ERROR_KEYWORD) {
        // Not the expected keyword: delegate to error recovery.
        return recover(nextToken, ParserRuleContext.ERROR_KEYWORD).recoveredNode;
    }
    return consume();
}
/**
* Parse stream type descriptor.
* <p>
* stream-type-descriptor := stream [stream-type-parameters]
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type descriptor node
*/
private STNode parseStreamTypeDescriptor() {
    // stream-type-descriptor := stream [stream-type-parameters]
    STNode streamKeyword = parseStreamKeyword();
    STNode typeParams;
    if (peek().kind == SyntaxKind.LT_TOKEN) {
        typeParams = parseStreamTypeParamsNode();
    } else {
        // Type parameters are optional.
        typeParams = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createStreamTypeDescriptorNode(streamKeyword, typeParams);
}
/**
* Parse stream type params node.
* <p>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type params node
*/
private STNode parseStreamTypeParamsNode() {
    // stream-type-parameters := < type-descriptor [, type-descriptor] >
    STNode ltToken = parseLTToken();
    startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode firstTypeParam = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode typeParams = parseStreamTypeParamsNode(ltToken, firstTypeParam);
    endContext();
    return typeParams;
}
private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
    // Peek the next token and delegate to the kind-aware overload.
    STToken nextToken = peek();
    return parseStreamTypeParamsNode(nextToken.kind, ltToken, leftTypeDescNode);
}
    private STNode parseStreamTypeParamsNode(SyntaxKind nextTokenKind, STNode ltToken, STNode leftTypeDescNode) {
        // Parses the remainder of stream<T [, E]> after the first type-desc.
        STNode commaToken, rightTypeDescNode, gtToken;
        switch (nextTokenKind) {
            case COMMA_TOKEN:
                // stream<T, E>: a second (completion) type descriptor follows.
                commaToken = parseComma();
                rightTypeDescNode = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
                break;
            case GT_TOKEN:
                // stream<T>: no second type; keep empty placeholders.
                commaToken = STNodeFactory.createEmptyNode();
                rightTypeDescNode = STNodeFactory.createEmptyNode();
                break;
            default:
                // Unexpected token: run recovery, then retry with the token kind
                // chosen by the recovery solution.
                Solution solution =
                        recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseStreamTypeParamsNode(solution.tokenKind, ltToken, leftTypeDescNode);
        }
        gtToken = parseGTToken();
        return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
                gtToken);
    }
/**
* Parse stream-keyword.
*
* @return Parsed stream-keyword node
*/
private STNode parseStreamKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STREAM_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.STREAM_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse let expression.
* <p>
* <code>
* let-expr := let let-var-decl [, let-var-decl]* in expression
* </code>
*
* @return Parsed node
*/
private STNode parseLetExpression(boolean isRhsExpr) {
STNode letKeyword = parseLetKeyword();
STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
STNode inKeyword = parseInKeyword();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
}
/**
* Parse let-keyword.
*
* @return Let-keyword node
*/
private STNode parseLetKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LET_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.LET_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse let variable declarations.
* <p>
* <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
*
* @return Parsed node
*/
    private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
        // Parses a comma-separated list of let-var-decls (commas included in the
        // same flat node list). `context` distinguishes let-expr vs. let-clause
        // usage for error recovery.
        startContext(context);
        List<STNode> varDecls = new ArrayList<>();
        STToken nextToken = peek();
        // Empty declaration list: report and return an empty node list.
        if (isEndOfLetVarDeclarations(nextToken.kind)) {
            endContext();
            this.errorHandler.reportMissingTokenError("missing let variable declaration");
            return STNodeFactory.createNodeList(varDecls);
        }
        STNode varDec = parseLetVarDec(isRhsExpr);
        varDecls.add(varDec);
        nextToken = peek();
        STNode leadingComma;
        // Each subsequent declaration is preceded by a comma.
        while (!isEndOfLetVarDeclarations(nextToken.kind)) {
            leadingComma = parseComma();
            varDecls.add(leadingComma);
            varDec = parseLetVarDec(isRhsExpr);
            varDecls.add(varDec);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(varDecls);
    }
private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
switch (tokenKind) {
case COMMA_TOKEN:
case AT_TOKEN:
return false;
case IN_KEYWORD:
return true;
default:
return !isTypeStartingToken(tokenKind);
}
}
/**
* Parse let variable declaration.
* <p>
* <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
*
* @return Parsed node
*/
private STNode parseLetVarDec(boolean isRhsExpr) {
STNode annot = parseAnnotations();
STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
STNode varName = parseVariableName();
STNode assign = parseAssignOp();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createLetVariableDeclarationNode(annot, type, varName, assign, expression);
}
/**
* Parse raw backtick string template expression.
* <p>
* <code>BacktickString := `expression`</code>
*
* @return Template expression node
*/
private STNode parseTemplateExpression() {
STNode type = STNodeFactory.createEmptyNode();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
private STNode parseTemplateContent() {
List<STNode> items = new ArrayList<>();
STToken nextToken = peek();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode contentItem = parseTemplateItem();
items.add(contentItem);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
private boolean isEndOfBacktickContent(SyntaxKind kind) {
switch (kind) {
case EOF_TOKEN:
case BACKTICK_TOKEN:
return true;
default:
return false;
}
}
private STNode parseTemplateItem() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return parseInterpolation();
}
return consume();
}
/**
* Parse string template expression.
* <p>
* <code>string-template-expr := string ` expression `</code>
*
* @return String template expression node
*/
private STNode parseStringTemplateExpression() {
STNode type = parseStringKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
/**
* Parse <code>string</code> keyword.
*
* @return string keyword node
*/
private STNode parseStringKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STRING_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.STRING_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse XML template expression.
* <p>
* <code>xml-template-expr := xml BacktickString</code>
*
* @return XML template expression
*/
private STNode parseXMLTemplateExpression() {
STNode xmlKeyword = parseXMLKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContentAsXML();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
startingBackTick, content, endingBackTick);
}
/**
* Parse <code>xml</code> keyword.
*
* @return xml keyword node
*/
private STNode parseXMLKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.XML_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.XML_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse the content of the template string as XML. This method first read the
* input in the same way as the raw-backtick-template (BacktickString). Then
* it parses the content as XML.
*
* @return XML node
*/
    private STNode parseTemplateContentAsXML() {
        // Interpolations are pulled out into `expressions` (in order) and each is
        // replaced by the "${}" placeholder in the raw text, so the XML lexer
        // sees a contiguous XML string. The XML parser re-attaches the queued
        // expressions at each placeholder position.
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder xmlStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                // Literal chunk: append its raw text.
                xmlStringBuilder.append(((STToken) contentItem).text());
            } else {
                // Interpolation node: queue it and leave a placeholder.
                xmlStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }
        // Re-lex and parse the collected text as XML.
        TextDocument textDocument = TextDocuments.from(xmlStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(textDocument.getCharacterReader()));
        XMLParser xmlParser = new XMLParser(tokenReader, expressions);
        return xmlParser.parse();
    }
/**
* Parse interpolation of a back-tick string.
* <p>
* <code>
* interpolation := ${ expression }
* </code>
*
* @return Interpolation node
*/
private STNode parseInterpolation() {
startContext(ParserRuleContext.INTERPOLATION);
STNode interpolStart = parseInterpolationStart();
STNode expr = parseExpression();
removeAdditionalTokensInInterpolation();
STNode closeBrace = parseCloseBrace();
endContext();
return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}
/**
* Parse interpolation start token.
* <p>
* <code>interpolation-start := ${</code>
*
* @return Interpolation start token
*/
private STNode parseInterpolationStart() {
STToken token = peek();
if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
return sol.recoveredNode;
}
}
/**
* Remove if there any tokens left after the expression inside the interpolation.
*/
private void removeAdditionalTokensInInterpolation() {
while (true) {
STToken nextToken = peek();
switch (nextToken.kind) {
case EOF_TOKEN:
return;
case CLOSE_BRACE_TOKEN:
return;
default:
consume();
this.errorHandler.reportInvalidNode(nextToken, "invalid token '" + nextToken.text() + "'");
}
}
}
/**
* Parse back-tick token.
*
* @return Back-tick token
*/
private STNode parseBacktickToken(ParserRuleContext ctx) {
STToken token = peek();
if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
return consume();
} else {
Solution sol = recover(token, ctx);
return sol.recoveredNode;
}
}
/**
* Parse table type descriptor.
* <p>
* table-type-descriptor := table row-type-parameter [key-constraint]
* row-type-parameter := type-parameter
* key-constraint := key-specifier | key-type-constraint
* key-specifier := key ( [ field-name (, field-name)* ] )
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed table type desc node.
*/
private STNode parseTableTypeDescriptor() {
STNode tableKeywordToken = parseTableKeyword();
STNode rowTypeParameterNode = parseRowTypeParameter();
STNode keyConstraintNode;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.KEY_KEYWORD) {
STNode keyKeywordToken = parseKeyKeyword();
keyConstraintNode = parseKeyConstraint(keyKeywordToken);
} else {
keyConstraintNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}
/**
* Parse row type parameter node.
* <p>
* row-type-parameter := type-parameter
* </p>
*
* @return Parsed node.
*/
private STNode parseRowTypeParameter() {
startContext(ParserRuleContext.ROW_TYPE_PARAM);
STNode rowTypeParameterNode = parseTypeParameter();
endContext();
return rowTypeParameterNode;
}
/**
* Parse type parameter node.
* <p>
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseTypeParameter() {
STNode ltToken = parseLTToken();
STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
STNode gtToken = parseGTToken();
return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}
/**
* Parse key constraint.
* <p>
* key-constraint := key-specifier | key-type-constraint
* </p>
*
* @return Parsed node.
*/
private STNode parseKeyConstraint(STNode keyKeywordToken) {
return parseKeyConstraint(peek().kind, keyKeywordToken);
}
    private STNode parseKeyConstraint(SyntaxKind nextTokenKind, STNode keyKeywordToken) {
        // After 'key': '(' starts a key-specifier, '<' starts a key-type-constraint.
        switch (nextTokenKind) {
            case OPEN_PAREN_TOKEN:
                return parseKeySpecifier(keyKeywordToken);
            case LT_TOKEN:
                return parseKeyTypeConstraint(keyKeywordToken);
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseKeyConstraint(solution.tokenKind, keyKeywordToken);
        }
    }
/**
* Parse key specifier given parsed key keyword token.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier(STNode keyKeywordToken) {
startContext(ParserRuleContext.KEY_SPECIFIER);
STNode openParenToken = parseOpenParenthesis(ParserRuleContext.OPEN_PARENTHESIS);
STNode fieldNamesNode = parseFieldNames();
STNode closeParenToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
}
/**
* Parse key type constraint.
* <p>
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed node
*/
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
STNode typeParameterNode = parseTypeParameter();
return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
}
/**
* Parse function type descriptor.
* <p>
* <code>function-type-descriptor := function function-signature</code>
*
* @return Function type descriptor node
*/
private STNode parseFunctionTypeDesc() {
startContext(ParserRuleContext.FUNC_TYPE_DESC);
STNode functionKeyword = parseFunctionKeyword();
STNode signature = parseFuncSignature(true);
endContext();
return STNodeFactory.createFunctionTypeDescriptorNode(functionKeyword, signature);
}
/**
* Parse explicit anonymous function expression.
* <p>
* <code>explicit-anonymous-function-expr := [annots] function function-signature anon-func-body</code>
*
* @param annots Annotations.
* @return Anonymous function expression node
*/
    private STNode parseExplicitFunctionExpression(STNode annots) {
        // explicit-anonymous-function-expr := [annots] function function-signature anon-func-body
        startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
        STNode funcKeyword = parseFunctionKeyword();
        STNode funcSignature = parseFuncSignature(false);
        // NOTE: the ANON_FUNC_EXPRESSION context opened above is closed inside
        // parseAnonFuncBody(), not here.
        STNode funcBody = parseAnonFuncBody();
        return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, funcKeyword, funcSignature,
                funcBody);
    }
/**
* Parse anonymous function body.
* <p>
* <code>anon-func-body := block-function-body | expr-function-body</code>
*
* @return
*/
private STNode parseAnonFuncBody() {
return parseAnonFuncBody(peek().kind);
}
    private STNode parseAnonFuncBody(SyntaxKind nextTokenKind) {
        // anon-func-body := block-function-body | expr-function-body. Each
        // successful branch closes the ANON_FUNC_EXPRESSION context opened by
        // the caller (parseExplicitFunctionExpression).
        switch (nextTokenKind) {
            case OPEN_BRACE_TOKEN:
            case EOF_TOKEN:
                // EOF is grouped with '{' — presumably so a truncated input still
                // yields a (recovered) block body; TODO confirm.
                STNode body = parseFunctionBodyBlock(true);
                endContext();
                return body;
            case RIGHT_DOUBLE_ARROW:
                endContext();
                return parseExpressionFuncBody(true);
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.ANON_FUNC_BODY);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseAnonFuncBody(solution.tokenKind);
        }
    }
/**
* Parse expression function body.
* <p>
* <code>expr-function-body := => expression</code>
*
* @return Expression function body node
*/
private STNode parseExpressionFuncBody(boolean isAnon) {
STNode rightDoubleArrow = parseDoubleRightArrow();
STNode expression = parseExpression();
STNode semiColon;
if (isAnon) {
semiColon = STNodeFactory.createEmptyNode();
} else {
semiColon = parseSemicolon();
}
return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}
/**
* Parse '=>' token.
*
* @return Double right arrow token
*/
private STNode parseDoubleRightArrow() {
STToken token = peek();
if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
return sol.recoveredNode;
}
}
    private STNode parseImplicitAnonFunc(STNode params) {
        // The LHS of '=>' must be a single name, an inferred param list, or a
        // braced expression (which is converted to a one-element param list).
        switch (params.kind) {
            case SIMPLE_NAME_REFERENCE:
            case INFER_PARAM_LIST:
                break;
            case BRACED_EXPRESSION:
                params = getAnonFuncParam((STBracedExpressionNode) params);
                break;
            default:
                // Invalid LHS: report, but keep parsing the body with the node as-is.
                this.errorHandler.reportInvalidNode(null, "lhs must be an identifier or a param list");
        }
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression();
        return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
    }
/**
* Create a new anon-func-param node from a braced expression.
*
* @param params Braced expression
* @return Anon-func param node
*/
private STNode getAnonFuncParam(STBracedExpressionNode params) {
List<STNode> paramList = new ArrayList<>();
paramList.add(params.expression);
return STNodeFactory.createImplicitAnonymousFunctionParameters(params.openParen,
STNodeFactory.createNodeList(paramList), params.closeParen);
}
/**
* Parse implicit anon function expression.
*
* @param openParen Open parenthesis token
* @param firstParam First parameter
* @return Implicit anon function expression node
*/
    private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam) {
        // Continues '(p1, p2, ...) => expr' after the first parameter has been
        // consumed. Commas and params go into one flat list.
        List<STNode> paramList = new ArrayList<>();
        paramList.add(firstParam);
        STToken nextToken = peek();
        STNode paramEnd;
        STNode param;
        while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
            paramEnd = parseImplicitAnonFuncParamEnd(nextToken.kind);
            // null signals the close-paren was reached.
            if (paramEnd == null) {
                break;
            }
            paramList.add(paramEnd);
            param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
            paramList.add(param);
            nextToken = peek();
        }
        STNode params = STNodeFactory.createNodeList(paramList);
        STNode closeParen = parseCloseParenthesis();
        // Closes the context opened by the caller before the open-paren.
        endContext();
        STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        return parseImplicitAnonFunc(inferedParams);
    }
private STNode parseImplicitAnonFuncParamEnd() {
return parseImplicitAnonFuncParamEnd(peek().kind);
}
    private STNode parseImplicitAnonFuncParamEnd(SyntaxKind nextTokenKind) {
        // Returns the separating comma, or null when the close-paren ends the list.
        switch (nextTokenKind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseImplicitAnonFuncParamEnd(solution.tokenKind);
        }
    }
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case RETURNS_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case OPEN_BRACE_TOKEN:
case RIGHT_DOUBLE_ARROW:
return true;
default:
return false;
}
}
/**
* Parse tuple type descriptor.
* <p>
* <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
* <br/><br/>
* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
* | [ tuple-rest-descriptor ]
* <br/><br/>
* tuple-rest-descriptor := type-descriptor ...
* </code>
*
* @return
*/
private STNode parseTupleTypeDesc() {
STNode openBracket = parseOpenBracket();
startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
STNode memberTypeDesc = parseTupleMemberTypeDescList();
STNode restTypeDesc = parseTupleRestTypeDesc();
STNode closeBracket = parseCloseBracket();
endContext();
return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, restTypeDesc, closeBracket);
}
/**
* Parse tuple member type descriptors.
*
* @return Parsed node
*/
    private STNode parseTupleMemberTypeDescList() {
        // Parses member-type-descriptor (, member-type-descriptor)*; commas and
        // type descriptors are appended to the same flat node list.
        List<STNode> typeDescList = new ArrayList<>();
        STToken nextToken = peek();
        // Empty tuple body: report and return an empty list.
        if (isEndOfTypeList(nextToken.kind)) {
            this.errorHandler.reportMissingTokenError("missing type-desc");
            return STNodeFactory.createNodeList(new ArrayList<>());
        }
        STNode typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        typeDescList.add(typeDesc);
        nextToken = peek();
        STNode tupleMemberRhs;
        while (!isEndOfTypeList(nextToken.kind)) {
            tupleMemberRhs = parseTupleMemberRhs(nextToken.kind);
            // null signals the closing bracket was reached.
            if (tupleMemberRhs == null) {
                break;
            }
            typeDescList.add(tupleMemberRhs);
            typeDesc = parseTypeDescriptorInternal(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            typeDescList.add(typeDesc);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(typeDescList);
    }
private STNode parseTupleMemberRhs() {
return parseTupleMemberRhs(peek().kind);
}
    private STNode parseTupleMemberRhs(SyntaxKind nextTokenKind) {
        // Returns the separating comma, or null when ']' ends the member list.
        switch (nextTokenKind) {
            case COMMA_TOKEN:
                return parseComma();
            case CLOSE_BRACKET_TOKEN:
                return null;
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTupleMemberRhs(solution.tokenKind);
        }
    }
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case CLOSE_BRACKET_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case EOF_TOKEN:
case EQUAL_TOKEN:
case OPEN_BRACE_TOKEN:
case SEMICOLON_TOKEN:
return true;
default:
return false;
}
}
    private STNode parseTupleRestTypeDesc() {
        // NOTE(review): tuple-rest-descriptor ("type-descriptor ...") is not
        // parsed here; an empty node is always returned as a placeholder —
        // confirm this is an intentional stub.
        return STNodeFactory.createEmptyNode();
    }
/**
* Parse table constructor or query expression.
* <p>
* <code>
* table-constructor-or-query-expr := table-constructor-expr | query-expr
* <br/>
* table-constructor-expr := table [key-specifier] [ [row-list] ]
* <br/>
* query-expr := [query-construct-type] query-pipeline select-clause
* <br/>
* query-construct-type := table key-specifier | stream
* </code>
*
* @return Parsed node
*/
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
STNode tableOrQueryExpr = parseTableConstructorOrQuery(peek().kind, isRhsExpr);
endContext();
return tableOrQueryExpr;
}
    private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, boolean isRhsExpr) {
        // Disambiguates on the first token: 'from' => plain query, 'stream' =>
        // stream query, 'table' => table constructor or table query.
        STNode queryConstructType;
        switch (nextTokenKind) {
            case FROM_KEYWORD:
                queryConstructType = STNodeFactory.createEmptyNode();
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case STREAM_KEYWORD:
                queryConstructType = parseStreamKeyword();
                return parseQueryExprRhs(queryConstructType, isRhsExpr);
            case TABLE_KEYWORD:
                STNode tableKeyword = parseTableKeyword();
                return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTableConstructorOrQuery(solution.tokenKind, isRhsExpr);
        }
    }
private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
return parseTableConstructorOrQuery(peek().kind, tableKeyword, isRhsExpr);
}
    private STNode parseTableConstructorOrQuery(SyntaxKind nextTokenKind, STNode tableKeyword, boolean isRhsExpr) {
        // After 'table': '[' means a constructor without a key specifier; 'key'
        // means a key specifier followed by either a constructor or a query.
        STNode keySpecifier;
        switch (nextTokenKind) {
            case OPEN_BRACKET_TOKEN:
                keySpecifier = STNodeFactory.createEmptyNode();
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            case KEY_KEYWORD:
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTableConstructorOrQuery(solution.tokenKind, tableKeyword, isRhsExpr);
        }
    }
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
return parseTableConstructorOrQueryRhs(peek().kind, tableKeyword, keySpecifier, isRhsExpr);
}
    private STNode parseTableConstructorOrQueryRhs(SyntaxKind nextTokenKind, STNode tableKeyword, STNode keySpecifier,
                                                   boolean isRhsExpr) {
        // After 'table key-specifier': 'from' starts a query, '[' a constructor.
        switch (nextTokenKind) {
            case FROM_KEYWORD:
                return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
            case OPEN_BRACKET_TOKEN:
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword,
                        keySpecifier, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseTableConstructorOrQueryRhs(solution.tokenKind, tableKeyword, keySpecifier, isRhsExpr);
        }
    }
/**
* Parse query construct type.
* <p>
* <code>query-construct-type := table key-specifier</code>
*
* @return Parsed node
*/
    private STNode parseQueryConstructType(STNode tableKeyword, STNode keySpecifier) {
        // Wraps an already-parsed 'table key-specifier' into a construct-type node.
        return STNodeFactory.createQueryConstructTypeNode(tableKeyword, keySpecifier);
    }
/**
* Parse query expression.
* <p>
* <code>
* query-expr-rhs := query-pipeline select-clause
* <br/>
* query-pipeline := from-clause intermediate-clause*
* </code>
*
* @param queryConstructType queryConstructType that precedes this rhs
* @return Parsed node
*/
    private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
        // query-expr-rhs := from-clause intermediate-clause* select-clause.
        // The current context is replaced rather than nested, since the caller
        // already started TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION.
        switchContext(ParserRuleContext.QUERY_EXPRESSION);
        STNode fromClause = parseFromClause(isRhsExpr);
        List<STNode> clauses = new ArrayList<>();
        boolean hasReachedSelectClause = false;
        STNode intermediateClause;
        STNode selectClause = null;
        while (!isEndOfIntermediateClause(peek().kind)) {
            intermediateClause = parseIntermediateClause(isRhsExpr);
            if (!hasReachedSelectClause) {
                // The first select clause ends the pipeline; later clauses are errors.
                if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
                    selectClause = intermediateClause;
                    hasReachedSelectClause = true;
                } else {
                    clauses.add(intermediateClause);
                }
            } else {
                // NOTE(review): clauses parsed after 'select' are reported but then
                // dropped from the tree; confirm that is intentional.
                this.errorHandler.reportMissingTokenError("extra clauses after select clause");
            }
        }
        // select is mandatory: if none was seen, parse (or recover) one now.
        if (!hasReachedSelectClause) {
            selectClause = parseSelectClause(isRhsExpr);
        }
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause);
    }
/**
* Parse an intermediate clause.
* <p>
* <code>
* intermediate-clause := from-clause | where-clause | let-clause
* </code>
*
* @return Parsed node
*/
private STNode parseIntermediateClause(boolean isRhsExpr) {
return parseIntermediateClause(peek().kind, isRhsExpr);
}
    private STNode parseIntermediateClause(SyntaxKind nextTokenKind, boolean isRhsExpr) {
        // intermediate-clause := from-clause | where-clause | let-clause. A
        // select clause is parsed here too; the caller filters it out of the
        // intermediate-clause list.
        switch (nextTokenKind) {
            case FROM_KEYWORD:
                return parseFromClause(isRhsExpr);
            case WHERE_KEYWORD:
                return parseWhereClause(isRhsExpr);
            case LET_KEYWORD:
                return parseLetClause(isRhsExpr);
            case SELECT_KEYWORD:
                return parseSelectClause(isRhsExpr);
            default:
                // Unexpected token: recover, then retry with the recovered kind.
                Solution solution = recover(peek(), ParserRuleContext.QUERY_EXPRESSION_RHS, isRhsExpr);
                if (solution.action == Action.REMOVE) {
                    return solution.recoveredNode;
                }
                return parseIntermediateClause(solution.tokenKind, isRhsExpr);
        }
    }
    private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
        // True when the token cannot begin another query clause: closers,
        // statement terminators, and top-level definition starters.
        switch (tokenKind) {
            case CLOSE_BRACE_TOKEN:
            case CLOSE_PAREN_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case SEMICOLON_TOKEN:
            case PUBLIC_KEYWORD:
            case FUNCTION_KEYWORD:
            case EOF_TOKEN:
            case RESOURCE_KEYWORD:
            case LISTENER_KEYWORD:
            case DOCUMENTATION_LINE:
            case PRIVATE_KEYWORD:
            case RETURNS_KEYWORD:
            case SERVICE_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
            case FINAL_KEYWORD:
                return true;
            default:
                // Tokens that continue the surrounding expression (binary
                // operators etc.) also end the clause list.
                return isValidExprRhsStart(tokenKind);
        }
    }
/**
* Parse from clause.
* <p>
* <code>from-clause := from typed-binding-pattern in expression</code>
*
* @return Parsed node
*/
private STNode parseFromClause(boolean isRhsExpr) {
STNode fromKeyword = parseFromKeyword();
STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
STNode varName = parseVariableName();
STNode inKeyword = parseInKeyword();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createFromClauseNode(fromKeyword, type, varName, inKeyword, expression);
}
/**
* Parse from-keyword.
*
* @return From-keyword node
*/
private STNode parseFromKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FROM_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.FROM_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse where clause.
* <p>
* <code>where-clause := where expression</code>
*
* @return Parsed node
*/
private STNode parseWhereClause(boolean isRhsExpr) {
STNode whereKeyword = parseWhereKeyword();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
}
/**
* Parse where-keyword.
*
* @return Where-keyword node
*/
private STNode parseWhereKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.WHERE_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.WHERE_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse let clause.
* <p>
* <code>let-clause := let let-var-decl [, let-var-decl]* </code>
*
* @return Parsed node
*/
private STNode parseLetClause(boolean isRhsExpr) {
STNode letKeyword = parseLetKeyword();
STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);
}
/**
* Parse select clause.
* <p>
* <code>select-clause := select expression</code>
*
* @return Parsed node
*/
private STNode parseSelectClause(boolean isRhsExpr) {
STNode selectKeyword = parseSelectKeyword();
STNode expression = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false);
return STNodeFactory.createSelectClauseNode(selectKeyword, expression);
}
/**
* Parse select-keyword.
*
* @return Select-keyword node
*/
private STNode parseSelectKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.SELECT_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.SELECT_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse start action.
* <p>
* <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
*
* @return Start action node
*/
    private STNode parseStartAction(STNode annots) {
        // start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)
        // NOTE(review): `annots` is accepted but never attached to the created
        // start-action node — possible dropped annotations; confirm.
        STNode startKeyword = parseStartKeyword();
        STNode expr = parseActionOrExpression();
        // Only func-call / method-call / remote-method-call are valid here.
        validateExprInStartAction(expr);
        return STNodeFactory.createStartActionNode(startKeyword, expr);
    }
/**
* Parse start keyword.
*
* @return Start keyword node
*/
private STNode parseStartKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.START_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.START_KEYWORD);
return sol.recoveredNode;
}
}
private void validateExprInStartAction(STNode expression) {
switch (expression.kind) {
case FUNCTION_CALL:
case METHOD_CALL:
case REMOTE_METHOD_CALL_ACTION:
break;
default:
if (isMissingNode(expression)) {
break;
}
this.errorHandler.reportInvalidNode(null, "expression followed by the start keyword must be a " +
"func-call, a method-call or a remote-method-call");
break;
}
}
/**
* Parse flush action.
* <p>
* <code>flush-action := flush [peer-worker]</code>
*
* @return flush action node
*/
private STNode parseFlushAction() {
STNode flushKeyword = parseFlushKeyword();
STNode peerWorker = parseOptionalPeerWorkerName();
return STNodeFactory.createFlushActionNode(flushKeyword, peerWorker);
}
/**
* Parse flush keyword.
*
* @return flush keyword node
*/
private STNode parseFlushKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FLUSH_KEYWORD) {
return consume();
} else {
Solution sol = recover(token, ParserRuleContext.FLUSH_KEYWORD);
return sol.recoveredNode;
}
}
/**
* Parse peer worker.
* <p>
* <code>peer-worker := worker-name | default</code>
*
* @return peer worker name node
*/
private STNode parseOptionalPeerWorkerName() {
STToken token = peek();
switch (token.kind) {
case IDENTIFIER_TOKEN:
case DEFAULT_KEYWORD:
return STNodeFactory.createSimpleNameReferenceNode(consume());
default:
return STNodeFactory.createEmptyNode();
}
}
/**
* Parse intersection type descriptor.
* <p>
* intersection-type-descriptor := type-descriptor & type-descriptor
* </p>
*
* @return Parsed node
*/
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context) {
STNode bitwiseAndToken = consume();
STNode rightTypeDesc = parseTypeDescriptor(context);
return STNodeFactory.createIntersectionTypeDescriptorNode(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
}
/**
* Parse singleton type descriptor.
* <p>singleton-type-descriptor := simple-const-expr
* simple-const-expr :=
* nil-literal
* | boolean-literal
* | [Sign] int-literal
* | [Sign] floating-point-literal
* | string-literal
* | constant-reference-expr</p>
*/
private STNode parseSingletonTypeDesc() {
STNode simpleContExpr = parseConstExpr();
return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
}
private STNode parseSignedIntOrFloat() {
STNode operator = parseUnaryOperator();
STNode literal;
STToken nextToken = peek();
switch (nextToken.kind) {
case HEX_INTEGER_LITERAL:
case DECIMAL_FLOATING_POINT_LITERAL:
case HEX_FLOATING_POINT_LITERAL:
literal = consume();
break;
default:
literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL);
}
return STNodeFactory.createUnaryExpressionNode(operator, literal);
}
/**
 * Check whether the upcoming tokens can be the start of a singleton type descriptor.
 * <p>
 * Performs up to three tokens of lookahead. {@code tokenKind} may describe either the
 * immediate next token or a token the caller has effectively consumed already; the
 * lookahead window is shifted accordingly.
 *
 * @param tokenKind     Kind of the token to interpret as the potential singleton start
 * @param inTypeDescCtx Whether the parser is already inside a type-descriptor context
 * @return {@code true} if this looks like the start of a singleton type descriptor
 */
private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
STToken nextToken = peek();
STToken nextNextToken, nextNextNextToken;
// When tokenKind differs from the actual next token, the caller has moved one
// token ahead; shift the lookahead offsets by one to compensate.
if (tokenKind != nextToken.kind) {
nextNextToken = nextToken;
nextNextNextToken = peek(2);
} else {
nextNextToken = peek(2);
nextNextNextToken = peek(3);
}
switch (tokenKind) {
// Simple const literals start a singleton type only when already in a
// type-desc context, or when a valid type-desc continuation follows.
case STRING_LITERAL:
case DECIMAL_INTEGER_LITERAL:
case HEX_INTEGER_LITERAL:
case DECIMAL_FLOATING_POINT_LITERAL:
case HEX_FLOATING_POINT_LITERAL:
case TRUE_KEYWORD:
case FALSE_KEYWORD:
case NULL_KEYWORD:
if (inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken)) {
return true;
}
return false;
case PLUS_TOKEN:
case MINUS_TOKEN:
// A sign starts a singleton type in type-desc context, or when followed by a
// numeric literal and then an identifier (e.g. `-1 x`).
if (inTypeDescCtx) {
return true;
}
if (isIntOrFloat(nextNextToken) && nextNextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return true;
}
// Intentional fall-through: a bare sign with no matching continuation is
// not a singleton type start.
default:
return false;
}
}
/**
 * Check whether the given token is an int or a floating-point literal.
 *
 * @param token Token to check
 * @return {@code true} for any numeric (int/float, decimal/hex) literal token
 */
static boolean isIntOrFloat(STToken token) {
    SyntaxKind kind = token.kind;
    return kind == SyntaxKind.DECIMAL_INTEGER_LITERAL
            || kind == SyntaxKind.HEX_INTEGER_LITERAL
            || kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL
            || kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL;
}
/**
 * Check whether the given token is a valid continuation of a type descriptor when the
 * parser is not already inside a type-descriptor context.
 *
 * @param token Token to check
 * @return {@code true} if the token can follow a type descriptor
 */
private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
    SyntaxKind kind = token.kind;
    return kind == SyntaxKind.IDENTIFIER_TOKEN
            || kind == SyntaxKind.QUESTION_MARK_TOKEN
            || kind == SyntaxKind.OPEN_PAREN_TOKEN
            || kind == SyntaxKind.OPEN_BRACKET_TOKEN
            || kind == SyntaxKind.PIPE_TOKEN
            || kind == SyntaxKind.BITWISE_AND_TOKEN
            || kind == SyntaxKind.OPEN_BRACE_TOKEN
            || kind == SyntaxKind.ERROR_KEYWORD;
}
/**
 * Parse binding-patterns.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 *                  | list-binding-pattern | mapping-binding-pattern
 *                  | functional-binding-pattern<br/>
 * capture-binding-pattern := variable-name<br/>
 * wildcard-binding-pattern := _<br/>
 * list-binding-pattern := [ list-member-binding-patterns ]<br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]<br/>
 * mapping-binding-pattern := { field-binding-patterns }<br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name<br/>
 * rest-binding-pattern := ... variable-name<br/>
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * </code>
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseListBindingPattern();
    }
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseCaptureBindingPattern();
    }
    // Not a recognized binding-pattern start: recover.
    Solution sol = recover(nextToken, ParserRuleContext.BINDING_PATTERN);
    return sol.recoveredNode;
}
/**
 * Parse capture-binding-pattern.
 * <p>
 * <code>capture-binding-pattern := variable-name</code><br/>
 * <code>variable-name := identifier</code>
 *
 * @return capture-binding-pattern node
 */
private STNode parseCaptureBindingPattern() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.CAPTURE_BINDING_PATTERN);
        return sol.recoveredNode;
    }
    return STNodeFactory.createCaptureBindingPatternNode(parseVariableName());
}
/**
 * Parse list-binding-patterns.
 * <p>
 * <code>list-binding-pattern := [ list-member-binding-patterns ]</code><br/>
 * <code>list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]</code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
startContext(ParserRuleContext.LIST_BINDING_PATTERN);
ArrayList<STNode> bindingPatterns = new ArrayList<>();
STNode openBracket = parseOpenBracket();
// A list must contain at least one member; parse it before entering the loop.
STNode listBindingPatternMember = parselistBindingPatternMember();
bindingPatterns.add(listBindingPatternMember);
STToken token = peek();
STNode listBindingPatternRhs = null;
// Keep consuming "<comma> <member>" pairs. A rest-binding-pattern must be the
// last member, so the loop also stops once one has been parsed.
while (!isEndOfListBindingPattern(token.kind) &&
listBindingPatternMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
listBindingPatternRhs = parseListBindingpatternRhs(token.kind);
if (listBindingPatternRhs == null) {
// Null separator means the close bracket was reached.
break;
}
bindingPatterns.add(listBindingPatternRhs);
listBindingPatternMember = parselistBindingPatternMember();
bindingPatterns.add(listBindingPatternMember);
token = peek();
}
STNode closeBracket = parseCloseBracket();
STNode restBindingPattern = null;
// The rest-binding-pattern, if present, is stored separately on the node rather
// than in the member list; pull it back out of the collected members.
if (listBindingPatternMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
restBindingPattern = bindingPatterns.remove(bindingPatterns.size() - 1);
} else {
restBindingPattern = STNodeFactory.createEmptyNode();
}
STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
endContext();
return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, restBindingPattern,
closeBracket);
}
/**
 * Parse the separator (or end) that follows a list-binding-pattern member.
 *
 * @return Comma node, or {@code null} at the end of the list
 */
private STNode parseListBindingpatternRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    return parseListBindingpatternRhs(nextTokenKind);
}
/**
 * Parse the separator (or end) that follows a list-binding-pattern member.
 *
 * @param nextTokenKind Kind of the next token
 * @return Comma node, or {@code null} when the close bracket ends the list
 */
private STNode parseListBindingpatternRhs(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        // End of the list; the caller parses the close bracket.
        return null;
    }
    Solution solution = recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_END_OR_CONTINUE);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseListBindingpatternRhs(solution.tokenKind);
}
/**
 * Check whether the given token terminates a list-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} if the list-binding-pattern ends here
 */
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.IN_KEYWORD
            || nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || nextTokenKind == SyntaxKind.EOF_TOKEN
            || nextTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL
            || nextTokenKind == SyntaxKind.HEX_INTEGER_LITERAL
            || nextTokenKind == SyntaxKind.ASTERISK_TOKEN;
}
/**
 * Parse rest-binding-pattern.
 * <p>
 * <code>rest-binding-pattern := ... variable-name</code>
 *
 * @return rest-binding-pattern node
 */
private STNode parseRestBindingPattern() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ELLIPSIS_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.REST_BINDING_PATTERN);
        return sol.recoveredNode;
    }
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();
    return STNodeFactory.createRestBindingPatternNode(ellipsis, varName);
}
/**
 * Parse a single list-binding-pattern member.
 * <p>
 * <code>list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]</code>
 *
 * @return list-binding-pattern member node
 */
private STNode parselistBindingPatternMember() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    // Int literals and '*' are tentatively accepted here because the construct may
    // still turn out to be an array-type descriptor; they are rejected later by
    // validateListBindingPattern when this is definitely a binding pattern.
    if (nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL
            || nextToken.kind == SyntaxKind.HEX_INTEGER_LITERAL
            || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
        return consume();
    }
    return parseBindingPattern();
}
/**
 * Parse typed-binding-pattern.
 * <p>
 * <code>typed-binding-pattern := inferable-type-descriptor binding-pattern</code><br/>
 * <code>inferable-type-descriptor := type-descriptor | var</code>
 *
 * @return Typed-binding-pattern node
 */
private STNode parseTypedBindingPattern() {
    startContext(ParserRuleContext.TYPED_BINDING_PATTERN);
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    if (peek().kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        // '[' here is ambiguous: it may extend the type descriptor (array type) or
        // start a list-binding-pattern. Resolve the ambiguity before continuing.
        STNode typedBindingPattern = parseArrayTypeDescOrListBindingPattern(typeDesc);
        endContext();
        return typedBindingPattern;
    }
    // Declare at first use; the previous pointless null pre-initialization is removed.
    STNode bindingPattern = parseBindingPattern();
    endContext();
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}
/**
 * Disambiguate between an array-type descriptor and a list-binding-pattern after a
 * type descriptor followed by '['.
 * <p>
 * The bracketed construct is first parsed as a list-binding-pattern. If its content
 * proves it really is one (or the following token ends a typed-binding-pattern), the
 * result is kept as the binding pattern. Otherwise the bracketed part is folded back
 * into the type descriptor as an array dimension, and parsing continues recursively
 * for further '[' tokens.
 *
 * @param typeDesc Type descriptor parsed so far
 * @return Typed-binding-pattern node
 */
private STNode parseArrayTypeDescOrListBindingPattern(STNode typeDesc) {
STNode arrayDescOrListBindingPattern = parseListBindingPattern();
if (isListBindingPatternDefinitively(arrayDescOrListBindingPattern) ||
isFollowTypedBindingPattern(peek().kind)) {
// Definitely a binding pattern: clean out members that are only legal in an
// array-type reading (int literals, '*').
return STNodeFactory.createTypedBindingPatternNode(typeDesc,
validateListBindingPattern(arrayDescOrListBindingPattern));
}
// Ambiguity resolved towards array type: fold the brackets into the type desc.
typeDesc = mergeTypeDescAndListBindingPattern(typeDesc, arrayDescOrListBindingPattern);
if (peek().kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
return parseArrayTypeDescOrListBindingPattern(typeDesc);
}
return STNodeFactory.createTypedBindingPatternNode(typeDesc, parseBindingPattern());
}
/**
 * Fold a construct that was tentatively parsed as a list-binding-pattern back into the
 * preceding type descriptor, re-interpreting it as an array dimension (an indexed
 * expression over the type).
 * <p>
 * Only the first member of the pattern is used as the index; a capture-binding-pattern
 * is unwrapped to its underlying variable name token.
 *
 * @param typeDesc       Type descriptor to extend
 * @param bindingPattern Tentative list-binding-pattern to fold in
 * @return Indexed-expression node representing the array-type reading
 */
private STNode mergeTypeDescAndListBindingPattern(STNode typeDesc, STNode bindingPattern) {
STListBindingPatternNode listBindingPattern = (STListBindingPatternNode) bindingPattern;
STNodeList childArray = (STNodeList) listBindingPattern.bindingPatterns;
STNode child = childArray.childInBucket(0);
if (child.kind == SyntaxKind.CAPTURE_BINDING_PATTERN) {
// In the array-type reading, the captured name is really a length reference.
child = ((STCaptureBindingPatternNode) child).variableName;
}
return STNodeFactory.createIndexedExpressionNode(typeDesc, listBindingPattern.openBracket, child,
listBindingPattern.closeBracket);
}
/**
 * Validate the members of a construct now known to be a list-binding-pattern,
 * reporting members that are only legal in the array-type reading (int literals and
 * '*') and replacing them with missing identifier tokens.
 *
 * @param bindingPattern List-binding-pattern node to validate
 * @return List-binding-pattern node with invalid members replaced
 */
private STNode validateListBindingPattern(STNode bindingPattern) {
    STListBindingPatternNode listBindingPattern = (STListBindingPatternNode) bindingPattern;
    STNodeList members = (STNodeList) listBindingPattern.bindingPatterns;
    int memberCount = members.bucketCount();
    ArrayList<STNode> cleanedChildren = new ArrayList<>();
    for (int i = 0; i < memberCount; i++) {
        STNode member = members.childInBucket(i);
        switch (member.kind) {
            case DECIMAL_INTEGER_LITERAL:
            case HEX_INTEGER_LITERAL:
            case ASTERISK_TOKEN:
                // Legal only in the array-type reading; report and substitute.
                this.errorHandler.reportInvalidNode(null,
                        "invalid Node " + member.kind + " in list-binding-pattern");
                cleanedChildren.add(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
                break;
            default:
                cleanedChildren.add(member);
                break;
        }
    }
    return STNodeFactory.createListBindingPatternNode(listBindingPattern.openBracket,
            STNodeFactory.createNodeList(cleanedChildren), listBindingPattern.restBindingPattern,
            listBindingPattern.closeBracket);
}
/**
 * Decide whether a tentatively parsed list-binding-pattern is unambiguously a binding
 * pattern (as opposed to a possible array-type descriptor).
 * <p>
 * A rest-binding-pattern or more than one member settles it. A single member keeps the
 * ambiguity only when it could also be an array length (int literal, '*') or a plain
 * name (capture-binding-pattern).
 *
 * @param bindingPattern Tentative list-binding-pattern
 * @return {@code true} if this is definitely a list-binding-pattern
 */
private boolean isListBindingPatternDefinitively(STNode bindingPattern) {
    STListBindingPatternNode listBindingPattern = (STListBindingPatternNode) bindingPattern;
    if (listBindingPattern.restBindingPattern != null) {
        return true;
    }
    STNodeList members = (STNodeList) listBindingPattern.bindingPatterns;
    if (members.bucketCount() != 1) {
        return true;
    }
    switch (members.childInBucket(0).kind) {
        case DECIMAL_INTEGER_LITERAL:
        case HEX_INTEGER_LITERAL:
        case ASTERISK_TOKEN:
        case CAPTURE_BINDING_PATTERN:
            // Could equally be `T[n]`, `T[*]` or `T[x]` — still ambiguous.
            return false;
        default:
            return true;
    }
}
/**
 * Check whether the given token can follow (i.e. terminate) a typed-binding-pattern.
 *
 * @param tokenKind Kind of the next token
 * @return {@code true} if the token ends a typed-binding-pattern
 */
private boolean isFollowTypedBindingPattern(SyntaxKind tokenKind) {
    // Return the primitive rather than the boxed Boolean: this is a private
    // predicate only ever used in boolean contexts, and boxing adds no value.
    switch (tokenKind) {
        case IN_KEYWORD:
        case EOF_TOKEN:
            return true;
        default:
            return false;
    }
}
/**
 * Check whether the parser reached to a valid expression start.
 *
 * @param kind Kind of the next immediate token.
 * @param nextTokenIndex Index to the next token.
 * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
 */
private boolean isValidExpressionStart(SyntaxKind kind, int nextTokenIndex) {
switch (kind) {
// Literals and identifiers start an expression only when what follows them is
// a valid expression right-hand side (one extra token of lookahead).
case DECIMAL_INTEGER_LITERAL:
case HEX_INTEGER_LITERAL:
case STRING_LITERAL:
case NULL_KEYWORD:
case TRUE_KEYWORD:
case FALSE_KEYWORD:
case DECIMAL_FLOATING_POINT_LITERAL:
case HEX_FLOATING_POINT_LITERAL:
case IDENTIFIER_TOKEN:
return isValidExprRhsStart(peek(nextTokenIndex).kind);
// These tokens/keywords unambiguously begin an expression.
case OPEN_PAREN_TOKEN:
case CHECK_KEYWORD:
case CHECKPANIC_KEYWORD:
case OPEN_BRACE_TOKEN:
case TYPEOF_KEYWORD:
case NEGATION_TOKEN:
case EXCLAMATION_MARK_TOKEN:
case TRAP_KEYWORD:
case OPEN_BRACKET_TOKEN:
case LT_TOKEN:
case TABLE_KEYWORD:
case STREAM_KEYWORD:
case FROM_KEYWORD:
case ERROR_KEYWORD:
case LET_KEYWORD:
case BACKTICK_TOKEN:
case XML_KEYWORD:
case STRING_KEYWORD:
case FUNCTION_KEYWORD:
case NEW_KEYWORD:
case LEFT_ARROW_TOKEN:
return true;
// A unary sign is a valid start only if what follows it is itself a valid
// expression start; recurse one token further ahead.
case PLUS_TOKEN:
case MINUS_TOKEN:
return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex + 1);
// Intentional fall-through: start/flush begin actions, not expressions, so
// they are treated the same as the default (not an expression start).
case START_KEYWORD:
case FLUSH_KEYWORD:
default:
return false;
}
}
/**
 * Parse sync send action.
 * <p>
 * <code>sync-send-action := expression ->> peer-worker</code>
 *
 * @param expression LHS expression of the sync send action
 * @return Sync send action node
 */
private STNode parseSyncSendAction(STNode expression) {
    // Parse order matters: the '->>' token precedes the peer worker name.
    STNode syncSendToken = parseSyncSendToken();
    STNode peerWorkerName = parsePeerWorkerName();
    return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorkerName);
}
/**
 * Parse a mandatory peer worker name.
 * <p>
 * <code>peer-worker := worker-name | default</code>
 *
 * @return Peer worker name node
 */
private STNode parsePeerWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.DEFAULT_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    // Unlike parseOptionalPeerWorkerName, a peer worker is required here.
    Solution sol = recover(nextToken, ParserRuleContext.PEER_WORKER_NAME);
    return sol.recoveredNode;
}
/**
 * Parse sync send token.
 * <p>
 * <code>sync-send-token := ->> </code>
 *
 * @return sync send token
 */
private STNode parseSyncSendToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SYNC_SEND_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.SYNC_SEND_TOKEN);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse receive action.
 * <p>
 * <code>receive-action := single-receive-action | multiple-receive-action</code>
 *
 * @return Receive action node
 */
private STNode parseReceiveAction() {
    // '<-' first, then either a single peer worker or a brace-enclosed field list.
    STNode leftArrow = parseLeftArrowToken();
    STNode receiveWorkers = parseReceiveWorkers();
    return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
}
/**
 * Parse the worker part of a receive action.
 *
 * @return Single peer worker name, or a multiple-receive fields node
 */
private STNode parseReceiveWorkers() {
    SyntaxKind nextTokenKind = peek().kind;
    return parseReceiveWorkers(nextTokenKind);
}
/**
 * Parse the worker part of a receive action.
 *
 * @param nextTokenKind Kind of the next token
 * @return Single peer worker name, or a multiple-receive fields node
 */
private STNode parseReceiveWorkers(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.DEFAULT_KEYWORD || nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        // single-receive-action: one peer worker.
        return parsePeerWorkerName();
    }
    if (nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        // multiple-receive-action: brace-enclosed receive fields.
        return parseMultipleReceiveWorkers();
    }
    Solution solution = recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    return parseReceiveWorkers(solution.tokenKind);
}
/**
 * Parse multiple worker receivers.
 * <p>
 * <code>{ receive-field (, receive-field)* }</code>
 *
 * @return Multiple worker receiver node
 */
private STNode parseMultipleReceiveWorkers() {
    startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
    // Brace-delimited: open brace, the field list, then the close brace.
    STNode openBraceToken = parseOpenBrace();
    STNode receiveFieldList = parseReceiveFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    return STNodeFactory.createReceiveFieldsNode(openBraceToken, receiveFieldList, closeBraceToken);
}
/**
 * Parse the comma-separated receive-field list of a multiple-receive action.
 * <p>
 * Returns a node list that interleaves fields and comma separators. An empty list is
 * reported as a missing-field error and returned empty.
 *
 * @return Node list of receive fields and separators
 */
private STNode parseReceiveFields() {
List<STNode> receiveFields = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfReceiveFields(nextToken.kind)) {
this.errorHandler.reportMissingTokenError("missing receive field");
return STNodeFactory.createNodeList(new ArrayList<>());
}
STNode receiveField = parseReceiveField();
receiveFields.add(receiveField);
nextToken = peek();
// NOTE(review): the local is misspelled ("recieve"); renaming would be a code
// change, so it is only flagged here.
STNode recieveFieldEnd;
// NOTE(review): the loop guard uses isEndOfTypeList while the entry check above
// uses isEndOfReceiveFields — confirm this asymmetry is intentional; it looks
// like both should agree on what terminates the field list.
while (!isEndOfTypeList(nextToken.kind)) {
recieveFieldEnd = parseReceiveFieldEnd(nextToken.kind);
if (recieveFieldEnd == null) {
// Null separator means the list has ended.
break;
}
receiveFields.add(recieveFieldEnd);
receiveField = parseReceiveField();
receiveFields.add(receiveField);
nextToken = peek();
}
return STNodeFactory.createNodeList(receiveFields);
}
/**
 * Check whether the given token terminates a receive-field list.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} if the receive-field list ends here
 */
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
/**
 * Parse the separator (or end) that follows a receive-field.
 *
 * @param nextTokenKind Kind of the next token
 * @return Comma node, or {@code null} when the receive-field list has ended
 */
private STNode parseReceiveFieldEnd(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            // Receive fields are brace-delimited (see parseMultipleReceiveWorkers and
            // isEndOfReceiveFields); the previous CLOSE_PAREN_TOKEN check here never
            // matched the actual terminator and forced needless error recovery.
            return null;
        default:
            Solution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
            if (solution.action == Action.REMOVE) {
                return solution.recoveredNode;
            }
            return parseReceiveFieldEnd(solution.tokenKind);
    }
}
/**
 * Parse a single receive field.
 *
 * @return Receive field node
 */
private STNode parseReceiveField() {
    SyntaxKind nextTokenKind = peek().kind;
    return parseReceiveField(nextTokenKind);
}
/**
 * Parse receive field.
 * <p>
 * <code>receive-field := peer-worker | field-name : peer-worker</code>
 *
 * @param nextTokenKind Kind of the next token
 * @return Receiver field node
 */
private STNode parseReceiveField(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.DEFAULT_KEYWORD) {
        return parseDefaultKeyword();
    }
    if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        // The identifier may be a bare peer worker or a `field-name : peer-worker`.
        return createQualifiedReceiveField(parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME));
    }
    Solution solution = recover(peek(), ParserRuleContext.RECEIVE_FIELD);
    if (solution.action == Action.REMOVE) {
        return solution.recoveredNode;
    }
    if (solution.tokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        return createQualifiedReceiveField(solution.recoveredNode);
    }
    return solution.recoveredNode;
}
/**
 * Complete a receive field that starts with an identifier: if a colon follows, parse
 * the qualifying peer worker; otherwise the identifier alone is the field.
 *
 * @param identifier Already-parsed field name or peer worker identifier
 * @return Qualified name reference node, or the identifier itself
 */
private STNode createQualifiedReceiveField(STNode identifier) {
    if (peek().kind == SyntaxKind.COLON_TOKEN) {
        STNode colon = parseColon();
        STNode peerWorker = parsePeerWorkerName();
        return STNodeFactory.createQualifiedNameReferenceNode(identifier, colon, peerWorker);
    }
    return identifier;
}
/**
 * Parse left arrow (<-) token.
 *
 * @return left arrow token
 */
private STNode parseLeftArrowToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LEFT_ARROW_TOKEN) {
        Solution sol = recover(nextToken, ParserRuleContext.LEFT_ARROW_TOKEN);
        return sol.recoveredNode;
    }
    return consume();
}
/**
 * Parse double-GT (>>) token pair.
 *
 * @return Double-GT token node
 */
private STNode parseDoubleGTToken() {
    STNode firstGT = parseGTToken();
    // The two '>' tokens must be adjacent; report if the first carries trailing minutiae.
    reportInvalidShiftOperator(firstGT);
    STNode secondGT = parseGTToken();
    return STNodeFactory.createDoubleGTTokenNode(firstGT, secondGT);
}
/*
 * Parse triple-GT token.
 * NOTE(review): orphaned Javadoc — no corresponding parse method follows in this
 * chunk (typo "tripple" corrected). Confirm whether the method was intentionally
 * removed or lost during extraction.
 */
/**
 * Report invalid double-GT and triple-GT tokens.
 * <p>
 * Shift operators must be written as adjacent '>' tokens; any trailing minutiae
 * (e.g. whitespace) attached to a preceding '>' means the tokens are separated.
 *
 * @param node Preceding node
 */
private void reportInvalidShiftOperator(STNode node) {
// Width including trailing minutiae exceeds the bare width only when minutiae exist.
int diff = node.widthWithTrailingMinutiae() - node.width();
if (diff > 0) {
this.errorHandler.reportMissingTokenError("no whitespaces allowed between >>");
}
}
} |
// NOTE(review): "Moreover, if the credential is required, we can move the validation
// up to buildClient()" — review remark about the credential(AzureKeyCredential)
// setter. The lines below were pipe-fused duplicate copies of that setter from a
// broken extraction; reconstructed once here.
public FormRecognizerClientBuilder credential(AzureKeyCredential apiKeyCredential) {
    this.credential = Objects.requireNonNull(apiKeyCredential, "'apiKeyCredential' cannot be null.");
    return this;
}
class FormRecognizerClientBuilder {
// Asks the service to echo the client request id back in the response headers.
private static final String ECHO_REQUEST_ID_HEADER = "x-ms-return-client-request-id";
private static final String CONTENT_TYPE_HEADER_VALUE = ContentType.APPLICATION_JSON;
private static final String ACCEPT_HEADER = "Accept";
// Properties file providing the client name/version used in the User-Agent header.
private static final String FORM_RECOGNIZER_PROPERTIES = "azure-ai-formrecognizer.properties";
private static final String NAME = "name";
private static final String VERSION = "version";
// The service communicates throttling back-off via the "retry-after-ms" header.
private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS);
private final ClientLogger logger = new ClientLogger(FormRecognizerClientBuilder.class);
// User-supplied policies, merged into the pipeline after the retry policy.
private final List<HttpPipelinePolicy> policies;
private final HttpHeaders headers;
private final String clientName;
private final String clientVersion;
// Builder state. endpoint is always required; credential is required whenever a
// custom pipeline has not been supplied.
private String endpoint;
private AzureKeyCredential credential;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline httpPipeline;
private Configuration configuration;
private RetryPolicy retryPolicy;
private FormRecognizerServiceVersion version;
// Header carrying the API key to the Cognitive Services endpoint.
static final String OCP_APIM_SUBSCRIPTION_KEY = "Ocp-Apim-Subscription-Key";
static final Duration DEFAULT_DURATION = Duration.ofSeconds(5);
/**
 * Creates a builder initialized with default pipeline settings.
 */
public FormRecognizerClientBuilder() {
    policies = new ArrayList<>();
    httpLogOptions = new HttpLogOptions();
    // Default headers sent on every request.
    headers = new HttpHeaders()
        .put(ECHO_REQUEST_ID_HEADER, "true")
        .put(ACCEPT_HEADER, CONTENT_TYPE_HEADER_VALUE);
    // Client name/version for the User-Agent policy, read from the SDK properties file.
    Map<String, String> properties = CoreUtils.getProperties(FORM_RECOGNIZER_PROPERTIES);
    clientName = properties.getOrDefault(NAME, "UnknownName");
    clientVersion = properties.getOrDefault(VERSION, "UnknownVersion");
}
/**
 * Creates a {@link FormRecognizerClient} based on options set in the builder. Every time
 * {@code buildClient()} is called a new instance of {@link FormRecognizerClient} is created.
 *
 * <p>
 * If a pipeline has been supplied via {@code pipeline(HttpPipeline)}, it is used as-is and all
 * other pipeline-related settings are ignored; only the endpoint is still required.
 * </p>
 *
 * @return A FormRecognizerClient with the options set from the builder.
 * @throws NullPointerException if the endpoint has not been set.
 * @throws IllegalArgumentException if no credential was supplied and a default pipeline must be
 * built.
 */
public FormRecognizerClient buildClient() {
return new FormRecognizerClient(buildAsyncClient());
}
/**
 * Creates a {@link FormRecognizerAsyncClient} based on options set in the builder. Every time
 * {@code buildAsyncClient()} is called a new instance of {@link FormRecognizerAsyncClient} is
 * created.
 *
 * <p>
 * If a pipeline has been supplied via {@code pipeline(HttpPipeline)}, it is used as-is and all
 * other pipeline-related settings are ignored; only the endpoint is still required.
 * </p>
 *
 * @return A FormRecognizerAsyncClient with the options set from the builder.
 * @throws NullPointerException if the endpoint has not been set.
 * @throws IllegalArgumentException if no credential was supplied and a default pipeline must be
 * built.
 */
public FormRecognizerAsyncClient buildAsyncClient() {
    Objects.requireNonNull(endpoint, "'Endpoint' is required and can not be null.");
    final Configuration buildConfiguration = (configuration == null)
        ? Configuration.getGlobalConfiguration().clone() : configuration;
    // Fall back to the latest known service version when none was requested.
    final FormRecognizerServiceVersion serviceVersion =
        (version == null) ? FormRecognizerServiceVersion.getLatest() : version;
    // A caller-supplied pipeline wins over the default one assembled here.
    HttpPipeline pipeline = this.httpPipeline;
    if (pipeline == null) {
        pipeline = getDefaultHttpPipeline(buildConfiguration);
    }
    final FormRecognizerClientImpl formRecognizerAPI = new FormRecognizerClientImplBuilder()
        .endpoint(endpoint)
        .pipeline(pipeline)
        .build();
    return new FormRecognizerAsyncClient(formRecognizerAPI, serviceVersion);
}
/**
 * Assembles the default {@link HttpPipeline} used when the caller did not supply one.
 *
 * @param buildConfiguration Configuration store consulted by the user-agent policy
 * @return The configured pipeline
 * @throws IllegalArgumentException if no credential has been set — the default pipeline cannot
 * authenticate without one
 */
private HttpPipeline getDefaultHttpPipeline(Configuration buildConfiguration) {
    // Validate up front (guard clause) instead of failing halfway through assembly.
    if (credential == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Missing credential information while building a client."));
    }
    // Renamed from 'policies' to avoid shadowing the field of the same name, which
    // holds the user-added policies merged in below.
    final List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
    pipelinePolicies.add(new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion,
        buildConfiguration));
    pipelinePolicies.add(new RequestIdPolicy());
    pipelinePolicies.add(new AddHeadersPolicy(headers));
    HttpPolicyProviders.addBeforeRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(retryPolicy == null ? DEFAULT_RETRY_POLICY : retryPolicy);
    pipelinePolicies.add(new AddDatePolicy());
    pipelinePolicies.add(new AzureKeyCredentialPolicy(OCP_APIM_SUBSCRIPTION_KEY, credential));
    // User-added policies run after the required/retry policies.
    pipelinePolicies.addAll(this.policies);
    HttpPolicyProviders.addAfterRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new HttpLoggingPolicy(httpLogOptions));
    return new HttpPipelineBuilder()
        .policies(pipelinePolicies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
/**
 * Sets the service endpoint for the Azure Form Recognizer instance.
 *
 * @param endpoint The URL of the Azure Form Recognizer instance service requests to and receive
 * responses from.
 * @return The updated FormRecognizerClientBuilder object.
 * @throws NullPointerException if {@code endpoint} is null
 * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL.
 */
public FormRecognizerClientBuilder endpoint(String endpoint) {
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
    try {
        new URL(endpoint);
    } catch (MalformedURLException ex) {
        throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL.", ex));
    }
    // Normalize: drop a single trailing slash so later path concatenation is consistent.
    this.endpoint = endpoint.endsWith("/")
        ? endpoint.substring(0, endpoint.length() - 1)
        : endpoint;
    return this;
}
/**
* Sets the {@link AzureKeyCredential} to use when authenticating HTTP requests for this FormRecognizerClientBuilder.
*
* @param apiKeyCredential {@link AzureKeyCredential} API key credential
*
* @return The updated FormRecognizerClientBuilder object.
* @throws NullPointerException If {@code apiKeyCredential} is {@code null}
*/
/**
 * Sets the logging configuration for HTTP requests and responses.
 *
 * <p>If {@code logOptions} isn't provided, the default options will use
 * {@link HttpLogDetailLevel#NONE} which will prevent logging.</p>
 *
 * @param logOptions The logging configuration to use when sending and receiving HTTP
 * requests/responses.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder httpLogOptions(HttpLogOptions logOptions) {
this.httpLogOptions = logOptions;
return this;
}
/**
 * Adds a policy to the set of existing policies that are executed after required policies.
 *
 * @param policy The pipeline policy to append to the user-policy list.
 * @return The updated FormRecognizerClientBuilder object.
 * @throws NullPointerException If {@code policy} is {@code null}.
 */
public FormRecognizerClientBuilder addPolicy(HttpPipelinePolicy policy) {
    Objects.requireNonNull(policy, "'policy' cannot be null.");
    policies.add(policy);
    return this;
}
/**
 * Sets the HTTP client to use for sending and receiving requests to and from the service.
 *
 * @param client The HTTP client to use for requests.
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder httpClient(HttpClient client) {
    // Warn when a previously configured client is being cleared.
    if (client == null && this.httpClient != null) {
        logger.info("HttpClient is being set to 'null' when it was previously configured.");
    }
    this.httpClient = client;
    return this;
}
/**
 * Sets the HTTP pipeline to use for the service client.
 *
 * <p>If {@code pipeline} is set, all other pipeline-related settings are ignored; the endpoint
 * is still required to build a {@link FormRecognizerClient}.</p>
 *
 * @param httpPipeline The HTTP pipeline to use for sending service requests and receiving
 * responses.
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder pipeline(HttpPipeline httpPipeline) {
    // Warn when a previously configured pipeline is being cleared.
    if (httpPipeline == null && this.httpPipeline != null) {
        logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
    }
    this.httpPipeline = httpPipeline;
    return this;
}
/**
 * Sets the configuration store that is used during construction of the service client.
 *
 * <p>The default configuration store is a clone of the global configuration store
 * ({@link Configuration#getGlobalConfiguration()}).</p>
 *
 * @param configuration The configuration store used to read environment settings while
 * building the client.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
 * Sets the {@link RetryPolicy} applied to each request.
 *
 * <p>The default retry policy will be used if not provided when building
 * {@link FormRecognizerAsyncClient} or {@link FormRecognizerClient}.</p>
 *
 * @param retryPolicy user's retry policy applied to each request.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder retryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return this;
}
/**
 * Sets the {@link FormRecognizerServiceVersion} that is used when making API requests.
 * <p>
 * If a service version is not provided, the service version that will be used will be the latest
 * known service version based on the version of the client library being used. If no service
 * version is specified, updating to a newer version of the client library will have the result of
 * potentially moving to a newer service version.
 *
 * @param version {@link FormRecognizerServiceVersion} of the service to be used when making
 * requests.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder serviceVersion(FormRecognizerServiceVersion version) {
this.version = version;
return this;
}
} | class FormRecognizerClientBuilder {
private static final String ECHO_REQUEST_ID_HEADER = "x-ms-return-client-request-id";
private static final String CONTENT_TYPE_HEADER_VALUE = ContentType.APPLICATION_JSON;
private static final String ACCEPT_HEADER = "Accept";
private static final String FORM_RECOGNIZER_PROPERTIES = "azure-ai-formrecognizer.properties";
private static final String NAME = "name";
private static final String VERSION = "version";
private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS);
private final ClientLogger logger = new ClientLogger(FormRecognizerClientBuilder.class);
private final List<HttpPipelinePolicy> policies;
private final HttpHeaders headers;
private final String clientName;
private final String clientVersion;
private String endpoint;
private AzureKeyCredential credential;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline httpPipeline;
private Configuration configuration;
private RetryPolicy retryPolicy;
private FormRecognizerServiceVersion version;
static final String OCP_APIM_SUBSCRIPTION_KEY = "Ocp-Apim-Subscription-Key";
static final Duration DEFAULT_DURATION = Duration.ofSeconds(5);
/**
* The constructor with defaults.
*/
public FormRecognizerClientBuilder() {
policies = new ArrayList<>();
httpLogOptions = new HttpLogOptions();
Map<String, String> properties = CoreUtils.getProperties(FORM_RECOGNIZER_PROPERTIES);
clientName = properties.getOrDefault(NAME, "UnknownName");
clientVersion = properties.getOrDefault(VERSION, "UnknownVersion");
headers = new HttpHeaders()
.put(ECHO_REQUEST_ID_HEADER, "true")
.put(ACCEPT_HEADER, CONTENT_TYPE_HEADER_VALUE);
}
/**
* Creates a {@link FormRecognizerClient} based on options set in the builder. Every time
* {@code buildClient()} is called a new instance of {@link FormRecognizerClient} is created.
*
* <p>
* If {@link
* {@link
* settings are ignored
* </p>
*
* @return A FormRecognizerClient with the options set from the builder.
* @throws NullPointerException if {@link
* {@link
* @throws IllegalArgumentException if {@link
*/
public FormRecognizerClient buildClient() {
return new FormRecognizerClient(buildAsyncClient());
}
/**
* Creates a {@link FormRecognizerAsyncClient} based on options set in the builder. Every time
* {@code buildAsyncClient()} is called a new instance of {@link FormRecognizerAsyncClient} is created.
*
* <p>
* If {@link
* {@link
* settings are ignored.
* </p>
*
* @return A FormRecognizerAsyncClient with the options set from the builder.
* @throws NullPointerException if {@link
* has not been set.
* @throws IllegalArgumentException if {@link
*/
public FormRecognizerAsyncClient buildAsyncClient() {
Objects.requireNonNull(endpoint, "'Endpoint' is required and can not be null.");
final Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration().clone() : configuration;
final FormRecognizerServiceVersion serviceVersion =
version != null ? version : FormRecognizerServiceVersion.getLatest();
HttpPipeline pipeline = httpPipeline;
if (pipeline == null) {
pipeline = getDefaultHttpPipeline(buildConfiguration);
}
final FormRecognizerClientImpl formRecognizerAPI = new FormRecognizerClientImplBuilder()
.endpoint(endpoint)
.pipeline(pipeline)
.build();
return new FormRecognizerAsyncClient(formRecognizerAPI, serviceVersion);
}
/**
 * Builds the default {@link HttpPipeline} used when the caller did not supply one.
 *
 * @param buildConfiguration the configuration store consulted by the user-agent policy.
 * @return a pipeline with telemetry, retry, authentication, user policies and logging applied.
 * @throws IllegalArgumentException if no credential has been configured.
 */
private HttpPipeline getDefaultHttpPipeline(Configuration buildConfiguration) {
    // Fail fast: the default pipeline cannot authenticate without a key credential,
    // so validate before doing any policy-construction work.
    if (credential == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Missing credential information while building a client."));
    }
    final List<HttpPipelinePolicy> policies = new ArrayList<>();
    // Policy order matters: telemetry/request-id/headers run before retry; auth and
    // user-supplied policies run after retry; logging runs last.
    policies.add(new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion,
        buildConfiguration));
    policies.add(new RequestIdPolicy());
    policies.add(new AddHeadersPolicy(headers));
    HttpPolicyProviders.addBeforeRetryPolicies(policies);
    policies.add(retryPolicy == null ? DEFAULT_RETRY_POLICY : retryPolicy);
    policies.add(new AddDatePolicy());
    policies.add(new AzureKeyCredentialPolicy(OCP_APIM_SUBSCRIPTION_KEY, credential));
    policies.addAll(this.policies);
    HttpPolicyProviders.addAfterRetryPolicies(policies);
    policies.add(new HttpLoggingPolicy(httpLogOptions));
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
/**
 * Sets the service endpoint for the Azure Form Recognizer instance.
 *
 * @param endpoint The URL of the Azure Form Recognizer instance service requests to and receive responses from.
 *
 * @return The updated FormRecognizerClientBuilder object.
 * @throws NullPointerException if {@code endpoint} is null
 * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL.
 */
public FormRecognizerClientBuilder endpoint(String endpoint) {
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
    // Validate eagerly so a malformed URL surfaces here rather than at request time.
    try {
        new URL(endpoint);
    } catch (MalformedURLException ex) {
        throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL.", ex));
    }
    // Normalize by dropping a single trailing slash so later path concatenation stays consistent.
    this.endpoint = endpoint.endsWith("/")
        ? endpoint.substring(0, endpoint.length() - 1)
        : endpoint;
    return this;
}
/**
 * Sets the {@link AzureKeyCredential} to use when authenticating HTTP requests for this
 * FormRecognizerClientBuilder.
 *
 * <p>NOTE(review): this Javadoc documents a {@code credential(AzureKeyCredential)} setter whose
 * definition does not appear in this excerpt — confirm the method exists in the full file or
 * restore it alongside this comment.</p>
 *
 * @param apiKeyCredential {@link AzureKeyCredential} API key credential
 *
 * @return The updated FormRecognizerClientBuilder object.
 * @throws NullPointerException If {@code apiKeyCredential} is {@code null}
 */
/**
 * Sets the logging configuration for HTTP requests and responses.
 *
 * <p>If {@code logOptions} isn't provided, the default options will use {@link HttpLogDetailLevel#NONE}
 * which will prevent logging.</p>
 *
 * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder httpLogOptions(HttpLogOptions logOptions) {
this.httpLogOptions = logOptions;
return this;
}
/**
 * Adds a policy to the set of existing policies that are executed after required policies.
 *
 * @param policy The pipeline policy to apply to each request, executed after the required policies.
 *
 * @return The updated FormRecognizerClientBuilder object.
 * @throws NullPointerException If {@code policy} is {@code null}.
 */
public FormRecognizerClientBuilder addPolicy(HttpPipelinePolicy policy) {
policies.add(Objects.requireNonNull(policy, "'policy' cannot be null."));
return this;
}
/**
 * Sets the HTTP client to use for sending and receiving requests to and from the service.
 *
 * @param client The HTTP client to use for requests.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder httpClient(HttpClient client) {
    // Surface (but allow) the case where a previously configured client is being cleared.
    if (client == null && this.httpClient != null) {
        logger.info("HttpClient is being set to 'null' when it was previously configured.");
    }
    this.httpClient = client;
    return this;
}
/**
 * Sets the HTTP pipeline to use for the service client.
 * <p>
 * If {@code pipeline} is set, all other settings are ignored, aside from
 * {@link FormRecognizerClientBuilder#endpoint(String) endpoint} when building a
 * {@link FormRecognizerClient}.
 *
 * @param httpPipeline The HTTP pipeline to use for sending service requests and receiving responses.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder pipeline(HttpPipeline httpPipeline) {
if (this.httpPipeline != null && httpPipeline == null) {
// Clearing a previously configured pipeline is allowed but worth surfacing in the logs.
logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
}
this.httpPipeline = httpPipeline;
return this;
}
/**
 * Sets the configuration store that is used during construction of the service client.
 * <p>
 * The default configuration store is a clone of the {@link Configuration#getGlobalConfiguration() global
 * configuration store}, use {@link Configuration#NONE} to bypass using configuration settings during
 * construction.
 *
 * @param configuration The configuration store used to read application/environment settings.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
 * Sets the {@link RetryPolicy} that is used when each request is sent.
 * <p>
 * The default retry policy will be used if not provided when
 * {@link FormRecognizerClientBuilder#buildAsyncClient() buildAsyncClient}
 * is used to build {@link FormRecognizerAsyncClient} or {@link FormRecognizerClient}.
 *
 * @param retryPolicy user's retry policy applied to each request.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder retryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return this;
}
/**
 * Sets the {@link FormRecognizerServiceVersion} that is used when making API requests.
 * <p>
 * If a service version is not provided, the service version that will be used will be the latest known service
 * version based on the version of the client library being used. If no service version is specified, updating to a
 * newer version the client library will have the result of potentially moving to a newer service version.
 *
 * @param version {@link FormRecognizerServiceVersion} of the service to be used when making requests.
 *
 * @return The updated FormRecognizerClientBuilder object.
 */
public FormRecognizerClientBuilder serviceVersion(FormRecognizerServiceVersion version) {
// A null version falls back to FormRecognizerServiceVersion.getLatest() at build time.
this.version = version;
return this;
}
} |
`baseMessage` could be returned here instead of retrieving it from the super class again. | public String getMessage() {
// Start from the base exception message and append service-specific details when present.
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
    // Fix: return the cached baseMessage instead of invoking super.getMessage() a second time.
    return baseMessage;
} else {
    baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
        errorCodeValue);
}
if (this.target == null) {
    return baseMessage;
} else {
    baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | return super.getMessage(); | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
// Fix: removed a stray @Override that annotated this accessor — getTarget overrides no
// supertype method, so the annotation is a compile error (it likely belonged to getMessage).
/**
 * Gets the target for this exception.
 *
 * @return The target for this exception.
 */
public String getTarget() {
    return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
// Fix: removed a stray @Override that annotated this accessor — getTarget overrides no
// supertype method, so the annotation is a compile error (it likely belonged to getMessage).
/**
 * Gets the target for this exception.
 *
 * @return The target for this exception.
 */
public String getTarget() {
    return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
duplicated code could probably be moved to a method | String bindUpdate(Class<?> clazz, String query, Map<String, Object> params) {
// Bind the named parameters, then ensure the result is a valid Mongo update document.
String bound = bindQuery(clazz, query, params);
// A Mongo update document must use an update operator; wrap plain documents in $set.
// (Replaces the flag-and-loop scan, which had no early exit.)
boolean hasOperator = UPDATE_OPERATORS.stream().anyMatch(bound::contains);
String bindUpdate = hasOperator ? bound : "{'$set':" + bound + "}";
LOGGER.debug(bindUpdate);
return bindUpdate;
} | boolean containsOperator = false; | String bindUpdate(Class<?> clazz, String query, Map<String, Object> params) {
// Bind the named parameters into the update document.
String bindUpdate = bindQuery(clazz, query, params);
// NOTE(review): the operator check inspects the raw `query` rather than the bound
// `bindUpdate` — confirm this is intentional (bound parameter values should not be
// able to introduce update operators, but verify).
if (!containsUpdateOperator(query)) {
bindUpdate = "{'$set':" + bindUpdate + "}";
}
LOGGER.debug(bindUpdate);
return bindUpdate;
} | class ReactiveMongoOperations<QueryType, UpdateType> {
public final String ID = "_id";
private static final Logger LOGGER = Logger.getLogger(ReactiveMongoOperations.class);
private static final List<String> UPDATE_OPERATORS = Arrays.asList(
"$currentDate", "$inc", "$min", "$max", "$mul", "$rename", "$set", "$setOnInsert", "$unset");
private static final Map<String, String> defaultDatabaseName = new ConcurrentHashMap<>();
protected abstract QueryType createQuery(ReactiveMongoCollection collection, Document query, Document sortDoc);
protected abstract UpdateType createUpdate(ReactiveMongoCollection<?> collection, Class<?> entityClass,
Document docUpdate);
protected abstract Uni<?> list(QueryType query);
protected abstract Multi<?> stream(QueryType query);
public Uni<Void> persist(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persist(collection, entity);
}
/**
 * Persists every entity of the iterable into the collection resolved from the first entity.
 * Resolution is deferred until subscription; an empty iterable completes with no work.
 */
public Uni<Void> persist(Iterable<?> entities) {
    return Uni.createFrom().deferred(() -> {
        List<Object> objects = new ArrayList<>();
        entities.forEach(objects::add);
        if (!objects.isEmpty()) {
            // All entities are assumed to map to the collection of the first one.
            ReactiveMongoCollection collection = mongoCollection(objects.get(0));
            return persist(collection, objects);
        }
        return nullUni();
    });
}
public Uni<Void> persist(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persist(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persist(collection, entityList);
}
}
public Uni<Void> persist(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persist(collection, objects);
}
return nullUni();
});
}
public Uni<Void> update(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return update(collection, entity);
}
public Uni<Void> update(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public Uni<Void> update(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return update(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return update(collection, entityList);
}
}
public Uni<Void> update(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public Uni<Void> persistOrUpdate(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persistOrUpdate(collection, entity);
}
public Uni<Void> persistOrUpdate(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public Uni<Void> persistOrUpdate(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persistOrUpdate(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persistOrUpdate(collection, entityList);
}
}
public Uni<Void> persistOrUpdate(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public Uni<Void> delete(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.deleteOne(query).onItem().ignore().andContinueWithNull();
}
public ReactiveMongoCollection mongoCollection(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
ReactiveMongoDatabase database = mongoDatabase(mongoEntity);
if (mongoEntity != null && !mongoEntity.collection().isEmpty()) {
return database.getCollection(mongoEntity.collection(), entityClass);
}
return database.getCollection(entityClass.getSimpleName(), entityClass);
}
public ReactiveMongoDatabase mongoDatabase(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
return mongoDatabase(mongoEntity);
}
public Uni<Void> nullUni() {
return Uni.createFrom().item((Void) null);
}
private Uni<Void> persist(ReactiveMongoCollection collection, Object entity) {
return collection.insertOne(entity).onItem().ignore().andContinueWithNull();
}
private Uni<Void> persist(ReactiveMongoCollection collection, List<Object> entities) {
return collection.insertMany(entities).onItem().ignore().andContinueWithNull();
}
private Uni<Void> update(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity).onItem().ignore().andContinueWithNull();
}
private Uni<Void> update(ReactiveMongoCollection collection, List<Object> entities) {
List<Uni<Void>> unis = entities.stream().map(entity -> update(collection, entity)).collect(Collectors.toList());
return Uni.combine().all().unis(unis).combinedWith(u -> null);
}
private Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
return collection.insertOne(entity).onItem().ignore().andContinueWithNull();
} else {
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity, new ReplaceOptions().upsert(true))
.onItem().ignore().andContinueWithNull();
}
}
private Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, List<Object> entities) {
List<WriteModel> bulk = new ArrayList<>();
for (Object entity : entities) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
bulk.add(new InsertOneModel(entity));
} else {
BsonDocument query = new BsonDocument().append(ID, id);
bulk.add(new ReplaceOneModel(query, entity,
new ReplaceOptions().upsert(true)));
}
}
return collection.bulkWrite(bulk).onItem().ignore().andContinueWithNull();
}
private BsonDocument getBsonDocument(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = new BsonDocument();
Codec codec = collection.getCodecRegistry().get(entity.getClass());
codec.encode(new BsonDocumentWriter(document), entity, EncoderContext.builder().build());
return document;
}
private ReactiveMongoCollection mongoCollection(Object entity) {
Class<?> entityClass = entity.getClass();
return mongoCollection(entityClass);
}
private ReactiveMongoDatabase mongoDatabase(MongoEntity entity) {
ReactiveMongoClient mongoClient = clientFromArc(entity, ReactiveMongoClient.class, true);
if (entity != null && !entity.database().isEmpty()) {
return mongoClient.getDatabase(entity.database());
}
String databaseName = getDefaultDatabaseName(entity);
return mongoClient.getDatabase(databaseName);
}
/**
 * Resolves (and caches per bean name) the default database name for the given entity.
 * The anonymous {@code Function} was replaced with a lambda, matching the style used
 * elsewhere in this class.
 */
private String getDefaultDatabaseName(MongoEntity entity) {
    return defaultDatabaseName.computeIfAbsent(beanName(entity),
        beanName -> getDatabaseName(entity, beanName));
}
public Uni<Object> findById(Class<?> entityClass, Object id) {
Uni<Optional> optionalEntity = findByIdOptional(entityClass, id);
return optionalEntity.onItem().transform(optional -> optional.orElse(null));
}
public Uni<Optional> findByIdOptional(Class<?> entityClass, Object id) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.find(new Document(ID, id)).collectItems().first()
.onItem().transform(Optional::ofNullable);
}
public QueryType find(Class<?> entityClass, String query, Object... params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public QueryType find(Class<?> entityClass, String query, Sort sort, Object... params) {
String bindQuery = bindFilter(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, docQuery, docSort);
}
/**
* We should have a query like <code>{'firstname': ?1, 'lastname': ?2}</code> for native one
* and like <code>firstname = ?1</code> for PanacheQL one.
*/
public String bindFilter(Class<?> clazz, String query, Object[] params) {
String bindQuery = bindQuery(clazz, query, params);
LOGGER.debug(bindQuery);
return bindQuery;
}
/**
* We should have a query like <code>{'firstname': :firstname, 'lastname': :lastname}</code> for native one
* and like <code>firstname = :firstname and lastname = :lastname</code> for PanacheQL one.
*/
public String bindFilter(Class<?> clazz, String query, Map<String, Object> params) {
String bindQuery = bindQuery(clazz, query, params);
LOGGER.debug(bindQuery);
return bindQuery;
}
/**
 * We should have a query like <code>{'firstname': ?1, 'lastname': ?2}</code> for native one
 * and like <code>firstname = ?1 and lastname = ?2</code> for PanacheQL one.
 * As update document needs an update operator, we add <code>$set</code> if none is provided.
 */
String bindUpdate(Class<?> clazz, String query, Object[] params) {
    // Bind the positional parameters, then ensure the result is a valid Mongo update document.
    String bound = bindQuery(clazz, query, params);
    // A Mongo update document must use an update operator; wrap plain documents in $set.
    // (Replaces the flag-and-loop scan, which had no early exit.)
    boolean hasOperator = UPDATE_OPERATORS.stream().anyMatch(bound::contains);
    String bindUpdate = hasOperator ? bound : "{'$set':" + bound + "}";
    LOGGER.debug(bindUpdate);
    return bindUpdate;
}
/**
* We should have a query like <code>{'firstname': :firstname, 'lastname': :lastname}</code> for native one
* and like <code>firstname = :firstname and lastname = :lastname</code> for PanacheQL one.
* As update document needs an update operator, we add <code>$set</code> if none is provided.
*/
// Dispatches binding by query syntax: a leading '{' marks a native Mongo document,
// anything else is treated as PanacheQL.
String bindQuery(Class<?> clazz, String query, Object[] params) {
    if (query.charAt(0) == '{') {
        return NativeQueryBinder.bindQuery(query, params);
    }
    return PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
// Dispatches binding by query syntax: a leading '{' marks a native Mongo document,
// anything else is treated as PanacheQL.
String bindQuery(Class<?> clazz, String query, Map<String, Object> params) {
    if (query.charAt(0) == '{') {
        return NativeQueryBinder.bindQuery(query, params);
    }
    return PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
public QueryType find(Class<?> entityClass, String query, Map<String, Object> params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public QueryType find(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
String bindQuery = bindFilter(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, docQuery, docSort);
}
public QueryType find(Class<?> entityClass, String query, Parameters params) {
return find(entityClass, query, null, params.map());
}
public QueryType find(Class<?> entityClass, String query, Sort sort, Parameters params) {
return find(entityClass, query, sort, params.map());
}
@SuppressWarnings("rawtypes")
public QueryType find(Class<?> entityClass, Document query, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return createQuery(collection, query, sortDoc);
}
public QueryType find(Class<?> entityClass, Document query, Document sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, query, sort);
}
public QueryType find(Class<?> entityClass, Document query) {
return find(entityClass, query, (Document) null);
}
public Uni<List<?>> list(Class<?> entityClass, String query, Object... params) {
return (Uni) list(find(entityClass, query, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Object... params) {
return (Uni) list(find(entityClass, query, sort, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Map<String, Object> params) {
return (Uni) list(find(entityClass, query, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return (Uni) list(find(entityClass, query, sort, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Parameters params) {
return (Uni) list(find(entityClass, query, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Parameters params) {
return (Uni) list(find(entityClass, query, sort, params));
}
public Uni<List<?>> list(Class<?> entityClass, Document query) {
return (Uni) list(find(entityClass, query));
}
public Uni<List<?>> list(Class<?> entityClass, Document query, Document sort) {
return (Uni) list(find(entityClass, query, sort));
}
public Multi<?> stream(Class<?> entityClass, String query, Object... params) {
return stream(find(entityClass, query, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Object... params) {
return stream(find(entityClass, query, sort, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Map<String, Object> params) {
return stream(find(entityClass, query, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return stream(find(entityClass, query, sort, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Parameters params) {
return stream(find(entityClass, query, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Parameters params) {
return stream(find(entityClass, query, sort, params));
}
public Multi<?> stream(Class<?> entityClass, Document query) {
return stream(find(entityClass, query));
}
public Multi<?> stream(Class<?> entityClass, Document query, Document sort) {
return stream(find(entityClass, query, sort));
}
@SuppressWarnings("rawtypes")
public QueryType findAll(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, null, null);
}
@SuppressWarnings("rawtypes")
public QueryType findAll(Class<?> entityClass, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return createQuery(collection, null, sortDoc);
}
/**
 * Translates a Panache {@link Sort} into a Mongo sort document (1 ascending, -1 descending).
 * Returns {@code null} when no sort was requested.
 */
private Document sortToDocument(Sort sort) {
    if (sort == null) {
        return null;
    }
    Document document = new Document();
    for (Sort.Column column : sort.getColumns()) {
        boolean ascending = column.getDirection() == Sort.Direction.Ascending;
        document.append(column.getName(), ascending ? 1 : -1);
    }
    return document;
}
public Uni<List<?>> listAll(Class<?> entityClass) {
return (Uni) list(findAll(entityClass));
}
public Uni<List<?>> listAll(Class<?> entityClass, Sort sort) {
return (Uni) list(findAll(entityClass, sort));
}
public Multi<?> streamAll(Class<?> entityClass) {
return stream(findAll(entityClass));
}
public Multi<?> streamAll(Class<?> entityClass, Sort sort) {
return stream(findAll(entityClass, sort));
}
public Uni<Long> count(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments();
}
public Uni<Long> count(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public Uni<Long> count(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public Uni<Long> count(Class<?> entityClass, String query, Parameters params) {
return count(entityClass, query, params.map());
}
public Uni<Long> count(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.countDocuments(query);
}
public Uni<Long> deleteAll(Class<?> entityClass) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(new Document()).map(deleteResult -> deleteResult.getDeletedCount());
}
public Uni<Boolean> deleteById(Class<?> entityClass, Object id) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
Document query = new Document().append(ID, id);
return collection.deleteOne(query).map(results -> results.getDeletedCount() == 1);
}
public Uni<Long> delete(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public Uni<Long> delete(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public Uni<Long> delete(Class<?> entityClass, String query, Parameters params) {
return delete(entityClass, query, params.map());
}
public Uni<Long> delete(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(query).map(deleteResult -> deleteResult.getDeletedCount());
}
public UpdateType update(Class<?> entityClass, String update, Map<String, Object> params) {
return executeUpdate(entityClass, update, params);
}
public UpdateType update(Class<?> entityClass, String update, Parameters params) {
return update(entityClass, update, params.map());
}
public UpdateType update(Class<?> entityClass, String update, Object... params) {
return executeUpdate(entityClass, update, params);
}
private UpdateType executeUpdate(Class<?> entityClass, String update, Object... params) {
String bindUpdate = bindUpdate(entityClass, update, params);
Document docUpdate = Document.parse(bindUpdate);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return createUpdate(collection, entityClass, docUpdate);
}
private UpdateType executeUpdate(Class<?> entityClass, String update, Map<String, Object> params) {
String bindUpdate = bindUpdate(entityClass, update, params);
Document docUpdate = Document.parse(bindUpdate);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return createUpdate(collection, entityClass, docUpdate);
}
/**
 * Builds the exception reported when a method that should have been automatically
 * overridden in a subclass is invoked directly.
 *
 * @return an IllegalStateException describing the missing override; returned (not thrown)
 * so call sites can {@code throw implementationInjectionMissing();}.
 */
public IllegalStateException implementationInjectionMissing() {
return new IllegalStateException(
"This method is normally automatically overridden in subclasses");
}
} | class ReactiveMongoOperations<QueryType, UpdateType> {
public final String ID = "_id";
private static final Logger LOGGER = Logger.getLogger(ReactiveMongoOperations.class);
private static final List<String> UPDATE_OPERATORS = Arrays.asList(
"$currentDate", "$inc", "$min", "$max", "$mul", "$rename", "$set", "$setOnInsert", "$unset",
"$addToSet", "$pop", "$pull", "$push", "$pullAll",
"$each", "$position", "$slice", "$sort",
"$bit");
private static final Map<String, String> defaultDatabaseName = new ConcurrentHashMap<>();
protected abstract QueryType createQuery(ReactiveMongoCollection collection, Document query, Document sortDoc);
protected abstract UpdateType createUpdate(ReactiveMongoCollection<?> collection, Class<?> entityClass,
Document docUpdate);
protected abstract Uni<?> list(QueryType query);
protected abstract Multi<?> stream(QueryType query);
public Uni<Void> persist(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persist(collection, entity);
}
public Uni<Void> persist(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persist(collection, objects);
}
return nullUni();
});
}
public Uni<Void> persist(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persist(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persist(collection, entityList);
}
}
public Uni<Void> persist(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persist(collection, objects);
}
return nullUni();
});
}
public Uni<Void> update(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return update(collection, entity);
}
public Uni<Void> update(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public Uni<Void> update(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return update(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return update(collection, entityList);
}
}
public Uni<Void> update(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public Uni<Void> persistOrUpdate(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persistOrUpdate(collection, entity);
}
public Uni<Void> persistOrUpdate(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public Uni<Void> persistOrUpdate(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persistOrUpdate(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persistOrUpdate(collection, entityList);
}
}
public Uni<Void> persistOrUpdate(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public Uni<Void> delete(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.deleteOne(query).onItem().ignore().andContinueWithNull();
}
public ReactiveMongoCollection mongoCollection(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
ReactiveMongoDatabase database = mongoDatabase(mongoEntity);
if (mongoEntity != null && !mongoEntity.collection().isEmpty()) {
return database.getCollection(mongoEntity.collection(), entityClass);
}
return database.getCollection(entityClass.getSimpleName(), entityClass);
}
public ReactiveMongoDatabase mongoDatabase(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
return mongoDatabase(mongoEntity);
}
public Uni<Void> nullUni() {
return Uni.createFrom().item((Void) null);
}
private Uni<Void> persist(ReactiveMongoCollection collection, Object entity) {
return collection.insertOne(entity).onItem().ignore().andContinueWithNull();
}
private Uni<Void> persist(ReactiveMongoCollection collection, List<Object> entities) {
return collection.insertMany(entities).onItem().ignore().andContinueWithNull();
}
private Uni<Void> update(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity).onItem().ignore().andContinueWithNull();
}
private Uni<Void> update(ReactiveMongoCollection collection, List<Object> entities) {
List<Uni<Void>> unis = entities.stream().map(entity -> update(collection, entity)).collect(Collectors.toList());
return Uni.combine().all().unis(unis).combinedWith(u -> null);
}
private Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
return collection.insertOne(entity).onItem().ignore().andContinueWithNull();
} else {
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity, new ReplaceOptions().upsert(true))
.onItem().ignore().andContinueWithNull();
}
}
private Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, List<Object> entities) {
List<WriteModel> bulk = new ArrayList<>();
for (Object entity : entities) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
bulk.add(new InsertOneModel(entity));
} else {
BsonDocument query = new BsonDocument().append(ID, id);
bulk.add(new ReplaceOneModel(query, entity,
new ReplaceOptions().upsert(true)));
}
}
return collection.bulkWrite(bulk).onItem().ignore().andContinueWithNull();
}
private BsonDocument getBsonDocument(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = new BsonDocument();
Codec codec = collection.getCodecRegistry().get(entity.getClass());
codec.encode(new BsonDocumentWriter(document), entity, EncoderContext.builder().build());
return document;
}
private ReactiveMongoCollection mongoCollection(Object entity) {
Class<?> entityClass = entity.getClass();
return mongoCollection(entityClass);
}
private ReactiveMongoDatabase mongoDatabase(MongoEntity entity) {
ReactiveMongoClient mongoClient = clientFromArc(entity, ReactiveMongoClient.class, true);
if (entity != null && !entity.database().isEmpty()) {
return mongoClient.getDatabase(entity.database());
}
String databaseName = getDefaultDatabaseName(entity);
return mongoClient.getDatabase(databaseName);
}
private String getDefaultDatabaseName(MongoEntity entity) {
return defaultDatabaseName.computeIfAbsent(beanName(entity), new Function<String, String>() {
@Override
public String apply(String beanName) {
return getDatabaseName(entity, beanName);
}
});
}
public Uni<Object> findById(Class<?> entityClass, Object id) {
Uni<Optional> optionalEntity = findByIdOptional(entityClass, id);
return optionalEntity.onItem().transform(optional -> optional.orElse(null));
}
public Uni<Optional> findByIdOptional(Class<?> entityClass, Object id) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.find(new Document(ID, id)).collectItems().first()
.onItem().transform(Optional::ofNullable);
}
public QueryType find(Class<?> entityClass, String query, Object... params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public QueryType find(Class<?> entityClass, String query, Sort sort, Object... params) {
String bindQuery = bindFilter(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, docQuery, docSort);
}
/**
* We should have a query like <code>{'firstname': ?1, 'lastname': ?2}</code> for native one
* and like <code>firstname = ?1</code> for PanacheQL one.
*/
public String bindFilter(Class<?> clazz, String query, Object[] params) {
String bindQuery = bindQuery(clazz, query, params);
LOGGER.debug(bindQuery);
return bindQuery;
}
/**
* We should have a query like <code>{'firstname': :firstname, 'lastname': :lastname}</code> for native one
* and like <code>firstname = :firstname and lastname = :lastname</code> for PanacheQL one.
*/
public String bindFilter(Class<?> clazz, String query, Map<String, Object> params) {
String bindQuery = bindQuery(clazz, query, params);
LOGGER.debug(bindQuery);
return bindQuery;
}
/**
* We should have a query like <code>{'firstname': ?1, 'lastname': ?2}</code> for native one
* and like <code>firstname = ?1 and lastname = ?2</code> for PanacheQL one.
* As update document needs an update operator, we add <code>$set</code> if none is provided.
*/
String bindUpdate(Class<?> clazz, String query, Object[] params) {
String bindUpdate = bindQuery(clazz, query, params);
if (!containsUpdateOperator(query)) {
bindUpdate = "{'$set':" + bindUpdate + "}";
}
LOGGER.debug(bindUpdate);
return bindUpdate;
}
/**
* We should have a query like <code>{'firstname': :firstname, 'lastname': :lastname}</code> for native one
* and like <code>firstname = :firstname and lastname = :lastname</code> for PanacheQL one.
* As update document needs an update operator, we add <code>$set</code> if none is provided.
*/
private boolean containsUpdateOperator(String update) {
for (String operator : UPDATE_OPERATORS) {
if (update.contains(operator)) {
return true;
}
}
return false;
}
String bindQuery(Class<?> clazz, String query, Object[] params) {
String bindQuery = null;
if (query.charAt(0) == '{') {
bindQuery = NativeQueryBinder.bindQuery(query, params);
} else {
bindQuery = PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
return bindQuery;
}
String bindQuery(Class<?> clazz, String query, Map<String, Object> params) {
String bindQuery = null;
if (query.charAt(0) == '{') {
bindQuery = NativeQueryBinder.bindQuery(query, params);
} else {
bindQuery = PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
return bindQuery;
}
public QueryType find(Class<?> entityClass, String query, Map<String, Object> params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public QueryType find(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
String bindQuery = bindFilter(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, docQuery, docSort);
}
public QueryType find(Class<?> entityClass, String query, Parameters params) {
return find(entityClass, query, null, params.map());
}
public QueryType find(Class<?> entityClass, String query, Sort sort, Parameters params) {
return find(entityClass, query, sort, params.map());
}
@SuppressWarnings("rawtypes")
public QueryType find(Class<?> entityClass, Document query, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return createQuery(collection, query, sortDoc);
}
public QueryType find(Class<?> entityClass, Document query, Document sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, query, sort);
}
public QueryType find(Class<?> entityClass, Document query) {
return find(entityClass, query, (Document) null);
}
public Uni<List<?>> list(Class<?> entityClass, String query, Object... params) {
return (Uni) list(find(entityClass, query, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Object... params) {
return (Uni) list(find(entityClass, query, sort, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Map<String, Object> params) {
return (Uni) list(find(entityClass, query, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return (Uni) list(find(entityClass, query, sort, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Parameters params) {
return (Uni) list(find(entityClass, query, params));
}
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Parameters params) {
return (Uni) list(find(entityClass, query, sort, params));
}
public Uni<List<?>> list(Class<?> entityClass, Document query) {
return (Uni) list(find(entityClass, query));
}
public Uni<List<?>> list(Class<?> entityClass, Document query, Document sort) {
return (Uni) list(find(entityClass, query, sort));
}
public Multi<?> stream(Class<?> entityClass, String query, Object... params) {
return stream(find(entityClass, query, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Object... params) {
return stream(find(entityClass, query, sort, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Map<String, Object> params) {
return stream(find(entityClass, query, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return stream(find(entityClass, query, sort, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Parameters params) {
return stream(find(entityClass, query, params));
}
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Parameters params) {
return stream(find(entityClass, query, sort, params));
}
public Multi<?> stream(Class<?> entityClass, Document query) {
return stream(find(entityClass, query));
}
public Multi<?> stream(Class<?> entityClass, Document query, Document sort) {
return stream(find(entityClass, query, sort));
}
@SuppressWarnings("rawtypes")
public QueryType findAll(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return createQuery(collection, null, null);
}
@SuppressWarnings("rawtypes")
public QueryType findAll(Class<?> entityClass, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return createQuery(collection, null, sortDoc);
}
private Document sortToDocument(Sort sort) {
if (sort == null) {
return null;
}
Document sortDoc = new Document();
for (Sort.Column col : sort.getColumns()) {
sortDoc.append(col.getName(), col.getDirection() == Sort.Direction.Ascending ? 1 : -1);
}
return sortDoc;
}
public Uni<List<?>> listAll(Class<?> entityClass) {
return (Uni) list(findAll(entityClass));
}
public Uni<List<?>> listAll(Class<?> entityClass, Sort sort) {
return (Uni) list(findAll(entityClass, sort));
}
public Multi<?> streamAll(Class<?> entityClass) {
return stream(findAll(entityClass));
}
public Multi<?> streamAll(Class<?> entityClass, Sort sort) {
return stream(findAll(entityClass, sort));
}
public Uni<Long> count(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments();
}
public Uni<Long> count(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public Uni<Long> count(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public Uni<Long> count(Class<?> entityClass, String query, Parameters params) {
return count(entityClass, query, params.map());
}
public Uni<Long> count(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.countDocuments(query);
}
public Uni<Long> deleteAll(Class<?> entityClass) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(new Document()).map(deleteResult -> deleteResult.getDeletedCount());
}
public Uni<Boolean> deleteById(Class<?> entityClass, Object id) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
Document query = new Document().append(ID, id);
return collection.deleteOne(query).map(results -> results.getDeletedCount() == 1);
}
public Uni<Long> delete(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public Uni<Long> delete(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindFilter(entityClass, query, params);
BsonDocument docQuery = BsonDocument.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public Uni<Long> delete(Class<?> entityClass, String query, Parameters params) {
return delete(entityClass, query, params.map());
}
public Uni<Long> delete(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(query).map(deleteResult -> deleteResult.getDeletedCount());
}
public UpdateType update(Class<?> entityClass, String update, Map<String, Object> params) {
return executeUpdate(entityClass, update, params);
}
public UpdateType update(Class<?> entityClass, String update, Parameters params) {
return update(entityClass, update, params.map());
}
public UpdateType update(Class<?> entityClass, String update, Object... params) {
return executeUpdate(entityClass, update, params);
}
private UpdateType executeUpdate(Class<?> entityClass, String update, Object... params) {
String bindUpdate = bindUpdate(entityClass, update, params);
Document docUpdate = Document.parse(bindUpdate);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return createUpdate(collection, entityClass, docUpdate);
}
private UpdateType executeUpdate(Class<?> entityClass, String update, Map<String, Object> params) {
String bindUpdate = bindUpdate(entityClass, update, params);
Document docUpdate = Document.parse(bindUpdate);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return createUpdate(collection, entityClass, docUpdate);
}
public IllegalStateException implementationInjectionMissing() {
return new IllegalStateException(
"This method is normally automatically overridden in subclasses");
}
} |
Can join the apostrophe to the string itself. | public static void pushPackages(String packageName, String sourceRoot, String installToRepo, boolean noBuild) {
Path prjDirPath = LauncherUtils.getSourceRootPath(sourceRoot);
if (Files.notExists(prjDirPath.resolve(ProjectDirConstants.MANIFEST_FILE_NAME))) {
throw createLauncherException("Couldn't locate Ballerina.toml in the project directory. Run " +
"'ballerina init' to create the Ballerina.toml file " +
"automatically and re-run the 'ballerina push' command");
}
Manifest manifest = TomlParserUtils.getManifest(prjDirPath);
if (manifest.getName().isEmpty()) {
throw createLauncherException("An org-name is required when pushing. This is not specified in " +
"Ballerina.toml inside the project");
}
if (manifest.getVersion().isEmpty()) {
throw createLauncherException("A package version is required when pushing. This is not specified " +
"in Ballerina.toml inside the project");
}
String orgName = manifest.getName();
if (!validateOrg(orgName)) {
throw createLauncherException("invalid organization name provided " + "'" + orgName + "'." + " Only " +
"lowercase alphanumerics and underscores are allowed in an organization " +
"name and the maximum length is 256 characters");
}
if (!validatePkg(packageName)) {
throw createLauncherException("invalid package name provided " + "'" + packageName + "'." + " Only " +
"alphanumerics, underscores and periods are allowed in a package name " +
"and the maximum length is 256 characters");
}
String version = manifest.getVersion();
String ballerinaVersion = RepoUtils.getBallerinaVersion();
PackageID packageID = new PackageID(new Name(orgName), new Name(packageName), new Name(version));
Path pkgPathFromPrjtDir = Paths.get(prjDirPath.toString(), ProjectDirConstants.DOT_BALLERINA_DIR_NAME,
ProjectDirConstants.DOT_BALLERINA_REPO_DIR_NAME, orgName,
packageName, version, packageName + ".zip");
if (!noBuild) {
BuilderUtils.compileWithTestsAndWrite(prjDirPath, packageName, packageName, false, false, false, false);
} else if (Files.notExists(pkgPathFromPrjtDir)) {
throw createLauncherException("Couldn't locate the package artifact to be pushed. Run 'ballerina " +
"push' without the --no-build flag");
}
if (installToRepo == null) {
String accessToken = checkAccessToken();
String mdFileContent = getPackageMDFileContent(pkgPathFromPrjtDir.toString(), packageName);
if (mdFileContent == null) {
throw createLauncherException("Cannot find Package.md file in the artifact");
}
String description = readSummary(mdFileContent);
String homepageURL = manifest.getHomepageURL();
String repositoryURL = manifest.getRepositoryURL();
String apiDocURL = manifest.getDocumentationURL();
String authors = String.join(",", manifest.getAuthors());
String keywords = String.join(",", manifest.getKeywords());
String license = manifest.getLicense();
String resourcePath = resolvePkgPathInRemoteRepo(packageID);
String msg = orgName + "/" + packageName + ":" + version + " [project repo -> central]";
Proxy proxy = settings.getProxy();
String baloVersionOfPkg = String.valueOf(ProgramFileConstants.VERSION_NUMBER);
executor.executeFunction("packaging_push/packaging_push.balx", MAIN_FUNCTION_NAME, accessToken,
mdFileContent, description, homepageURL, repositoryURL, apiDocURL, authors, keywords, license,
resourcePath, pkgPathFromPrjtDir.toString(), msg, ballerinaVersion, proxy.getHost(),
proxy.getPort(), proxy.getUserName(), proxy.getPassword(), baloVersionOfPkg);
} else {
if (!installToRepo.equals("home")) {
throw createLauncherException("Unknown repository provided to push the package");
}
installToHomeRepo(packageID, pkgPathFromPrjtDir);
}
} | throw createLauncherException("invalid organization name provided " + "'" + orgName + "'." + " Only " + | public static void pushPackages(String packageName, String sourceRoot, String installToRepo, boolean noBuild) {
        // NOTE(review): this method body is a near-identical copy of the preceding
        // pushPackages definition — likely a duplicated/merged artifact; confirm and deduplicate.
        Path prjDirPath = LauncherUtils.getSourceRootPath(sourceRoot);
        // A Ballerina.toml manifest is mandatory for pushing.
        if (Files.notExists(prjDirPath.resolve(ProjectDirConstants.MANIFEST_FILE_NAME))) {
            throw createLauncherException("Couldn't locate Ballerina.toml in the project directory. Run " +
                    "'ballerina init' to create the Ballerina.toml file " +
                    "automatically and re-run the 'ballerina push' command");
        }
        Manifest manifest = TomlParserUtils.getManifest(prjDirPath);
        if (manifest.getName().isEmpty()) {
            throw createLauncherException("An org-name is required when pushing. This is not specified in " +
                    "Ballerina.toml inside the project");
        }
        if (manifest.getVersion().isEmpty()) {
            throw createLauncherException("A package version is required when pushing. This is not specified " +
                    "in Ballerina.toml inside the project");
        }
        String orgName = manifest.getName();
        // Org and package names are validated against RepoUtils naming rules.
        if (!RepoUtils.validateOrg(orgName)) {
            throw createLauncherException("invalid organization name provided \'" + orgName + "\'. Only " +
                    "lowercase alphanumerics and underscores are allowed in an organization " +
                    "name and the maximum length is 256 characters");
        }
        if (!RepoUtils.validatePkg(packageName)) {
            throw createLauncherException("invalid package name provided \'" + packageName + "\'. Only " +
                    "alphanumerics, underscores and periods are allowed in a package name and " +
                    "the maximum length is 256 characters");
        }
        String version = manifest.getVersion();
        String ballerinaVersion = RepoUtils.getBallerinaVersion();
        PackageID packageID = new PackageID(new Name(orgName), new Name(packageName), new Name(version));
        // Artifact path: <project>/.ballerina/repo/<org>/<pkg>/<version>/<pkg>.zip
        Path pkgPathFromPrjtDir = Paths.get(prjDirPath.toString(), ProjectDirConstants.DOT_BALLERINA_DIR_NAME,
                ProjectDirConstants.DOT_BALLERINA_REPO_DIR_NAME, orgName,
                packageName, version, packageName + ".zip");
        if (!noBuild) {
            BuilderUtils.compileWithTestsAndWrite(prjDirPath, packageName, packageName, false, false, false, false);
        } else if (Files.notExists(pkgPathFromPrjtDir)) {
            throw createLauncherException("Couldn't locate the package artifact to be pushed. Run 'ballerina " +
                    "push' without the --no-build flag");
        }
        if (installToRepo == null) {
            // Push to central: needs a CLI access token and Package.md metadata.
            String accessToken = checkAccessToken();
            String mdFileContent = getPackageMDFileContent(pkgPathFromPrjtDir.toString(), packageName);
            if (mdFileContent == null) {
                throw createLauncherException("Cannot find Package.md file in the artifact");
            }
            String description = readSummary(mdFileContent);
            String homepageURL = manifest.getHomepageURL();
            String repositoryURL = manifest.getRepositoryURL();
            String apiDocURL = manifest.getDocumentationURL();
            String authors = String.join(",", manifest.getAuthors());
            String keywords = String.join(",", manifest.getKeywords());
            String license = manifest.getLicense();
            String resourcePath = resolvePkgPathInRemoteRepo(packageID);
            String msg = orgName + "/" + packageName + ":" + version + " [project repo -> central]";
            Proxy proxy = settings.getProxy();
            String baloVersionOfPkg = String.valueOf(ProgramFileConstants.VERSION_NUMBER);
            executor.executeFunction("packaging_push/packaging_push.balx", MAIN_FUNCTION_NAME, accessToken,
                    mdFileContent, description, homepageURL, repositoryURL, apiDocURL, authors, keywords, license,
                    resourcePath, pkgPathFromPrjtDir.toString(), msg, ballerinaVersion, proxy.getHost(),
                    proxy.getPort(), proxy.getUserName(), proxy.getPassword(), baloVersionOfPkg);
        } else {
            // Only the "home" repository is supported as an explicit install target.
            if (!installToRepo.equals("home")) {
                throw createLauncherException("Unknown repository provided to push the package");
            }
            installToHomeRepo(packageID, pkgPathFromPrjtDir);
        }
    }
} | class PushUtils {
    // URL of the central CLI token page, opened in the browser for token retrieval.
    // NOTE(review): the literal below is truncated at "https:" — the remainder of the URL
    // was lost (likely by comment-stripping of "//"); restore the full URL.
    private static final String BALLERINA_CENTRAL_CLI_TOKEN = "https:
    private static final PrintStream SYS_ERR = System.err;
    // Home repository root (~/.ballerina by default) and its Settings.toml.
    private static final Path BALLERINA_HOME_PATH = RepoUtils.createAndGetHomeReposPath();
    private static final Path SETTINGS_TOML_FILE_PATH = BALLERINA_HOME_PATH.resolve(
            ProjectDirConstants.SETTINGS_FILE_NAME);
    private static PrintStream outStream = System.out;
    // Embedded executor used to run the packaged .balx helper programs.
    private static EmbeddedExecutor executor = EmbeddedExecutorProvider.getInstance().getExecutor();
    // Settings loaded lazily by getAccessTokenOfCLI(); read by pushPackages for proxy config.
    private static Settings settings;
/**
* Push/Uploads packages to the central repository.
*
* @param packageName path of the package folder to be pushed
* @param sourceRoot path to the directory containing source files and packages
* @param installToRepo repo the package should be pushed to central or the home repository
* @param noBuild do not build sources before pushing
*/
    /**
     * Checks whether the access token is available in Settings.toml.
     *
     * @return the access token if it is present
     */
    private static String checkAccessToken() {
        String accessToken = getAccessTokenOfCLI();
        if (accessToken.isEmpty()) {
            // No token yet: open the browser on the token page and wait for the
            // token-updater program to write it into Settings.toml.
            try {
                SYS_ERR.println("Opening the web browser to " +
                        BALLERINA_CENTRAL_CLI_TOKEN +
                        " for auto token update ...");
                BrowserLauncher.startInDefaultBrowser(BALLERINA_CENTRAL_CLI_TOKEN);
            } catch (IOException e) {
                // NOTE(review): the string literal below is truncated at "https:" — the rest
                // of the message (and the closing ");") was lost during extraction; restore it.
                throw createLauncherException("Access token is missing in " + SETTINGS_TOML_FILE_PATH.toString() +
                        "\nAuto update failed. Please visit https:
            }
            // Detect the token arrival by watching Settings.toml's modification time.
            long modifiedTimeOfFileAtStart = getLastModifiedTimeOfFile(SETTINGS_TOML_FILE_PATH);
            executor.executeFunction("packaging_token_updater/packaging_token_updater.balx", MAIN_FUNCTION_NAME);
            boolean waitForToken = true;
            while (waitForToken) {
                // Poll every 3 seconds (see pause()) until the file changes.
                pause();
                long modifiedTimeOfFileAfter = getLastModifiedTimeOfFile(SETTINGS_TOML_FILE_PATH);
                if (modifiedTimeOfFileAtStart != modifiedTimeOfFileAfter) {
                    accessToken = getAccessTokenOfCLI();
                    if (accessToken.isEmpty()) {
                        // NOTE(review): literal truncated at "https:" here as well — restore.
                        throw createLauncherException("Access token is missing in " +
                                SETTINGS_TOML_FILE_PATH.toString() + "\nPlease " +
                                "visit https:
                    } else {
                        waitForToken = false;
                    }
                }
            }
        }
        return accessToken;
    }
/**
* Pause for 3s to check if the access token is received.
*/
private static void pause() {
try {
Thread.sleep(3000);
} catch (InterruptedException ex) {
throw createLauncherException("Error occurred while retrieving the access token");
}
}
/**
* Get last modified time of file.
*
* @param path file path
* @return last modified time in milliseconds
*/
private static long getLastModifiedTimeOfFile(Path path) {
if (!Files.isRegularFile(path)) {
return -1;
}
try {
return Files.getLastModifiedTime(path).toMillis();
} catch (IOException ex) {
throw createLauncherException("Error occurred when reading file for token " +
SETTINGS_TOML_FILE_PATH.toString());
}
}
/**
* Install the package artifact to the home repository.
*
* @param packageID packageID of the package
* @param pkgPathFromPrjtDir package path from the project directory
*/
private static void installToHomeRepo(PackageID packageID, Path pkgPathFromPrjtDir) {
Path targetDirectoryPath = Paths.get(BALLERINA_HOME_PATH.toString(),
ProjectDirConstants.DOT_BALLERINA_REPO_DIR_NAME,
packageID.orgName.getValue(),
packageID.name.getValue(),
packageID.version.getValue(),
packageID.name.getValue() + ".zip");
if (Files.exists(targetDirectoryPath)) {
throw createLauncherException("Ballerina package exists in the home repository");
} else {
try {
Files.createDirectories(targetDirectoryPath);
Files.copy(pkgPathFromPrjtDir, targetDirectoryPath, StandardCopyOption.REPLACE_EXISTING);
outStream.println(packageID.orgName.getValue() + "/" + packageID.name.getValue() + ":" +
packageID.version.getValue() + " [project repo -> home repo]");
} catch (IOException e) {
throw createLauncherException("Error occurred when creating directories in the home repository");
}
}
}
/**
* Get URI of the package from the remote repo.
*
* @param packageID packageID object
* @return full URI path of the package relative to the remote repo
*/
private static String resolvePkgPathInRemoteRepo(PackageID packageID) {
Repo<URI> remoteRepo = new RemoteRepo(URI.create(RepoUtils.getRemoteRepoURL()));
Patten patten = remoteRepo.calculate(packageID);
if (patten == Patten.NULL) {
throw createLauncherException("Couldn't find package " + packageID.toString());
}
Converter<URI> converter = remoteRepo.getConverterInstance();
List<URI> uris = patten.convert(converter, packageID).collect(Collectors.toList());
if (uris.isEmpty()) {
throw createLauncherException("Couldn't find package " + packageID.toString());
}
return uris.get(0).toString();
}
/**
* Read the access token generated for the CLI.
*
* @return access token for generated for the CLI
*/
private static String getAccessTokenOfCLI() {
settings = TomlParserUtils.readSettings();
if (settings.getCentral() != null) {
return settings.getCentral().getAccessToken();
}
return "";
}
/**
* Reads the content of Package.md inside the archived balo.
*
* @param archivedFilePath balo file path of the package
* @return content of Package.md as a string
*/
private static String getPackageMDFileContent(String archivedFilePath, String packageName) {
ZipFile zipFile = null;
try {
zipFile = new ZipFile(archivedFilePath);
Enumeration<? extends ZipEntry> entries = zipFile.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
if (entry.getName().equalsIgnoreCase(packageName + "/" + "Package.md")) {
InputStream stream = zipFile.getInputStream(entry);
Scanner scanner = new Scanner(stream, "UTF-8").useDelimiter("\\A");
return scanner.hasNext() ? scanner.next() : "";
}
}
} catch (IOException ignore) {
} finally {
try {
if (zipFile != null) {
zipFile.close();
}
} catch (IOException ignore) {
}
}
return null;
}
    /**
     * Read summary of the package from Package.md file.
     *
     * @param mdFileContent full content of Package.md
     * @return summary of the package (first non-empty, non-heading line, max 50 chars)
     */
    private static String readSummary(String mdFileContent) {
        if (mdFileContent.isEmpty()) {
            throw createLauncherException("Package.md in the artifact is empty");
        }
        // The summary is the first meaningful line of the markdown file.
        // NOTE(review): the startsWith(...) literal below is truncated — the filtered
        // prefix (and the closing ")") was lost during extraction; restore it
        // (presumably a markdown-heading prefix — confirm against the original source).
        Optional<String> result = Arrays.stream(mdFileContent.split("\n"))
                .filter(line -> !line.isEmpty() && !line.startsWith("
                .findFirst();
        if (!result.isPresent()) {
            throw createLauncherException("Cannot find package summary");
        }
        String firstLine = result.get();
        // Central imposes a 50-character limit on package summaries.
        if (firstLine.length() > 50) {
            throw createLauncherException("Summary of the package exceeds 50 characters");
        }
        return firstLine;
    }
/**
* Push all packages to central.
*
* @param sourceRoot source root or project root
* @param installToRepo repo the package should be pushed to central or the home repository
* @param noBuild do not build sources before pushing
*/
public static void pushAllPackages(String sourceRoot, String installToRepo, boolean noBuild) {
    Path sourceRootPath = LauncherUtils.getSourceRootPath(sourceRoot);
    // Files.list opens a directory handle; the original never closed the
    // returned stream, leaking the descriptor. Fully qualified type avoids a
    // new import.
    try (java.util.stream.Stream<Path> contents = Files.list(sourceRootPath)) {
        List<String> fileList = contents
                .filter(path -> Files.isDirectory(path, LinkOption.NOFOLLOW_LINKS))
                .map(ProjectDirs::getLastComp)
                .filter(dirName -> !isSpecialDirectory(dirName))
                .map(Path::toString).collect(Collectors.toList());
        if (fileList.isEmpty()) {
            throw createLauncherException("no packages found to push in " + sourceRootPath.toString());
        }
        fileList.forEach(path -> pushPackages(path, sourceRoot, installToRepo, noBuild));
    } catch (IOException ex) {
        throw createLauncherException("error occurred while pushing packages from " + sourceRootPath.toString()
                + " " + ex.getMessage());
    }
}
/**
* Checks if the directory is a special directory that is not a package.
*
* @param dirName directory name
* @return if the directory is a special directory or not
*/
private static boolean isSpecialDirectory(Path dirName) {
    String name = dirName.toString();
    // Dot-prefixed and hidden directories are never packages.
    if (name.startsWith(".") || dirName.toFile().isHidden()) {
        return true;
    }
    // Neither are the reserved target/resources directories.
    return Arrays.asList(ProjectDirConstants.TARGET_DIR_NAME, ProjectDirConstants.RESOURCE_DIR_NAME)
            .contains(name);
}
/**
* Validates the org-name and package name.
*
* @param orgName The org-name
* @return True if valid org-name or package name, else false.
*/
private static boolean validateOrg(String orgName) {
    // Lowercase alphanumerics and underscores only; an empty name also matches.
    return orgName.matches("^[a-z0-9_]*$");
}
/**
* Validates the org-name and package name.
*
* @param pkgName The org-name or package name.
* @return True if valid org-name or package name, else false.
*/
private static boolean validatePkg(String pkgName) {
    // Alphanumerics (both cases), underscores and dots only; empty also matches.
    return pkgName.matches("^[a-zA-Z0-9_.]*$");
}
} | class PushUtils {
private static final String BALLERINA_CENTRAL_CLI_TOKEN = "https:
private static final PrintStream SYS_ERR = System.err;
private static final Path BALLERINA_HOME_PATH = RepoUtils.createAndGetHomeReposPath();
private static final Path SETTINGS_TOML_FILE_PATH = BALLERINA_HOME_PATH.resolve(
ProjectDirConstants.SETTINGS_FILE_NAME);
private static PrintStream outStream = System.out;
private static EmbeddedExecutor executor = EmbeddedExecutorProvider.getInstance().getExecutor();
private static Settings settings;
/**
* Push/Uploads packages to the central repository.
*
* @param packageName path of the package folder to be pushed
* @param sourceRoot path to the directory containing source files and packages
* @param installToRepo repo the package should be pushed to central or the home repository
* @param noBuild do not build sources before pushing
*/
/**
* Checks if the access token is available in Settings.toml or not.
*
* @return access token if its present
*/
private static String checkAccessToken() {
    // Fetch the token already stored in Settings.toml, if any.
    String accessToken = getAccessTokenOfCLI();
    if (accessToken.isEmpty()) {
        try {
            SYS_ERR.println("Opening the web browser to " +
                    BALLERINA_CENTRAL_CLI_TOKEN +
                    " for auto token update ...");
            BrowserLauncher.startInDefaultBrowser(BALLERINA_CENTRAL_CLI_TOKEN);
        } catch (IOException e) {
            // NOTE(review): the literal below is truncated in this dump
            // (unterminated after "https:"); restore the full URL from upstream.
            throw createLauncherException("Access token is missing in " + SETTINGS_TOML_FILE_PATH.toString() +
                    "\nAuto update failed. Please visit https:
        }
        // Remember the settings file's mtime so we can detect when the
        // browser-based flow (run by the embedded executor below) writes a token.
        long modifiedTimeOfFileAtStart = getLastModifiedTimeOfFile(SETTINGS_TOML_FILE_PATH);
        executor.executeFunction("packaging_token_updater/packaging_token_updater.balx", MAIN_FUNCTION_NAME);
        boolean waitForToken = true;
        // Poll (3s intervals via pause()) until Settings.toml changes on disk.
        while (waitForToken) {
            pause();
            long modifiedTimeOfFileAfter = getLastModifiedTimeOfFile(SETTINGS_TOML_FILE_PATH);
            if (modifiedTimeOfFileAtStart != modifiedTimeOfFileAfter) {
                accessToken = getAccessTokenOfCLI();
                if (accessToken.isEmpty()) {
                    // File changed but still no token: give up with guidance.
                    // NOTE(review): literal truncated in this dump after "https:".
                    throw createLauncherException("Access token is missing in " +
                            SETTINGS_TOML_FILE_PATH.toString() + "\nPlease " +
                            "visit https:
                } else {
                    waitForToken = false;
                }
            }
        }
    }
    return accessToken;
}
/**
* Pause for 3s to check if the access token is received.
*/
private static void pause() {
    try {
        // 3-second delay between polls of Settings.toml.
        Thread.sleep(3000);
    } catch (InterruptedException ex) {
        // Restore the interrupt flag so callers up the stack can observe it;
        // the original swallowed the interrupt status.
        Thread.currentThread().interrupt();
        throw createLauncherException("Error occurred while retrieving the access token");
    }
}
/**
* Get last modified time of file.
*
* @param path file path
* @return last modified time in milliseconds
*/
private static long getLastModifiedTimeOfFile(Path path) {
    if (Files.isRegularFile(path)) {
        try {
            return Files.getLastModifiedTime(path).toMillis();
        } catch (IOException ex) {
            throw createLauncherException("Error occurred when reading file for token " +
                    SETTINGS_TOML_FILE_PATH.toString());
        }
    }
    // Anything that is not a regular file (missing, directory, ...) reports -1.
    return -1;
}
/**
* Install the package artifact to the home repository.
*
* @param packageID packageID of the package
* @param pkgPathFromPrjtDir package path from the project directory
*/
private static void installToHomeRepo(PackageID packageID, Path pkgPathFromPrjtDir) {
    // Target layout: <home>/.ballerina/repo/<org>/<name>/<version>/<name>.zip
    Path targetDirectoryPath = Paths.get(BALLERINA_HOME_PATH.toString(),
            ProjectDirConstants.DOT_BALLERINA_REPO_DIR_NAME,
            packageID.orgName.getValue(),
            packageID.name.getValue(),
            packageID.version.getValue(),
            packageID.name.getValue() + ".zip");
    if (Files.exists(targetDirectoryPath)) {
        throw createLauncherException("Ballerina package exists in the home repository");
    } else {
        try {
            // Create the parent directories only. The original called
            // createDirectories on the zip path itself (making a directory
            // named <name>.zip) and relied on REPLACE_EXISTING to swap the
            // empty directory for the copied file.
            Files.createDirectories(targetDirectoryPath.getParent());
            Files.copy(pkgPathFromPrjtDir, targetDirectoryPath, StandardCopyOption.REPLACE_EXISTING);
            outStream.println(packageID.orgName.getValue() + "/" + packageID.name.getValue() + ":" +
                    packageID.version.getValue() + " [project repo -> home repo]");
        } catch (IOException e) {
            throw createLauncherException("Error occurred when creating directories in the home repository");
        }
    }
}
/**
* Get URI of the package from the remote repo.
*
* @param packageID packageID object
* @return full URI path of the package relative to the remote repo
*/
private static String resolvePkgPathInRemoteRepo(PackageID packageID) {
    Repo<URI> remoteRepo = new RemoteRepo(URI.create(RepoUtils.getRemoteRepoURL()));
    Patten patten = remoteRepo.calculate(packageID);
    if (patten == Patten.NULL) {
        throw createLauncherException("Couldn't find package " + packageID.toString());
    }
    Converter<URI> converter = remoteRepo.getConverterInstance();
    // Only the first candidate URI is ever used, so take it directly from the stream.
    Optional<URI> firstUri = patten.convert(converter, packageID).findFirst();
    if (!firstUri.isPresent()) {
        throw createLauncherException("Couldn't find package " + packageID.toString());
    }
    return firstUri.get().toString();
}
/**
* Read the access token generated for the CLI.
*
* @return access token for generated for the CLI
*/
private static String getAccessTokenOfCLI() {
    // Re-read Settings.toml on every call so a freshly written token is picked up.
    settings = TomlParserUtils.readSettings();
    return settings.getCentral() == null ? "" : settings.getCentral().getAccessToken();
}
/**
* Reads the content of Package.md inside the archived balo.
*
* @param archivedFilePath balo file path of the package
* @return content of Package.md as a string
*/
private static String getPackageMDFileContent(String archivedFilePath, String packageName) {
    // try-with-resources closes the zip and the scanner (hence the entry stream)
    // even on a partial read; the original leaked the Scanner/InputStream and
    // used a manual finally block for the ZipFile.
    try (ZipFile zipFile = new ZipFile(archivedFilePath)) {
        Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            if (entry.getName().equalsIgnoreCase(packageName + "/" + "Package.md")) {
                try (InputStream stream = zipFile.getInputStream(entry);
                     // "\\A" delimiter slurps the whole entry in one token.
                     Scanner scanner = new Scanner(stream, "UTF-8").useDelimiter("\\A")) {
                    return scanner.hasNext() ? scanner.next() : "";
                }
            }
        }
    } catch (IOException ignore) {
        // Best-effort read: an unreadable balo falls through to null, matching
        // the original contract.
    }
    // null when the archive has no <packageName>/Package.md entry or could not be read.
    return null;
}
/**
* Read summary of the package from Package.md file.
*
* @param mdFileContent full content of Package.md
* @return summary of the package
*/
private static String readSummary(String mdFileContent) {
if (mdFileContent.isEmpty()) {
throw createLauncherException("Package.md in the artifact is empty");
}
Optional<String> result = Arrays.stream(mdFileContent.split("\n"))
.filter(line -> !line.isEmpty() && !line.startsWith("
.findFirst();
if (!result.isPresent()) {
throw createLauncherException("Cannot find package summary");
}
String firstLine = result.get();
if (firstLine.length() > 50) {
throw createLauncherException("Summary of the package exceeds 50 characters");
}
return firstLine;
}
/**
* Push all packages to central.
*
* @param sourceRoot source root or project root
* @param installToRepo repo the package should be pushed to central or the home repository
* @param noBuild do not build sources before pushing
*/
public static void pushAllPackages(String sourceRoot, String installToRepo, boolean noBuild) {
    Path sourceRootPath = LauncherUtils.getSourceRootPath(sourceRoot);
    // Files.list opens a directory handle; the original never closed the
    // returned stream, leaking the descriptor. Fully qualified type avoids a
    // new import.
    try (java.util.stream.Stream<Path> contents = Files.list(sourceRootPath)) {
        List<String> fileList = contents
                .filter(path -> Files.isDirectory(path, LinkOption.NOFOLLOW_LINKS))
                .map(ProjectDirs::getLastComp)
                .filter(dirName -> !isSpecialDirectory(dirName))
                .map(Path::toString).collect(Collectors.toList());
        if (fileList.isEmpty()) {
            throw createLauncherException("no packages found to push in " + sourceRootPath.toString());
        }
        fileList.forEach(path -> pushPackages(path, sourceRoot, installToRepo, noBuild));
    } catch (IOException ex) {
        throw createLauncherException("error occurred while pushing packages from " + sourceRootPath.toString()
                + " " + ex.getMessage());
    }
}
/**
* Checks if the directory is a special directory that is not a package.
*
* @param dirName directory name
* @return if the directory is a special directory or not
*/
private static boolean isSpecialDirectory(Path dirName) {
    String name = dirName.toString();
    // Dot-prefixed and hidden directories are never packages.
    if (name.startsWith(".") || dirName.toFile().isHidden()) {
        return true;
    }
    // Neither are the reserved target/resources directories.
    return Arrays.asList(ProjectDirConstants.TARGET_DIR_NAME, ProjectDirConstants.RESOURCE_DIR_NAME)
            .contains(name);
}
} |
We can use `internalKeyName` in L213, L216, and L219, right? | private static Object getStructData(BMap data, BField[] structFields, int index, BString key) {
    // Converts one field of a BMap to a JSON value: with no struct metadata the
    // field is read as an array of primitives; otherwise it is mapped
    // (recursively for nested maps) to a JSON object.
    if (structFields == null) {
        ArrayValue jsonArray = new ArrayValueImpl(new BArrayType(PredefinedTypes.TYPE_JSON));
        if (data != null) {
            BArray dataArray = data.getArrayValue(key);
            for (int i = 0; i < dataArray.size(); i++) {
                Object value = dataArray.get(i);
                // Only primitive element types are copied; other types are silently skipped.
                if (value instanceof String) {
                    jsonArray.append(value);
                } else if (value instanceof Boolean) {
                    jsonArray.append(value);
                } else if (value instanceof Long) {
                    jsonArray.append(value);
                } else if (value instanceof Double) {
                    jsonArray.append(value);
                } else if (value instanceof Integer) {
                    jsonArray.append(value);
                } else if (value instanceof Float) {
                    jsonArray.append(value);
                } else if (value instanceof DecimalValue) {
                    // Decimals are narrowed to their float value for JSON.
                    jsonArray.append(((DecimalValue) value).floatValue());
                }
            }
        }
        return jsonArray;
    } else {
        MapValue<BString, Object> jsonData = new MapValueImpl<>(new BMapType(PredefinedTypes.TYPE_JSON));
        boolean structError = true;
        if (data != null) {
            Type internalType = structFields[index].getFieldType();
            if (internalType.getTag() == TypeTags.OBJECT_TYPE_TAG
                    || internalType.getTag() == TypeTags.RECORD_TYPE_TAG) {
                BField[] internalStructFields =
                        ((BStructureType) internalType).getFields().values().toArray(new BField[0]);
                for (int i = 0; i < internalStructFields.length; i++) {
                    BString internalKeyName = StringUtils.fromString(internalStructFields[i].getFieldName());
                    Object value = data.get(internalKeyName);
                    if (value instanceof BigDecimal) {
                        // NOTE(review): `internalKeyName` already holds this key — it can be
                        // reused in the three put(...) calls below instead of recomputing
                        // StringUtils.fromString(...) each time.
                        jsonData.put(StringUtils.fromString(internalStructFields[i].getFieldName()),
                                ((BigDecimal) value).doubleValue());
                    } else if (value instanceof MapValueImpl) {
                        jsonData.put(StringUtils.fromString(internalStructFields[i].getFieldName()),
                                getStructData((MapValueImpl) value, internalStructFields, i, internalKeyName));
                    } else {
                        jsonData.put(StringUtils.fromString(internalStructFields[i].getFieldName()), value);
                    }
                    // Reaching here at least once means the struct was mapped successfully.
                    structError = false;
                }
            }
        }
        if (structError) {
            throw new BallerinaException("error in constructing the json object from struct type data");
        }
        return jsonData;
    }
} | jsonData.put(StringUtils.fromString(internalStructFields[i].getFieldName()), value); | private static Object getStructData(BMap data, BField[] structFields, int index, BString key) {
    // Revised version: `internalKeyName` is computed once per field and reused
    // for all three put(...) calls, avoiding the repeated StringUtils.fromString.
    if (structFields == null) {
        ArrayValue jsonArray = new ArrayValueImpl(new BArrayType(PredefinedTypes.TYPE_JSON));
        if (data != null) {
            BArray dataArray = data.getArrayValue(key);
            for (int i = 0; i < dataArray.size(); i++) {
                Object value = dataArray.get(i);
                // Only primitive element types are copied; others are silently skipped.
                if (value instanceof String) {
                    jsonArray.append(value);
                } else if (value instanceof Boolean) {
                    jsonArray.append(value);
                } else if (value instanceof Long) {
                    jsonArray.append(value);
                } else if (value instanceof Double) {
                    jsonArray.append(value);
                } else if (value instanceof Integer) {
                    jsonArray.append(value);
                } else if (value instanceof Float) {
                    jsonArray.append(value);
                } else if (value instanceof DecimalValue) {
                    // Decimals are narrowed to their float value for JSON.
                    jsonArray.append(((DecimalValue) value).floatValue());
                }
            }
        }
        return jsonArray;
    } else {
        MapValue<BString, Object> jsonData = new MapValueImpl<>(new BMapType(PredefinedTypes.TYPE_JSON));
        boolean structError = true;
        if (data != null) {
            Type internalType = structFields[index].getFieldType();
            if (internalType.getTag() == TypeTags.OBJECT_TYPE_TAG
                    || internalType.getTag() == TypeTags.RECORD_TYPE_TAG) {
                BField[] internalStructFields =
                        ((BStructureType) internalType).getFields().values().toArray(new BField[0]);
                for (int i = 0; i < internalStructFields.length; i++) {
                    BString internalKeyName = StringUtils.fromString(internalStructFields[i].getFieldName());
                    Object value = data.get(internalKeyName);
                    if (value instanceof BigDecimal) {
                        jsonData.put(internalKeyName, ((BigDecimal) value).doubleValue());
                    } else if (value instanceof MapValueImpl) {
                        // Nested map: recurse with this field's metadata.
                        jsonData.put(internalKeyName,
                                getStructData((MapValueImpl) value, internalStructFields, i, internalKeyName));
                    } else {
                        jsonData.put(internalKeyName, value);
                    }
                    structError = false;
                }
            }
        }
        if (structError) {
            throw new BallerinaException("error in constructing the json object from struct type data");
        }
        return jsonData;
    }
} | class DefaultJSONObjectGenerator implements JSONObjectGenerator {
@Override
public Object transform(MapValueImpl record) {
    // Maps every field of the record into a JSON object node via constructJsonData.
    MapValue<BString, Object> objNode = new MapValueImpl<>(new BMapType(PredefinedTypes.TYPE_JSON));
    BStructureType structType = (BStructureType) record.getType();
    BField[] structFields = null;
    if (structType != null) {
        structFields = structType.getFields().values().toArray(new BField[0]);
    }
    // NOTE(review): if structType were ever null, the dereferences below
    // (structType.getFields() and structFields.length) would still NPE — the
    // null check above does not protect them. Confirm whether record.getType()
    // can actually be null here.
    Map<String, Field> internalStructFields = structType.getFields();
    if (structFields.length > 0) {
        Iterator<Map.Entry<String, Field>> itr = internalStructFields.entrySet().iterator();
        for (int i = 0; i < internalStructFields.size(); i++) {
            Field internalStructField = itr.next().getValue();
            int type = internalStructField.getFieldType().getTag();
            String fieldName = internalStructField.getFieldName();
            constructJsonData(record, objNode, fieldName, type, structFields, i);
        }
    }
    return objNode;
}
} | class DefaultJSONObjectGenerator implements JSONObjectGenerator {
@Override
public Object transform(MapValueImpl record) {
MapValue<BString, Object> objNode = new MapValueImpl<>(new BMapType(PredefinedTypes.TYPE_JSON));
BStructureType structType = (BStructureType) record.getType();
BField[] structFields = null;
if (structType != null) {
structFields = structType.getFields().values().toArray(new BField[0]);
}
Map<String, Field> internalStructFields = structType.getFields();
if (structFields.length > 0) {
Iterator<Map.Entry<String, Field>> itr = internalStructFields.entrySet().iterator();
for (int i = 0; i < internalStructFields.size(); i++) {
Field internalStructField = itr.next().getValue();
int type = internalStructField.getFieldType().getTag();
String fieldName = internalStructField.getFieldName();
constructJsonData(record, objNode, fieldName, type, structFields, i);
}
}
return objNode;
}
} |
can be shortened to `notNumberGauges.forEach(gauges::remove);` | public void report() {
DatadogHttpRequest request = new DatadogHttpRequest();
// Gauges whose getMetricValue() throws are collected for removal so they are
// not probed (and re-logged) again on every report cycle.
List<Gauge> notNumberGauges = new ArrayList<>();
for (Map.Entry<Gauge, DGauge> entry : gauges.entrySet()) {
    DGauge g = entry.getValue();
    try {
        g.getMetricValue();
        request.addGauge(g);
    } catch (Exception e) {
        LOGGER.warn("the Gauge {} is not of Number type", g.getMetric());
        notNumberGauges.add(entry.getKey());
    }
}
// forEach on an empty list is a no-op, so no isEmpty() guard is needed
// (review suggestion: replaces stream().forEach(g -> gauges.remove(g))).
notNumberGauges.forEach(gauges::remove);
for (DCounter c : counters.values()) {
    request.addCounter(c);
}
for (DMeter m : meters.values()) {
    request.addMeter(m);
}
try {
    client.send(request);
} catch (SocketTimeoutException e) {
    LOGGER.warn("Failed reporting metrics to Datadog because of socket timeout.", e.getMessage());
} catch (Exception e) {
    LOGGER.warn("Failed reporting metrics to Datadog.", e);
}
} | notNumberGauges.stream().forEach(g -> gauges.remove(g)); | public void report() {
DatadogHttpRequest request = new DatadogHttpRequest();
List<Gauge> gaugesToRemove = new ArrayList<>();
for (Map.Entry<Gauge, DGauge> entry : gauges.entrySet()) {
    DGauge g = entry.getValue();
    try {
        // Probe the gauge once; only gauges that report cleanly are sent.
        g.getMetricValue();
        request.addGauge(g);
    } catch (ClassCastException e) {
        // Non-numeric gauge value: cannot be represented, drop permanently.
        LOGGER.info("The metric {} will not be reported because only number types are supported by this reporter.", g.getMetric());
        gaugesToRemove.add(entry.getKey());
    } catch (Exception e) {
        // Any other failure also drops the gauge; full stack trace at debug only.
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("The metric {} will not be reported because it threw an exception.", g.getMetric(), e);
        } else {
            LOGGER.info("The metric {} will not be reported because it threw an exception.", g.getMetric());
        }
        gaugesToRemove.add(entry.getKey());
    }
}
// Drop misbehaving gauges so they are not probed again on the next cycle.
gaugesToRemove.forEach(gauges::remove);
for (DCounter c : counters.values()) {
    request.addCounter(c);
}
for (DMeter m : meters.values()) {
    request.addMeter(m);
}
try {
    client.send(request);
} catch (SocketTimeoutException e) {
    // NOTE(review): the message has no "{}" placeholder, so e.getMessage()
    // appears to be passed as an unused argument — confirm intent.
    LOGGER.warn("Failed reporting metrics to Datadog because of socket timeout.", e.getMessage());
} catch (Exception e) {
    LOGGER.warn("Failed reporting metrics to Datadog.", e);
}
} | class DatadogHttpReporter implements MetricReporter, Scheduled {
private static final Logger LOGGER = LoggerFactory.getLogger(DatadogHttpReporter.class);
private static final String HOST_VARIABLE = "<host>";
private final Map<Gauge, DGauge> gauges = new ConcurrentHashMap<>();
private final Map<Counter, DCounter> counters = new ConcurrentHashMap<>();
private final Map<Meter, DMeter> meters = new ConcurrentHashMap<>();
private DatadogHttpClient client;
private List<String> configTags;
public static final String API_KEY = "apikey";
public static final String TAGS = "tags";
@Override
public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
    // Register the metric under its fully scoped identifier, tagged with both
    // the configured tags and the metric group's scope variables.
    final String name = group.getMetricIdentifier(metricName);
    List<String> tags = new ArrayList<>(configTags);
    tags.addAll(getTagsFromMetricGroup(group));
    String host = getHostFromMetricGroup(group);
    if (metric instanceof Counter) {
        Counter c = (Counter) metric;
        counters.put(c, new DCounter(c, name, host, tags));
    } else if (metric instanceof Gauge) {
        Gauge g = (Gauge) metric;
        gauges.put(g, new DGauge(g, name, host, tags));
    } else if (metric instanceof Meter) {
        Meter m = (Meter) metric;
        meters.put(m, new DMeter(m, name, host, tags));
    } else if (metric instanceof Histogram) {
        // Histograms cannot be represented in the Datadog HTTP API; warn and skip.
        LOGGER.warn("Cannot add {} because Datadog HTTP API doesn't support Histogram", metricName);
    } else {
        LOGGER.warn("Cannot add unknown metric type {}. This indicates that the reporter " +
                "does not support this metric type.", metric.getClass().getName());
    }
}
@Override
public void notifyOfRemovedMetric(Metric metric, String metricName, MetricGroup group) {
    // Deregister the metric from the matching map; remove(Object) is a no-op
    // if the metric was never registered.
    if (metric instanceof Counter) {
        counters.remove(metric);
    } else if (metric instanceof Gauge) {
        gauges.remove(metric);
    } else if (metric instanceof Meter) {
        meters.remove(metric);
    } else if (metric instanceof Histogram) {
        // Intentionally empty: histograms are never registered (unsupported by
        // the HTTP API, see notifyOfAddedMetric), so there is nothing to remove.
    } else {
        LOGGER.warn("Cannot remove unknown metric type {}. This indicates that the reporter " +
                "does not support this metric type.", metric.getClass().getName());
    }
}
@Override
public void open(MetricConfig config) {
client = new DatadogHttpClient(config.getString(API_KEY, null));
LOGGER.info("Configured DatadogHttpReporter");
configTags = getTagsFromConfig(config.getString(TAGS, ""));
}
@Override
public void close() {
client.close();
LOGGER.info("Shut down DatadogHttpReporter");
}
@Override
/**
* Get config tags from config 'metrics.reporter.dghttp.tags'.
*/
private List<String> getTagsFromConfig(String str) {
    // Splits the comma-separated tag list from configuration.
    // NOTE(review): an empty config string yields a single empty tag, because
    // "".split(",") returns [""] — confirm whether that is intended.
    return Arrays.asList(str.split(","));
}
/**
* Get tags from MetricGroup
*/
private List<String> getTagsFromMetricGroup(MetricGroup metricGroup) {
    // Every scope variable except the reserved host entry becomes a "name:value" tag.
    List<String> tags = new ArrayList<>();
    metricGroup.getAllVariables().forEach((variable, value) -> {
        if (!variable.equals(HOST_VARIABLE)) {
            tags.add(getVariableName(variable) + ":" + value);
        }
    });
    return tags;
}
private String getHostFromMetricGroup(MetricGroup metricGroup) {
    // The host is stored under the reserved "<host>" variable key; may be null.
    return metricGroup.getAllVariables().get(HOST_VARIABLE);
}
/**
* Removes leading and trailing angle brackets.
*/
private String getVariableName(String str) {
    // Scope variables arrive as "<name>"; strip the surrounding angle brackets.
    return str.substring(1, str.length() - 1);
}
/**
* Compact metrics in batch, serialize them, and send to Datadog via HTTP.
*/
static class DatadogHttpRequest {
    // Accumulates all metrics of one reporting cycle into a single series payload.
    private final DSeries series;
    public DatadogHttpRequest() {
        series = new DSeries();
    }
    public void addGauge(DGauge gauge) {
        series.addMetric(gauge);
    }
    public void addCounter(DCounter counter) {
        series.addMetric(counter);
    }
    public void addMeter(DMeter meter) {
        series.addMetric(meter);
    }
    public DSeries getSeries() {
        return series;
    }
}
} | class DatadogHttpReporter implements MetricReporter, Scheduled {
private static final Logger LOGGER = LoggerFactory.getLogger(DatadogHttpReporter.class);
private static final String HOST_VARIABLE = "<host>";
private final Map<Gauge, DGauge> gauges = new ConcurrentHashMap<>();
private final Map<Counter, DCounter> counters = new ConcurrentHashMap<>();
private final Map<Meter, DMeter> meters = new ConcurrentHashMap<>();
private DatadogHttpClient client;
private List<String> configTags;
public static final String API_KEY = "apikey";
public static final String TAGS = "tags";
@Override
public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
final String name = group.getMetricIdentifier(metricName);
List<String> tags = new ArrayList<>(configTags);
tags.addAll(getTagsFromMetricGroup(group));
String host = getHostFromMetricGroup(group);
if (metric instanceof Counter) {
Counter c = (Counter) metric;
counters.put(c, new DCounter(c, name, host, tags));
} else if (metric instanceof Gauge) {
Gauge g = (Gauge) metric;
gauges.put(g, new DGauge(g, name, host, tags));
} else if (metric instanceof Meter) {
Meter m = (Meter) metric;
meters.put(m, new DMeter(m, name, host, tags));
} else if (metric instanceof Histogram) {
LOGGER.warn("Cannot add {} because Datadog HTTP API doesn't support Histogram", metricName);
} else {
LOGGER.warn("Cannot add unknown metric type {}. This indicates that the reporter " +
"does not support this metric type.", metric.getClass().getName());
}
}
@Override
public void notifyOfRemovedMetric(Metric metric, String metricName, MetricGroup group) {
if (metric instanceof Counter) {
counters.remove(metric);
} else if (metric instanceof Gauge) {
gauges.remove(metric);
} else if (metric instanceof Meter) {
meters.remove(metric);
} else if (metric instanceof Histogram) {
} else {
LOGGER.warn("Cannot remove unknown metric type {}. This indicates that the reporter " +
"does not support this metric type.", metric.getClass().getName());
}
}
@Override
public void open(MetricConfig config) {
client = new DatadogHttpClient(config.getString(API_KEY, null));
LOGGER.info("Configured DatadogHttpReporter");
configTags = getTagsFromConfig(config.getString(TAGS, ""));
}
@Override
public void close() {
client.close();
LOGGER.info("Shut down DatadogHttpReporter");
}
@Override
/**
* Get config tags from config 'metrics.reporter.dghttp.tags'.
*/
private List<String> getTagsFromConfig(String str) {
return Arrays.asList(str.split(","));
}
/**
* Get tags from MetricGroup
*/
private List<String> getTagsFromMetricGroup(MetricGroup metricGroup) {
List<String> tags = new ArrayList<>();
for (Map.Entry<String, String> entry: metricGroup.getAllVariables().entrySet()) {
if (!entry.getKey().equals(HOST_VARIABLE)) {
tags.add(getVariableName(entry.getKey()) + ":" + entry.getValue());
}
}
return tags;
}
private String getHostFromMetricGroup(MetricGroup metricGroup) {
return metricGroup.getAllVariables().get(HOST_VARIABLE);
}
/**
* Removes leading and trailing angle brackets.
*/
private String getVariableName(String str) {
return str.substring(1, str.length() - 1);
}
/**
* Compact metrics in batch, serialize them, and send to Datadog via HTTP.
*/
static class DatadogHttpRequest {
private final DSeries series;
public DatadogHttpRequest() {
series = new DSeries();
}
public void addGauge(DGauge gauge) {
series.addMetric(gauge);
}
public void addCounter(DCounter counter) {
series.addMetric(counter);
}
public void addMeter(DMeter meter) {
series.addMetric(meter);
}
public DSeries getSeries() {
return series;
}
}
} |
Your idea is good and easy to implement. I've created the PR#22 (https://github.com/dataArtisans/flink-benchmarks/pull/22) for benchmarks. | public boolean processInput() throws Exception {
// Lazily finish setup (metrics, first input selection) on the first invocation.
if (!initialized) {
    initialize();
}
// Pick the next input (0 or 1) fairly among the inputs that are both selected
// and currently available.
int readingInputIndex = inputSelection.fairSelectNextIndexOutOf2(availableInputsMask, lastReadInputIndex);
if (readingInputIndex == -1) {
    // Nothing readable right now: block until the selection can be satisfied.
    return waitForAvailableInput(inputSelection);
}
lastReadInputIndex = readingInputIndex;
if (availableInputsMask < 3 && inputSelection.isALLMaskOf2()) {
    // Both inputs are selected but at least one is flagged unavailable:
    // re-probe the other input so fairness is preserved on the next call.
    checkAndSetAvailable(1 - readingInputIndex);
}
StreamElement recordOrMark;
if (readingInputIndex == 0) {
    recordOrMark = input1.pollNextNullable();
    if (recordOrMark != null) {
        processElement1(recordOrMark, input1.getLastChannel());
    }
} else {
    recordOrMark = input2.pollNextNullable();
    if (recordOrMark != null) {
        processElement2(recordOrMark, input2.getLastChannel());
    }
}
// A null poll means the chosen input is drained for now; mark it unavailable
// and stop processing only when the whole selection is finished.
if (recordOrMark == null && setUnavailableAndCheckFinished(readingInputIndex)) {
    return false;
}
return true;
} | checkAndSetAvailable(1 - readingInputIndex); | public boolean processInput() throws Exception {
// Lazily finish setup on the first invocation.
if (!isPrepared) {
    prepareForProcessing();
}
// Select the next readable input; -1 means no further input can be read.
int readingInputIndex = selectNextReadingInputIndex();
if (readingInputIndex == -1) {
    return false;
}
lastReadInputIndex = readingInputIndex;
StreamElement recordOrMark;
if (readingInputIndex == 0) {
    recordOrMark = input1.pollNextNullable();
    if (recordOrMark != null) {
        processElement1(recordOrMark, input1.getLastChannel());
    }
} else {
    recordOrMark = input2.pollNextNullable();
    if (recordOrMark != null) {
        processElement2(recordOrMark, input2.getLastChannel());
    }
}
// A null poll means the chosen input is drained for now.
if (recordOrMark == null) {
    setUnavailableInput(readingInputIndex);
}
// Keep processing until all selected inputs are finished.
return !checkFinished();
} | class StreamTwoInputSelectableProcessor<IN1, IN2> {
private static final Logger LOG = LoggerFactory.getLogger(StreamTwoInputSelectableProcessor.class);
private static final CompletableFuture<?> UNAVAILABLE = new CompletableFuture<>();
private final TwoInputStreamOperator<IN1, IN2, ?> streamOperator;
private final InputSelectable inputSelector;
private final Object lock;
private final StreamTaskInput input1;
private final StreamTaskInput input2;
/**
* Valves that control how watermarks and stream statuses from the 2 inputs are forwarded.
*/
private final StatusWatermarkValve statusWatermarkValve1;
private final StatusWatermarkValve statusWatermarkValve2;
/**
* Stream status for the two inputs. We need to keep track for determining when
* to forward stream status changes downstream.
*/
private StreamStatus firstStatus;
private StreamStatus secondStatus;
private int availableInputsMask;
private int lastReadInputIndex;
private InputSelection inputSelection;
private Counter numRecordsIn;
private boolean initialized;
public StreamTwoInputSelectableProcessor(
Collection<InputGate> inputGates1,
Collection<InputGate> inputGates2,
TypeSerializer<IN1> inputSerializer1,
TypeSerializer<IN2> inputSerializer2,
Object lock,
IOManager ioManager,
StreamStatusMaintainer streamStatusMaintainer,
TwoInputStreamOperator<IN1, IN2, ?> streamOperator,
WatermarkGauge input1WatermarkGauge,
WatermarkGauge input2WatermarkGauge) {
checkState(streamOperator instanceof InputSelectable);
this.streamOperator = checkNotNull(streamOperator);
this.inputSelector = (InputSelectable) streamOperator;
this.lock = checkNotNull(lock);
InputGate unionedInputGate1 = InputGateUtil.createInputGate(inputGates1.toArray(new InputGate[0]));
InputGate unionedInputGate2 = InputGateUtil.createInputGate(inputGates2.toArray(new InputGate[0]));
this.input1 = new StreamTaskNetworkInput(new BarrierDiscarder(unionedInputGate1), inputSerializer1, ioManager, 0);
this.input2 = new StreamTaskNetworkInput(new BarrierDiscarder(unionedInputGate2), inputSerializer2, ioManager, 1);
this.statusWatermarkValve1 = new StatusWatermarkValve(
unionedInputGate1.getNumberOfInputChannels(),
new ForwardingValveOutputHandler(streamOperator, lock, streamStatusMaintainer, input1WatermarkGauge, 0));
this.statusWatermarkValve2 = new StatusWatermarkValve(
unionedInputGate2.getNumberOfInputChannels(),
new ForwardingValveOutputHandler(streamOperator, lock, streamStatusMaintainer, input2WatermarkGauge, 1));
this.firstStatus = StreamStatus.ACTIVE;
this.secondStatus = StreamStatus.ACTIVE;
this.availableInputsMask = (int) new InputSelection.Builder().select(1).select(2).build().getInputMask();
this.lastReadInputIndex = 1;
this.initialized = false;
}
public void cleanup() throws Exception {
Exception ex = null;
try {
input1.close();
} catch (Exception e) {
ex = ExceptionUtils.firstOrSuppressed(e, ex);
}
try {
input2.close();
} catch (Exception e) {
ex = ExceptionUtils.firstOrSuppressed(e, ex);
}
if (ex != null) {
throw ex;
}
}
private void processElement1(StreamElement recordOrMark, int channel) throws Exception {
if (recordOrMark.isRecord()) {
StreamRecord<IN1> record = recordOrMark.asRecord();
synchronized (lock) {
numRecordsIn.inc();
streamOperator.setKeyContextElement1(record);
streamOperator.processElement1(record);
inputSelection = inputSelector.nextSelection();
}
}
else if (recordOrMark.isWatermark()) {
statusWatermarkValve1.inputWatermark(recordOrMark.asWatermark(), channel);
} else if (recordOrMark.isStreamStatus()) {
statusWatermarkValve1.inputStreamStatus(recordOrMark.asStreamStatus(), channel);
} else if (recordOrMark.isLatencyMarker()) {
synchronized (lock) {
streamOperator.processLatencyMarker1(recordOrMark.asLatencyMarker());
}
} else {
throw new UnsupportedOperationException("Unknown type of StreamElement on input1");
}
}
private void processElement2(StreamElement recordOrMark, int channel) throws Exception {
if (recordOrMark.isRecord()) {
StreamRecord<IN2> record = recordOrMark.asRecord();
synchronized (lock) {
numRecordsIn.inc();
streamOperator.setKeyContextElement2(record);
streamOperator.processElement2(record);
inputSelection = inputSelector.nextSelection();
}
}
else if (recordOrMark.isWatermark()) {
statusWatermarkValve2.inputWatermark(recordOrMark.asWatermark(), channel);
} else if (recordOrMark.isStreamStatus()) {
statusWatermarkValve2.inputStreamStatus(recordOrMark.asStreamStatus(), channel);
} else if (recordOrMark.isLatencyMarker()) {
synchronized (lock) {
streamOperator.processLatencyMarker2(recordOrMark.asLatencyMarker());
}
} else {
throw new UnsupportedOperationException("Unknown type of StreamElement on input2");
}
}
/**
 * One-time setup before the first element is processed: asks the operator
 * for its initial input selection and resolves the records-in counter,
 * falling back to a throwaway {@link SimpleCounter} if the metric group
 * cannot provide one.
 */
private void initialize() {
inputSelection = inputSelector.nextSelection();
try {
numRecordsIn = ((OperatorMetricGroup) streamOperator
.getMetricGroup()).getIOMetricGroup().getNumRecordsInCounter();
} catch (Exception e) {
// Metrics are best-effort; processing must not fail because of them.
LOG.warn("An exception occurred during the metrics setup.", e);
numRecordsIn = new SimpleCounter();
}
initialized = true;
}
/**
 * Re-marks the given input as available if it has not finished and its
 * availability future has already completed.
 */
private void checkAndSetAvailable(int inputIndex) {
    StreamTaskInput candidate = getInput(inputIndex);
    if (candidate.isFinished()) {
        return;
    }
    if (candidate.isAvailable().isDone()) {
        setAvailableInput(inputIndex);
    }
}
/**
 * Blocks until at least one input allowed by the given selection becomes
 * available, updating the availability mask for every input that completed.
 *
 * @param inputSelection which inputs may be read next
 * @return false if every selected input has already finished, true otherwise
 * @throws IOException if the selection names a single input that has finished
 * @throws ExecutionException if an availability future fails
 * @throws InterruptedException if the wait is interrupted
 */
private boolean waitForAvailableInput(InputSelection inputSelection)
throws ExecutionException, InterruptedException, IOException {
    if (inputSelection.isALLMaskOf2()) {
        return waitForAvailableEitherInput();
    }
    waitForOneInput(
        (inputSelection.getInputMask() == InputSelection.FIRST.getInputMask()) ? input1 : input2);
    return true;
}

/**
 * Waits until input1 or input2 becomes available.
 *
 * @return false if both inputs have finished, true once at least one is available
 */
private boolean waitForAvailableEitherInput()
throws ExecutionException, InterruptedException {
    // A finished input is represented by the never-completing UNAVAILABLE future
    // so it can never win the anyOf() race below.
    CompletableFuture<?> future1 = input1.isFinished() ? UNAVAILABLE : input1.isAvailable();
    CompletableFuture<?> future2 = input2.isFinished() ? UNAVAILABLE : input2.isAvailable();
    if (future1 == UNAVAILABLE && future2 == UNAVAILABLE) {
        return false;
    }
    CompletableFuture.anyOf(future1, future2).get();
    // Both futures may have completed; record every input that is ready.
    if (future1.isDone()) {
        setAvailableInput(input1.getInputIndex());
    }
    if (future2.isDone()) {
        setAvailableInput(input2.getInputIndex());
    }
    return true;
}

/**
 * Waits until the single given input becomes available.
 *
 * @throws IOException if the input has already finished and can never be read
 */
private void waitForOneInput(StreamTaskInput input)
throws IOException, ExecutionException, InterruptedException {
    if (input.isFinished()) {
        throw new IOException("Could not read the finished input: input" + (input.getInputIndex() + 1) + ".");
    }
    input.isAvailable().get();
    setAvailableInput(input.getInputIndex());
}
/**
 * Marks the given input as unavailable and, if it has also finished,
 * finishes it. Returns true only when finishing it reveals that the other
 * input has finished too (i.e. there is nothing left to read).
 */
private boolean setUnavailableAndCheckFinished(int inputIndex) {
    setUnavailableInput(inputIndex);
    // Short-circuit: finishInput() runs (and flips the selection) only for a
    // finished input, exactly as before.
    return getInput(inputIndex).isFinished() && finishInput(inputIndex);
}
// Sets the bit for the given input in the availability mask.
private void setAvailableInput(int inputIndex) {
availableInputsMask |= 1 << inputIndex;
}
// Clears the bit for the given input in the availability mask.
private void setUnavailableInput(int inputIndex) {
availableInputsMask &= ~(1 << inputIndex);
}
/**
 * Finishes the given input by forcing the selection to the other input,
 * and reports whether the other input has finished as well.
 *
 * @return true when both inputs are finished
 */
private boolean finishInput(int inputIndex) {
    boolean firstInputFinished = (inputIndex == 0);
    inputSelection = firstInputFinished ? InputSelection.SECOND : InputSelection.FIRST;
    return (firstInputFinished ? input2 : input1).isFinished();
}
// Maps input index 0 -> input1, any other index -> input2.
private StreamTaskInput getInput(int inputIndex) {
return inputIndex == 0 ? input1 : input2;
}
/**
 * Receives the aligned watermarks and stream statuses emitted by one input's
 * {@link StatusWatermarkValve} and forwards them to the operator under the
 * shared lock. One handler instance exists per input; {@code inputIndex}
 * selects which operator hook and which status field it updates.
 */
private class ForwardingValveOutputHandler implements StatusWatermarkValve.ValveOutputHandler {
private final TwoInputStreamOperator<IN1, IN2, ?> operator;
private final Object lock;
private final StreamStatusMaintainer streamStatusMaintainer;
private final WatermarkGauge inputWatermarkGauge;
private final int inputIndex;
private ForwardingValveOutputHandler(
TwoInputStreamOperator<IN1, IN2, ?> operator,
Object lock,
StreamStatusMaintainer streamStatusMaintainer,
WatermarkGauge inputWatermarkGauge,
int inputIndex) {
this.operator = checkNotNull(operator);
this.lock = checkNotNull(lock);
this.streamStatusMaintainer = checkNotNull(streamStatusMaintainer);
this.inputWatermarkGauge = inputWatermarkGauge;
this.inputIndex = inputIndex;
}
@Override
public void handleWatermark(Watermark watermark) {
try {
synchronized (lock) {
// Expose the watermark via metrics, then hand it to the matching
// operator hook for this input.
inputWatermarkGauge.setCurrentWatermark(watermark.getTimestamp());
if (inputIndex == 0) {
operator.processWatermark1(watermark);
} else {
operator.processWatermark2(watermark);
}
}
} catch (Exception e) {
throw new RuntimeException("Exception occurred while processing valve output watermark of input"
+ (inputIndex + 1) + ": ", e);
}
}
@Override
public void handleStreamStatus(StreamStatus streamStatus) {
try {
synchronized (lock) {
// Record this input's status and look up the other input's status;
// the task goes IDLE only when both inputs are idle, and ACTIVE as
// soon as either becomes active.
final StreamStatus anotherStreamStatus;
if (inputIndex == 0) {
firstStatus = streamStatus;
anotherStreamStatus = secondStatus;
} else {
secondStatus = streamStatus;
anotherStreamStatus = firstStatus;
}
if (!streamStatus.equals(streamStatusMaintainer.getStreamStatus())) {
if (streamStatus.isActive()) {
streamStatusMaintainer.toggleStreamStatus(StreamStatus.ACTIVE);
} else if (anotherStreamStatus.isIdle()) {
streamStatusMaintainer.toggleStreamStatus(StreamStatus.IDLE);
}
}
}
} catch (Exception e) {
throw new RuntimeException("Exception occurred while processing valve output stream status of input"
+ (inputIndex + 1) + ": ", e);
}
}
}
} | class StreamTwoInputSelectableProcessor<IN1, IN2> {
private static final Logger LOG = LoggerFactory.getLogger(StreamTwoInputSelectableProcessor.class);
private static final CompletableFuture<?> UNAVAILABLE = new CompletableFuture<>();
private final TwoInputStreamOperator<IN1, IN2, ?> streamOperator;
private final InputSelectable inputSelector;
private final Object lock;
private final StreamTaskInput input1;
private final StreamTaskInput input2;
/**
* Valves that control how watermarks and stream statuses from the 2 inputs are forwarded.
*/
private final StatusWatermarkValve statusWatermarkValve1;
private final StatusWatermarkValve statusWatermarkValve2;
/**
* Stream status for the two inputs. We need to keep track for determining when
* to forward stream status changes downstream.
*/
private StreamStatus firstStatus;
private StreamStatus secondStatus;
private int availableInputsMask;
private int lastReadInputIndex;
private InputSelection inputSelection;
private Counter numRecordsIn;
private boolean isPrepared;
public StreamTwoInputSelectableProcessor(
Collection<InputGate> inputGates1,
Collection<InputGate> inputGates2,
TypeSerializer<IN1> inputSerializer1,
TypeSerializer<IN2> inputSerializer2,
Object lock,
IOManager ioManager,
StreamStatusMaintainer streamStatusMaintainer,
TwoInputStreamOperator<IN1, IN2, ?> streamOperator,
WatermarkGauge input1WatermarkGauge,
WatermarkGauge input2WatermarkGauge) {
checkState(streamOperator instanceof InputSelectable);
this.streamOperator = checkNotNull(streamOperator);
this.inputSelector = (InputSelectable) streamOperator;
this.lock = checkNotNull(lock);
InputGate unionedInputGate1 = InputGateUtil.createInputGate(inputGates1.toArray(new InputGate[0]));
InputGate unionedInputGate2 = InputGateUtil.createInputGate(inputGates2.toArray(new InputGate[0]));
this.input1 = new StreamTaskNetworkInput(new BarrierDiscarder(unionedInputGate1), inputSerializer1, ioManager, 0);
this.input2 = new StreamTaskNetworkInput(new BarrierDiscarder(unionedInputGate2), inputSerializer2, ioManager, 1);
this.statusWatermarkValve1 = new StatusWatermarkValve(
unionedInputGate1.getNumberOfInputChannels(),
new ForwardingValveOutputHandler(streamOperator, lock, streamStatusMaintainer, input1WatermarkGauge, 0));
this.statusWatermarkValve2 = new StatusWatermarkValve(
unionedInputGate2.getNumberOfInputChannels(),
new ForwardingValveOutputHandler(streamOperator, lock, streamStatusMaintainer, input2WatermarkGauge, 1));
this.firstStatus = StreamStatus.ACTIVE;
this.secondStatus = StreamStatus.ACTIVE;
this.availableInputsMask = (int) new InputSelection.Builder().select(1).select(2).build().getInputMask();
this.lastReadInputIndex = 1;
this.isPrepared = false;
}
public void cleanup() throws Exception {
Exception ex = null;
try {
input1.close();
} catch (Exception e) {
ex = ExceptionUtils.firstOrSuppressed(e, ex);
}
try {
input2.close();
} catch (Exception e) {
ex = ExceptionUtils.firstOrSuppressed(e, ex);
}
if (ex != null) {
throw ex;
}
}
/**
 * Picks the index of the next input to read, alternating fairly between the
 * inputs that are both selected and available.
 *
 * @return the input index to read next, or -1 when every selected input has finished
 */
private int selectNextReadingInputIndex()
throws InterruptedException, ExecutionException, IOException {
int readingInputIndex;
while ((readingInputIndex = inputSelection.fairSelectNextIndexOutOf2(availableInputsMask, lastReadInputIndex)) == -1) {
// No selected input is currently available; block until one is, or bail
// out if all selected inputs are finished.
if (!waitForAvailableInput(inputSelection)) {
return -1;
}
}
// mask < 3 means at least one availability bit is clear; when both inputs
// may be read, refresh the other input's bit so fair selection keeps seeing it.
if (availableInputsMask < 3 && inputSelection.isALLMaskOf2()) {
checkAndSetAvailable(1 - readingInputIndex);
}
return readingInputIndex;
}
private void processElement1(StreamElement recordOrMark, int channel) throws Exception {
if (recordOrMark.isRecord()) {
StreamRecord<IN1> record = recordOrMark.asRecord();
synchronized (lock) {
numRecordsIn.inc();
streamOperator.setKeyContextElement1(record);
streamOperator.processElement1(record);
inputSelection = inputSelector.nextSelection();
}
}
else if (recordOrMark.isWatermark()) {
statusWatermarkValve1.inputWatermark(recordOrMark.asWatermark(), channel);
} else if (recordOrMark.isStreamStatus()) {
statusWatermarkValve1.inputStreamStatus(recordOrMark.asStreamStatus(), channel);
} else if (recordOrMark.isLatencyMarker()) {
synchronized (lock) {
streamOperator.processLatencyMarker1(recordOrMark.asLatencyMarker());
}
} else {
throw new UnsupportedOperationException("Unknown type of StreamElement on input1");
}
}
private void processElement2(StreamElement recordOrMark, int channel) throws Exception {
if (recordOrMark.isRecord()) {
StreamRecord<IN2> record = recordOrMark.asRecord();
synchronized (lock) {
numRecordsIn.inc();
streamOperator.setKeyContextElement2(record);
streamOperator.processElement2(record);
inputSelection = inputSelector.nextSelection();
}
}
else if (recordOrMark.isWatermark()) {
statusWatermarkValve2.inputWatermark(recordOrMark.asWatermark(), channel);
} else if (recordOrMark.isStreamStatus()) {
statusWatermarkValve2.inputStreamStatus(recordOrMark.asStreamStatus(), channel);
} else if (recordOrMark.isLatencyMarker()) {
synchronized (lock) {
streamOperator.processLatencyMarker2(recordOrMark.asLatencyMarker());
}
} else {
throw new UnsupportedOperationException("Unknown type of StreamElement on input2");
}
}
/**
 * One-time setup before the first element is processed: asks the operator
 * for its initial input selection and resolves the records-in counter,
 * falling back to a throwaway {@link SimpleCounter} if the metric group
 * cannot provide one.
 */
private void prepareForProcessing() {
inputSelection = inputSelector.nextSelection();
try {
numRecordsIn = ((OperatorMetricGroup) streamOperator
.getMetricGroup()).getIOMetricGroup().getNumRecordsInCounter();
} catch (Exception e) {
// Metrics are best-effort; processing must not fail because of them.
LOG.warn("An exception occurred during the metrics setup.", e);
numRecordsIn = new SimpleCounter();
}
isPrepared = true;
}
private void checkAndSetAvailable(int inputIndex) {
StreamTaskInput input = getInput(inputIndex);
if (!input.isFinished() && input.isAvailable().isDone()) {
setAvailableInput(inputIndex);
}
}
/**
* @return false if both of the inputs are finished, true otherwise.
*/
private boolean waitForAvailableInput(InputSelection inputSelection)
throws ExecutionException, InterruptedException, IOException {
if (inputSelection.isALLMaskOf2()) {
return waitForAvailableEitherInput();
} else {
waitForOneInput(
(inputSelection.getInputMask() == InputSelection.FIRST.getInputMask()) ? input1 : input2);
return true;
}
}
private boolean waitForAvailableEitherInput()
throws ExecutionException, InterruptedException {
CompletableFuture<?> future1 = input1.isFinished() ? UNAVAILABLE : input1.isAvailable();
CompletableFuture<?> future2 = input2.isFinished() ? UNAVAILABLE : input2.isAvailable();
if (future1 == UNAVAILABLE && future2 == UNAVAILABLE) {
return false;
}
CompletableFuture.anyOf(future1, future2).get();
if (future1.isDone()) {
setAvailableInput(input1.getInputIndex());
}
if (future2.isDone()) {
setAvailableInput(input2.getInputIndex());
}
return true;
}
private void waitForOneInput(StreamTaskInput input)
throws IOException, ExecutionException, InterruptedException {
if (input.isFinished()) {
throw new IOException("Could not read the finished input: input" + (input.getInputIndex() + 1) + ".");
}
input.isAvailable().get();
setAvailableInput(input.getInputIndex());
}
/**
 * If the most recently read input has finished, redirects the selection to
 * the other input so the next read does not target it.
 *
 * @return true when both inputs have finished
 */
private boolean checkFinished() {
    if (getInput(lastReadInputIndex).isFinished()) {
        if (lastReadInputIndex == 0) {
            inputSelection = InputSelection.SECOND;
        } else {
            inputSelection = InputSelection.FIRST;
        }
    }
    return input1.isFinished() && input2.isFinished();
}
private void setAvailableInput(int inputIndex) {
availableInputsMask |= 1 << inputIndex;
}
private void setUnavailableInput(int inputIndex) {
availableInputsMask &= ~(1 << inputIndex);
}
private StreamTaskInput getInput(int inputIndex) {
return inputIndex == 0 ? input1 : input2;
}
private class ForwardingValveOutputHandler implements StatusWatermarkValve.ValveOutputHandler {
private final TwoInputStreamOperator<IN1, IN2, ?> operator;
private final Object lock;
private final StreamStatusMaintainer streamStatusMaintainer;
private final WatermarkGauge inputWatermarkGauge;
private final int inputIndex;
private ForwardingValveOutputHandler(
TwoInputStreamOperator<IN1, IN2, ?> operator,
Object lock,
StreamStatusMaintainer streamStatusMaintainer,
WatermarkGauge inputWatermarkGauge,
int inputIndex) {
this.operator = checkNotNull(operator);
this.lock = checkNotNull(lock);
this.streamStatusMaintainer = checkNotNull(streamStatusMaintainer);
this.inputWatermarkGauge = inputWatermarkGauge;
this.inputIndex = inputIndex;
}
@Override
public void handleWatermark(Watermark watermark) {
try {
synchronized (lock) {
inputWatermarkGauge.setCurrentWatermark(watermark.getTimestamp());
if (inputIndex == 0) {
operator.processWatermark1(watermark);
} else {
operator.processWatermark2(watermark);
}
}
} catch (Exception e) {
throw new RuntimeException("Exception occurred while processing valve output watermark of input"
+ (inputIndex + 1) + ": ", e);
}
}
@Override
public void handleStreamStatus(StreamStatus streamStatus) {
try {
synchronized (lock) {
final StreamStatus anotherStreamStatus;
if (inputIndex == 0) {
firstStatus = streamStatus;
anotherStreamStatus = secondStatus;
} else {
secondStatus = streamStatus;
anotherStreamStatus = firstStatus;
}
if (!streamStatus.equals(streamStatusMaintainer.getStreamStatus())) {
if (streamStatus.isActive()) {
streamStatusMaintainer.toggleStreamStatus(StreamStatus.ACTIVE);
} else if (anotherStreamStatus.isIdle()) {
streamStatusMaintainer.toggleStreamStatus(StreamStatus.IDLE);
}
}
}
} catch (Exception e) {
throw new RuntimeException("Exception occurred while processing valve output stream status of input"
+ (inputIndex + 1) + ": ", e);
}
}
}
} |
Actually, why isn't the "ErrorCodeValue" label part of the format string itself, instead of being passed in as a separate format argument?
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue", | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
/**
 * Gets the target for this exception.
 *
 * @return The target for this exception.
 */
@Override
public String getTarget() {
    return this.target;
}
/**
 * Gets the String value of TextAnalyticsErrorCode for this exception.
 *
 * @return The String value of TextAnalyticsErrorCode for this exception,
 * or {@code null} if the service did not supply one.
 */
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
/**
 * Gets the target for this exception.
 *
 * @return The target for this exception.
 */
@Override
public String getTarget() {
    return this.target;
}
/**
 * Gets the TextAnalyticsErrorCode for this exception.
 *
 * @return The TextAnalyticsErrorCode for this exception.
 */
public TextAnalyticsErrorCode getErrorCodeValue() {
// The raw code string is stored and converted lazily to the enum wrapper.
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
The most risky bug in this code is: A potential resource leak due to the improper handling of `client` when exceptions other than `NoSuchMethodException` are thrown. You can modify the code like this: ```java public Table getTable(String dbName, String tableName) { try (Timer ignored = Tracers.watchScope(EXTERNAL, "HMS.getTable")) { RecyclableClient client = null; StarRocksConnectorException connectionException = null; Object[] args = {dbName, tableName}; String messageIfError = String.format("Failed to get table [%s.%s]", dbName, tableName); try { client = getClient(); Class<?>[] argClasses = ClassUtils.getCompatibleParamClasses(args); Method method = client.hiveClient.getClass().getDeclaredMethod("getTable", argClasses); return (Table) method.invoke(client.hiveClient, args); } catch (NoSuchMethodException e) { // In HMS, throw NoSuchObjectException if db or table does not exist LOG.warn("{} table not find", String.format("[%s.%s]", dbName, tableName)); return null; } catch (Throwable e) { LOG.error(messageIfError, e); if (client != null) { client.close(); } connectionException = new StarRocksConnectorException(messageIfError + ", msg: " + e.getMessage(), e); throw connectionException; } finally { if (connectionException == null && client != null) { client.finish(); } } } } ``` Explanation: - The main change happens within the catch block for `Throwable`. We ensure that `client` is closed if it's not null before throwing a new exception to avoid a potential resource leak. - Next, within the `finally` block, we have a condition that now only calls `client.finish()` if there was no exception (`connectionException == null`) and `client` is not null. If there is an exception, we expect the catch block to close the client instead. - The `close()` method should be called on the `client` object whenever we are done with it, no matter if an exception has been thrown or not, to make sure resources are properly released. 
The original issue was that `client` could potentially never be closed if any exception occurred because the `close()` call was conditioned to a null check for `client` and a non-null check for `connectionException`, which is a logical mistake because `connectionException` will always be null at that point within the `finally` block unless an exception has been thrown and caught above. | public Table getTable(String dbName, String tableName) {
try (Timer ignored = Tracers.watchScope(EXTERNAL, "HMS.getTable")) {
RecyclableClient client = null;
StarRocksConnectorException connectionException = null;
Object[] args = {dbName, tableName};
String messageIfError = String.format("Failed to get table [%s.%s]", dbName, tableName);
try {
client = getClient();
Class<?>[] argClasses = ClassUtils.getCompatibleParamClasses(args);
Method method = client.hiveClient.getClass().getDeclaredMethod("getTable", argClasses);
return (Table) method.invoke(client.hiveClient, args);
} catch (NoSuchMethodException e) {
LOG.warn("{} table not find", String.format("[%s.%s]", dbName, tableName));
return null;
} catch (Throwable e) {
LOG.error(messageIfError, e);
connectionException = new StarRocksConnectorException(messageIfError + ", msg: " + e.getMessage(), e);
throw connectionException;
} finally {
if (client == null && connectionException != null) {
LOG.error("Failed to get hive client. {}", connectionException.getMessage());
} else if (connectionException != null) {
LOG.error("An exception occurred when using the current long link " +
"to access metastore. msg: {}", messageIfError);
client.close();
} else if (client != null) {
client.finish();
}
}
}
} | public Table getTable(String dbName, String tableName) {
try (Timer ignored = Tracers.watchScope(EXTERNAL, "HMS.getTable")) {
return callRPC("getTable", String.format("Failed to get table [%s.%s]", dbName, tableName),
dbName, tableName);
}
} | class relies on opening
if (Thread.currentThread().getContextClassLoader() == null) {
Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader());
} | class relies on opening
if (Thread.currentThread().getContextClassLoader() == null) {
Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader());
} | |
I think this comment would read better as method-level Javadoc on the method itself.
Analyzer dummyRootAnalyzer = new Analyzer(analyzer.getCatalog(), analyzer.getContext());
QueryStmt tmpStmt = queryStmt.clone();
tmpStmt.analyze(dummyRootAnalyzer);
this.queryStmt = tmpStmt;
ArrayList<Expr> resultExprs = getQueryStmt().getResultExprs();
for (Expr expr : resultExprs) {
if (expr.getType().isDecimalV2()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_UNSUPPORTED_TYPE_IN_CTAS, expr.getType());
}
}
if (columnNames != null && columnNames.size() != resultExprs.size()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_COL_NUMBER_NOT_MATCH);
}
} | public void analyze(Analyzer analyzer) throws UserException {
Analyzer dummyRootAnalyzer = new Analyzer(analyzer.getCatalog(), analyzer.getContext());
QueryStmt tmpStmt = queryStmt.clone();
tmpStmt.analyze(dummyRootAnalyzer);
this.queryStmt = tmpStmt;
ArrayList<Expr> resultExprs = getQueryStmt().getResultExprs();
for (Expr expr : resultExprs) {
if (expr.getType().isDecimalV2()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_UNSUPPORTED_TYPE_IN_CTAS, expr.getType());
}
}
if (columnNames != null && columnNames.size() != resultExprs.size()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_COL_NUMBER_NOT_MATCH);
}
} | class CreateTableAsSelectStmt extends DdlStmt {
@Getter
private final CreateTableStmt createTableStmt;
@Getter
private final List<String> columnNames;
@Getter
private QueryStmt queryStmt;
@Getter
private final InsertStmt insertStmt;
public CreateTableAsSelectStmt(CreateTableStmt createTableStmt,
List<String> columnNames, QueryStmt queryStmt) {
this.createTableStmt = createTableStmt;
this.columnNames = columnNames;
this.queryStmt = queryStmt;
this.insertStmt = new InsertStmt(createTableStmt.getDbTbl(), queryStmt.clone());
}
@Override
} | class CreateTableAsSelectStmt extends DdlStmt {
@Getter
private final CreateTableStmt createTableStmt;
@Getter
private final List<String> columnNames;
@Getter
private QueryStmt queryStmt;
@Getter
private final InsertStmt insertStmt;
protected CreateTableAsSelectStmt(CreateTableStmt createTableStmt,
List<String> columnNames, QueryStmt queryStmt) {
this.createTableStmt = createTableStmt;
this.columnNames = columnNames;
this.queryStmt = queryStmt;
this.insertStmt = new InsertStmt(createTableStmt.getDbTbl(), queryStmt.clone());
}
/**
* Cannot analyze insertStmt because the table has not been created yet.
*/
@Override
} | |
Since the recommended line length is 120, this line can be merged with the line above. | public void visit(BLangRecordLiteral recordLiteral) {
List<BLangRecordLiteral.BLangRecordKeyValue> keyValuePairs = recordLiteral.keyValuePairs;
keyValuePairs.forEach(kv -> {
analyzeExpr(kv.valueExpr);
});
Set<Object> names = new TreeSet<>((l, r) -> l.equals(r) ? 0 : 1);
for (BLangRecordLiteral.BLangRecordKeyValue recFieldDecl : keyValuePairs) {
BLangExpression key = recFieldDecl.getKey();
if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef keyRef = (BLangSimpleVarRef) key;
if (names.contains(keyRef.variableName.value)) {
String assigneeType = recordLiteral.parent.type.getKind().typeName();
this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL,
assigneeType, keyRef);
}
names.add(keyRef.variableName.value);
} else if (key.getKind() == NodeKind.LITERAL) {
BLangLiteral keyLiteral = (BLangLiteral) key;
if (names.contains(keyLiteral.value)) {
String assigneeType = recordLiteral.parent.type.getKind().typeName();
this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL,
assigneeType, keyLiteral);
}
}
}
} | assigneeType, keyRef); | public void visit(BLangRecordLiteral recordLiteral) {
List<BLangRecordLiteral.BLangRecordKeyValue> keyValuePairs = recordLiteral.keyValuePairs;
keyValuePairs.forEach(kv -> {
analyzeExpr(kv.valueExpr);
});
Set<Object> names = new TreeSet<>((l, r) -> l.equals(r) ? 0 : 1);
for (BLangRecordLiteral.BLangRecordKeyValue recFieldDecl : keyValuePairs) {
BLangExpression key = recFieldDecl.getKey();
if (key.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef keyRef = (BLangSimpleVarRef) key;
if (names.contains(keyRef.variableName.value)) {
String assigneeType = recordLiteral.parent.type.getKind().typeName();
this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, assigneeType, keyRef);
}
names.add(keyRef.variableName.value);
} else if (key.getKind() == NodeKind.LITERAL) {
BLangLiteral keyLiteral = (BLangLiteral) key;
if (names.contains(keyLiteral.value)) {
String assigneeType = recordLiteral.parent.type.getKind().typeName();
this.dlog.error(key.pos, DiagnosticCode.DUPLICATE_KEY_IN_RECORD_LITERAL, assigneeType, keyLiteral);
}
names.add(keyLiteral.value);
}
}
} | class CodeAnalyzer extends BLangNodeVisitor {
private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY =
new CompilerContext.Key<>();
private int loopCount;
private int transactionCount;
private boolean statementReturns;
private boolean lastStatement;
private boolean withinRetryBlock;
private int forkJoinCount;
private int workerCount;
private SymbolTable symTable;
private Types types;
private BLangDiagnosticLog dlog;
private TypeChecker typeChecker;
private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>();
private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> transactionWithinHandlerCheckStack = new Stack<>();
private BLangNode parent;
private Names names;
private SymbolEnv env;
/**
 * Returns the {@code CodeAnalyzer} registered in the given compiler context,
 * creating one on first use (the constructor registers itself in the context).
 */
public static CodeAnalyzer getInstance(CompilerContext context) {
    CodeAnalyzer registered = context.get(CODE_ANALYZER_KEY);
    return (registered != null) ? registered : new CodeAnalyzer(context);
}
public CodeAnalyzer(CompilerContext context) {
context.put(CODE_ANALYZER_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeChecker = TypeChecker.getInstance(context);
this.names = Names.getInstance(context);
}
private void resetFunction() {
this.resetStatementReturns();
}
private void resetStatementReturns() {
this.statementReturns = false;
}
private void resetLastStatement() {
this.lastStatement = false;
}
public BLangPackage analyze(BLangPackage pkgNode) {
pkgNode.accept(this);
return pkgNode;
}
@Override
public void visit(BLangPackage pkgNode) {
if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) {
return;
}
parent = pkgNode;
SymbolEnv pkgEnv = symTable.pkgEnvMap.get(pkgNode.symbol);
pkgNode.topLevelNodes.forEach(topLevelNode -> analyzeNode((BLangNode) topLevelNode, pkgEnv));
pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE);
parent = null;
}
private void analyzeNode(BLangNode node, SymbolEnv env) {
SymbolEnv prevEnv = this.env;
this.env = env;
BLangNode myParent = parent;
node.parent = parent;
parent = node;
node.accept(this);
parent = myParent;
this.env = prevEnv;
}
@Override
public void visit(BLangCompilationUnit compUnitNode) {
compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env));
}
/**
 * Analyzes object and record type bodies, then checks the exported type
 * itself — but only for public, non-finite type definitions.
 */
public void visit(BLangTypeDefinition typeDefinition) {
if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE
|| typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE) {
analyzeNode(typeDefinition.typeNode, env);
}
// Note operator precedence: && binds tighter than ||, so this skips
// non-public definitions OR public ones whose resolved type is FINITE.
if (!Symbols.isPublic(typeDefinition.symbol) ||
typeDefinition.symbol.type != null && TypeKind.FINITE.equals(typeDefinition.symbol.type.getKind())) {
return;
}
analyseType(typeDefinition.symbol.type, typeDefinition.pos);
}
@Override
public void visit(BLangFunction funcNode) {
if (funcNode.symbol.isTransactionHandler) {
transactionWithinHandlerCheckStack.push(true);
}
this.returnWithintransactionCheckStack.push(true);
this.doneWithintransactionCheckStack.push(true);
this.validateMainFunction(funcNode);
SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
this.visitInvocable(funcNode, funcEnv);
this.returnWithintransactionCheckStack.pop();
this.doneWithintransactionCheckStack.pop();
if (funcNode.symbol.isTransactionHandler) {
transactionWithinHandlerCheckStack.pop();
}
}
private void visitInvocable(BLangInvokableNode invNode, SymbolEnv invokableEnv) {
this.resetFunction();
try {
this.initNewWorkerActionSystem();
if (Symbols.isNative(invNode.symbol)) {
return;
}
boolean invokableReturns = invNode.returnTypeNode.type != symTable.nilType;
if (invNode.workers.isEmpty()) {
if (isPublicInvokableNode(invNode)) {
analyzeNode(invNode.returnTypeNode, invokableEnv);
}
/* the body can be null in the case of Object type function declarations */
if (invNode.body != null) {
analyzeNode(invNode.body, invokableEnv);
/* the function returns, but none of the statements surely returns */
if (invokableReturns && !this.statementReturns) {
this.dlog.error(invNode.pos, DiagnosticCode.INVOKABLE_MUST_RETURN,
invNode.getKind().toString().toLowerCase());
}
}
} else {
boolean workerReturns = false;
for (BLangWorker worker : invNode.workers) {
analyzeNode(worker, invokableEnv);
workerReturns = workerReturns || this.statementReturns;
this.resetStatementReturns();
}
if (invokableReturns && !workerReturns) {
this.dlog.error(invNode.pos, DiagnosticCode.ATLEAST_ONE_WORKER_MUST_RETURN,
invNode.getKind().toString().toLowerCase());
}
}
} finally {
this.finalizeCurrentWorkerActionSystem();
}
}
private boolean isPublicInvokableNode(BLangInvokableNode invNode) {
return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind()) ||
Symbols.isPublic(invNode.symbol.owner));
}
@Override
public void visit(BLangForkJoin forkJoin) {
this.forkJoinCount++;
this.initNewWorkerActionSystem();
this.checkStatementExecutionValidity(forkJoin);
forkJoin.workers.forEach(e -> analyzeNode(e, env));
analyzeNode(forkJoin.joinedBody, env);
if (forkJoin.timeoutBody != null) {
boolean joinReturns = this.statementReturns;
this.resetStatementReturns();
analyzeNode(forkJoin.timeoutBody, env);
this.statementReturns = joinReturns && this.statementReturns;
}
this.checkForkJoinWorkerCount(forkJoin);
this.finalizeCurrentWorkerActionSystem();
this.forkJoinCount--;
analyzeExpr(forkJoin.timeoutExpression);
}
private boolean inForkJoin() {
return this.forkJoinCount > 0;
}
/**
 * For a "join some" fork-join, verifies that the requested number of joined
 * workers does not exceed the number of workers that can actually join
 * (the explicitly joined workers, or all workers when none are named).
 */
private void checkForkJoinWorkerCount(BLangForkJoin forkJoin) {
    if (forkJoin.joinType != ForkJoinNode.JoinType.SOME) {
        return;
    }
    int joinableWorkers = forkJoin.joinedWorkers.isEmpty()
            ? forkJoin.workers.size()
            : forkJoin.joinedWorkers.size();
    if (forkJoin.joinedWorkerCount > joinableWorkers) {
        this.dlog.error(forkJoin.pos, DiagnosticCode.FORK_JOIN_INVALID_WORKER_COUNT);
    }
}
private boolean inWorker() {
return this.workerCount > 0;
}
@Override
public void visit(BLangWorker worker) {
this.workerCount++;
this.workerActionSystemStack.peek().startWorkerActionStateMachine(worker.name.value, worker.pos);
analyzeNode(worker.body, env);
this.workerActionSystemStack.peek().endWorkerActionStateMachine();
this.workerCount--;
}
@Override
public void visit(BLangEndpoint endpointNode) {
}
@Override
public void visit(BLangTransaction transactionNode) {
    // Validates where a transaction may appear and analyses its bodies under
    // transaction-scoped restrictions (no break/continue/return/done escaping).
    this.checkStatementExecutionValidity(transactionNode);
    if (!isValidTransactionBlock()) {
        this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER);
        return;
    }
    // "false" markers: statements inside this body are NOT free to jump/return
    // out of the transaction.
    this.loopWithintransactionCheckStack.push(false);
    this.returnWithintransactionCheckStack.push(false);
    this.doneWithintransactionCheckStack.push(false);
    this.transactionCount++;
    analyzeNode(transactionNode.transactionBody, env);
    this.transactionCount--;
    this.resetLastStatement();
    if (transactionNode.onRetryBody != null) {
        // Retry blocks may not contain nested transactions (see isValidTransactionBlock).
        this.withinRetryBlock = true;
        analyzeNode(transactionNode.onRetryBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinRetryBlock = false;
    }
    this.returnWithintransactionCheckStack.pop();
    this.loopWithintransactionCheckStack.pop();
    this.doneWithintransactionCheckStack.pop();
    analyzeExpr(transactionNode.retryCount);
    analyzeExpr(transactionNode.onCommitFunction);
    analyzeExpr(transactionNode.onAbortFunction);
}
@Override
public void visit(BLangAbort abortNode) {
    // abort is only legal inside a transaction; after it, nothing else runs.
    if (this.transactionCount != 0) {
        this.lastStatement = true;
        return;
    }
    this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
}
@Override
public void visit(BLangDone doneNode) {
    // done may not be used to escape a transaction body.
    if (!checkReturnValidityInTransaction()) {
        this.lastStatement = true;
        return;
    }
    this.dlog.error(doneNode.pos, DiagnosticCode.DONE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
}
@Override
public void visit(BLangRetry retryNode) {
    // retry is only legal inside a transaction; after it, nothing else runs.
    if (this.transactionCount != 0) {
        this.lastStatement = true;
        return;
    }
    this.dlog.error(retryNode.pos, DiagnosticCode.FAIL_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
}
/**
 * Reports the statement as unreachable when it follows a guaranteed return or
 * a terminating statement (abort/done/break/continue), then clears the flag so
 * only the first unreachable statement is reported.
 */
private void checkUnreachableCode(BLangStatement stmt) {
    if (this.statementReturns) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetStatementReturns();
    }
    if (this.lastStatement) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetLastStatement();
    }
}
// Entry point every statement visit calls first; currently only checks reachability.
private void checkStatementExecutionValidity(BLangStatement stmt) {
    this.checkUnreachableCode(stmt);
}
@Override
public void visit(BLangBlockStmt blockNode) {
    // Analyse each statement of the block inside a fresh block-level scope.
    SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);
    for (BLangStatement stmt : blockNode.stmts) {
        analyzeNode(stmt, blockEnv);
    }
    this.resetLastStatement();
}
@Override
public void visit(BLangReturn returnStmt) {
    // Guard order determines which diagnostic wins when several apply:
    // resource > fork-join worker > transaction escape.
    this.checkStatementExecutionValidity(returnStmt);
    if (this.env.enclInvokable.getKind() == NodeKind.RESOURCE) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_STMT_NOT_VALID_IN_RESOURCE);
        return;
    }
    if (this.inForkJoin() && this.inWorker()) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.FORK_JOIN_WORKER_CANNOT_RETURN);
        return;
    }
    if (checkReturnValidityInTransaction()) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.statementReturns = true;
    analyzeExpr(returnStmt.expr);
}
@Override
public void visit(BLangIf ifStmt) {
    this.checkStatementExecutionValidity(ifStmt);
    analyzeNode(ifStmt.body, env);
    boolean thenBranchReturns = this.statementReturns;
    this.resetStatementReturns();
    if (ifStmt.elseStmt != null) {
        // The if-statement returns only when both branches return.
        analyzeNode(ifStmt.elseStmt, env);
        this.statementReturns = thenBranchReturns && this.statementReturns;
    }
    analyzeExpr(ifStmt.expr);
}
@Override
public void visit(BLangMatch matchStmt) {
    // Validates a match statement: every possible type of the matched
    // expression must be covered by some pattern, every pattern must be
    // reachable, and the statement "returns" only if all pattern bodies return.
    this.returnWithintransactionCheckStack.push(true);
    boolean unmatchedExprTypesAvailable = false;
    analyzeExpr(matchStmt.expr);

    // Classify each possible expression type against the pattern clauses.
    List<BType> unmatchedExprTypes = new ArrayList<>();
    for (BType exprType : matchStmt.exprTypes) {
        boolean assignable = false;
        for (BLangMatchStmtPatternClause pattern : matchStmt.patternClauses) {
            BType patternType = pattern.variable.type;
            if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) {
                // Type errors were reported earlier; skip pattern analysis.
                return;
            }
            assignable = this.types.isAssignable(exprType, patternType);
            if (assignable) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            } else if (exprType.tag == TypeTags.ANY) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.JSON &&
                    this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD)
                    && this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            }
            // otherwise this pattern cannot match exprType; try the next one.
            // (An empty else-branch previously sat here; removed as dead code.)
        }
        if (!assignable) {
            unmatchedExprTypes.add(exprType);
        }
    }

    if (!unmatchedExprTypes.isEmpty()) {
        unmatchedExprTypesAvailable = true;
        dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN,
                unmatchedExprTypes);
    }

    // Walk patterns back-to-front so a pattern that matches nothing is
    // "unreachable" when a later pattern matched, else simply "unmatched".
    boolean matchedPatternsAvailable = false;
    for (int i = matchStmt.patternClauses.size() - 1; i >= 0; i--) {
        BLangMatchStmtPatternClause pattern = matchStmt.patternClauses.get(i);
        if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) {
            if (matchedPatternsAvailable) {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN);
            } else {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
            }
        } else {
            matchedPatternsAvailable = true;
        }
    }

    if (!unmatchedExprTypesAvailable) {
        this.checkStatementExecutionValidity(matchStmt);
        // The whole statement returns only if every pattern body returns.
        boolean matchStmtReturns = true;
        for (BLangMatchStmtPatternClause patternClause : matchStmt.patternClauses) {
            analyzeNode(patternClause.body, env);
            matchStmtReturns = matchStmtReturns && this.statementReturns;
            this.resetStatementReturns();
        }
        this.statementReturns = matchStmtReturns;
    }
    this.returnWithintransactionCheckStack.pop();
}
@Override
public void visit(BLangForeach foreach) {
    // A foreach body is a loop that lives wholly inside any enclosing transaction.
    this.loopWithintransactionCheckStack.push(true);
    this.checkStatementExecutionValidity(foreach);
    this.loopCount++;
    for (BLangStatement stmt : foreach.body.stmts) {
        analyzeNode(stmt, env);
    }
    this.loopCount--;
    this.resetLastStatement();
    this.loopWithintransactionCheckStack.pop();
    analyzeExpr(foreach.collection);
    analyzeExprs(foreach.varRefs);
}
@Override
public void visit(BLangWhile whileNode) {
    // A while body is a loop that lives wholly inside any enclosing transaction.
    this.loopWithintransactionCheckStack.push(true);
    this.checkStatementExecutionValidity(whileNode);
    this.loopCount++;
    for (BLangStatement stmt : whileNode.body.stmts) {
        analyzeNode(stmt, env);
    }
    this.loopCount--;
    this.resetLastStatement();
    this.loopWithintransactionCheckStack.pop();
    analyzeExpr(whileNode.expr);
}
@Override
public void visit(BLangLock lockNode) {
    // Lock blocks impose no extra rules; just analyse the body statements.
    this.checkStatementExecutionValidity(lockNode);
    for (BLangStatement stmt : lockNode.body.stmts) {
        analyzeNode(stmt, env);
    }
}
@Override
public void visit(BLangContinue continueNode) {
    // continue must be inside a loop and may not jump out of a transaction.
    this.checkStatementExecutionValidity(continueNode);
    if (this.loopCount == 0) {
        this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP);
    } else if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
    } else {
        this.lastStatement = true;
    }
}
public void visit(BLangImportPackage importPkgNode) {
    // Analyse the imported package in its own environment, if one is known.
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(importPkgNode.symbol);
    if (pkgEnv != null) {
        analyzeNode(pkgEnv.node, env);
    }
}
// XML namespace declarations need no code analysis.
public void visit(BLangXMLNS xmlnsNode) {
    /* ignore */
}
public void visit(BLangService serviceNode) {
    // Each resource is analysed within the service's own environment.
    SymbolEnv serviceEnv = SymbolEnv.createServiceEnv(serviceNode, serviceNode.symbol.scope, env);
    for (BLangResource resource : serviceNode.resources) {
        analyzeNode(resource, serviceEnv);
    }
}
// A resource is analysed like any other invokable, in its own environment.
public void visit(BLangResource resourceNode) {
    SymbolEnv resourceEnv = SymbolEnv.createResourceActionSymbolEnv(resourceNode,
            resourceNode.symbol.scope, env);
    this.visitInvocable(resourceNode, resourceEnv);
}
// A forever statement never falls through; anything after it is unreachable.
public void visit(BLangForever foreverStatement) {
    this.lastStatement = true;
}
public void visit(BLangAction actionNode) {
    /* not used, covered with functions */
}
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Public object types expose their public fields; check those for
    // visibility violations before descending into the member functions.
    if (objectTypeNode.isFieldAnalyseRequired && Symbols.isPublic(objectTypeNode.symbol)) {
        for (BLangVariable field : objectTypeNode.fields) {
            if (Symbols.isPublic(field.symbol)) {
                analyzeNode(field, this.env);
            }
        }
    }
    for (BLangFunction memberFunction : objectTypeNode.functions) {
        this.analyzeNode(memberFunction, this.env);
    }
}
/**
 * Reports an error at {@code pos} when a private type is exposed through a
 * public construct. Null types or types without a symbol are ignored.
 */
private void analyseType(BType type, DiagnosticPos pos) {
    if (type == null) {
        return;
    }
    BSymbol symbol = type.tsymbol;
    if (symbol == null) {
        return;
    }
    if (Symbols.isPrivate(symbol)) {
        dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name);
    }
}
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Only public records expose fields worth checking for visibility leaks.
    if (!recordTypeNode.isFieldAnalyseRequired || !Symbols.isPublic(recordTypeNode.symbol)) {
        return;
    }
    for (BLangVariable field : recordTypeNode.fields) {
        if (Symbols.isPublic(field.symbol)) {
            analyzeNode(field, this.env);
        }
    }
}
public void visit(BLangVariable varNode) {
    analyzeExpr(varNode.expr);
    // Only public variables can leak non-public types.
    if (Objects.isNull(varNode.symbol) || !Symbols.isPublic(varNode.symbol)) {
        return;
    }
    // Check the declared type for package/module-level variables, or for
    // function-parented variables with an explicit type.
    // NOTE(review): the second operand looks like it targets function
    // parameters/return-type variables — confirm the intended scope.
    int ownerSymTag = this.env.scope.owner.tag;
    if (((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) || (varNode.type != null &&
            varNode.parent != null && NodeKind.FUNCTION.equals(varNode.parent.getKind()))) {
        analyseType(varNode.type, varNode.pos);
    }
}
public void visit(BLangIdentifier identifierNode) {
    /* ignore */
}
public void visit(BLangAnnotation annotationNode) {
    /* ignore */
}
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    /* ignore */
}
// A variable definition statement delegates to the variable node itself.
public void visit(BLangVariableDef varDefNode) {
    this.checkStatementExecutionValidity(varDefNode);
    analyzeNode(varDefNode.var, env);
}
// Compound assignment (x += e): analyse the target reference and the value.
public void visit(BLangCompoundAssignment compoundAssignment) {
    this.checkStatementExecutionValidity(compoundAssignment);
    analyzeExpr(compoundAssignment.varRef);
    analyzeExpr(compoundAssignment.expr);
}
// Post-increment/-decrement: analyse the target reference and the delta.
public void visit(BLangPostIncrement postIncrement) {
    this.checkStatementExecutionValidity(postIncrement);
    analyzeExpr(postIncrement.varRef);
    analyzeExpr(postIncrement.increment);
}
// Plain assignment: analyse the target reference and the assigned value.
public void visit(BLangAssignment assignNode) {
    this.checkStatementExecutionValidity(assignNode);
    analyzeExpr(assignNode.varRef);
    analyzeExpr(assignNode.expr);
}
@Override
// Tuple destructuring ((a, b) = e): analyse all target refs and the source.
public void visit(BLangTupleDestructure stmt) {
    this.checkStatementExecutionValidity(stmt);
    analyzeExprs(stmt.varRefs);
    analyzeExpr(stmt.expr);
}
public void visit(BLangBreak breakNode) {
    // break must be inside a loop and may not jump out of a transaction.
    this.checkStatementExecutionValidity(breakNode);
    if (this.loopCount == 0) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
    } else if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
    } else {
        this.lastStatement = true;
    }
}
// throw terminates the flow, so it counts as a returning statement.
public void visit(BLangThrow throwNode) {
    this.checkStatementExecutionValidity(throwNode);
    this.statementReturns = true;
    analyzeExpr(throwNode.expr);
}
// Statement-level xmlns declaration: only reachability is checked.
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    this.checkStatementExecutionValidity(xmlnsStmtNode);
}
// Expression statements must be invocations/awaits (or wrap them); anything
// else with a non-nil value is rejected by validateExprStatementExpression.
public void visit(BLangExpressionStmt exprStmtNode) {
    this.checkStatementExecutionValidity(exprStmtNode);
    analyzeExpr(exprStmtNode.expr);
    validateExprStatementExpression(exprStmtNode);
}
/**
 * A bare expression statement must boil down to an invocation or an await;
 * match/check wrappers are peeled off first. Any other nil-typed expression
 * is reported as an invalid expression statement.
 */
private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) {
    BLangExpression expr = exprStmtNode.expr;
    // Unwrap nested match-expression / check-expression layers.
    for (;;) {
        NodeKind kind = expr.getKind();
        if (kind == NodeKind.MATCH_EXPRESSION) {
            expr = ((BLangMatchExpression) expr).expr;
        } else if (kind == NodeKind.CHECK_EXPR) {
            expr = ((BLangCheckedExpr) expr).expr;
        } else {
            break;
        }
    }
    if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.AWAIT_EXPR) {
        return;
    }
    if (expr.type == symTable.nilType) {
        dlog.error(exprStmtNode.pos, DiagnosticCode.INVALID_EXPR_STATEMENT);
    }
}
public void visit(BLangTryCatchFinally tryNode) {
    this.checkStatementExecutionValidity(tryNode);
    analyzeNode(tryNode.tryBody, env);
    boolean allPathsReturn = this.statementReturns;
    this.resetStatementReturns();
    // Each catch clause must handle a distinct error type, and the try/catch
    // as a whole "returns" only if try AND every catch body return.
    List<BType> caughtTypes = new ArrayList<>();
    for (BLangCatch catchClause : tryNode.getCatchBlocks()) {
        if (caughtTypes.contains(catchClause.getParameter().type)) {
            dlog.error(catchClause.getParameter().pos, DiagnosticCode.DUPLICATED_ERROR_CATCH,
                    catchClause.getParameter().type);
        }
        caughtTypes.add(catchClause.getParameter().type);
        analyzeNode(catchClause.body, env);
        allPathsReturn = allPathsReturn && this.statementReturns;
        this.resetStatementReturns();
    }
    if (tryNode.finallyBody == null) {
        this.statementReturns = allPathsReturn;
    } else {
        // A returning finally block makes the whole statement return.
        analyzeNode(tryNode.finallyBody, env);
        this.statementReturns = allPathsReturn || this.statementReturns;
    }
}
// Catch bodies are analysed from visit(BLangTryCatchFinally).
public void visit(BLangCatch catchNode) {
    /* ignore */
}
public void visit(BLangWorkerSend workerSendNode) {
    this.checkStatementExecutionValidity(workerSendNode);
    // Channel sends are plain expressions with no worker-interaction bookkeeping.
    if (workerSendNode.isChannel) {
        analyzeExpr(workerSendNode.expr);
        if (workerSendNode.keyExpr != null) {
            analyzeExpr(workerSendNode.keyExpr);
        }
        return;
    }
    // Only sends inside a worker body participate in interaction validation.
    if (this.inWorker()) {
        this.workerActionSystemStack.peek().addWorkerAction(workerSendNode);
        analyzeExpr(workerSendNode.expr);
    }
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    this.checkStatementExecutionValidity(workerReceiveNode);
    // Channel receives are plain expressions with no interaction bookkeeping.
    if (workerReceiveNode.isChannel) {
        analyzeExpr(workerReceiveNode.expr);
        if (workerReceiveNode.keyExpr != null) {
            analyzeExpr(workerReceiveNode.keyExpr);
        }
        return;
    }
    // Only receives inside a worker body participate in interaction validation.
    if (this.inWorker()) {
        this.workerActionSystemStack.peek().addWorkerAction(workerReceiveNode);
        analyzeExpr(workerReceiveNode.expr);
    }
}
public void visit(BLangLiteral literalExpr) {
    /* ignore */
}
// Analyse each element of the array literal.
public void visit(BLangArrayLiteral arrayLiteral) {
    analyzeExprs(arrayLiteral.exprs);
}
public void visit(BLangTableLiteral tableLiteral) {
    /* ignore */
}
public void visit(BLangSimpleVarRef varRefExpr) {
    /* ignore */
}
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    /* ignore */
}
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    analyzeExpr(indexAccessExpr.indexExpr);
    analyzeExpr(indexAccessExpr.expr);
    BType indexType = indexAccessExpr.indexExpr.type;
    if (indexType == null || indexType.tag == TypeTags.ERROR) {
        return;
    }
    // A constant index into a closed, sealed array can be range-checked here.
    boolean literalIndexOnArray = indexAccessExpr.expr.type.tag == TypeTags.ARRAY
            && indexAccessExpr.indexExpr.getKind() == NodeKind.LITERAL;
    if (literalIndexOnArray) {
        BArrayType arrayType = (BArrayType) indexAccessExpr.expr.type;
        BLangLiteral indexLiteral = (BLangLiteral) indexAccessExpr.indexExpr;
        Long indexValue = (Long) indexLiteral.getValue();
        if (arrayType.state == BArrayState.CLOSED_SEALED && arrayType.size <= indexValue) {
            dlog.error(indexLiteral.pos, DiagnosticCode.ARRAY_INDEX_OUT_OF_RANGE, indexValue,
                    arrayType.size);
        }
    }
}
public void visit(BLangInvocation invocationExpr) {
    analyzeExpr(invocationExpr.expr);
    analyzeExprs(invocationExpr.requiredArgs);
    analyzeExprs(invocationExpr.namedArgs);
    analyzeExprs(invocationExpr.restArgs);
    checkDuplicateNamedArgs(invocationExpr.namedArgs);

    // Warn when a function marked deprecated is being invoked.
    BSymbol invokedSymbol = invocationExpr.symbol;
    if (invokedSymbol != null && invokedSymbol.kind == SymbolKind.FUNCTION
            && Symbols.isFlagOn(invokedSymbol.flags, Flags.DEPRECATED)) {
        dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_FUNCTION,
                names.fromIdNode(invocationExpr.name));
    }

    // Action invocations are only legal in a few syntactic positions.
    if (invocationExpr.actionInvocation) {
        validateActionInvocation(invocationExpr.pos, invocationExpr);
    }
}
// Walks up the parent chain to confirm an action invocation sits in a legal
// position: directly under an assignment, expression statement, tuple
// destructure or variable, possibly wrapped in check/match expressions or as
// the LHS of an elvis whose LHS is itself an action invocation.
private void validateActionInvocation(DiagnosticPos pos, BLangNode bLangNode) {
    BLangNode parent = bLangNode.parent;
    while (parent != null) {
        final NodeKind kind = parent.getKind();
        if (kind == NodeKind.ASSIGNMENT || kind == NodeKind.EXPRESSION_STATEMENT
                || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE) {
            // Legal anchor found.
            return;
        } else if (kind == NodeKind.CHECK_EXPR || kind == NodeKind.MATCH_EXPRESSION) {
            // Transparent wrappers: keep climbing.
            parent = parent.parent;
            continue;
        } else if (kind == NodeKind.ELVIS_EXPR
                && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION
                && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) {
            parent = parent.parent;
            continue;
        }
        break;
    }
    dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
}
// Object construction: analyse constructor args and the init invocation.
public void visit(BLangTypeInit cIExpr) {
    analyzeExprs(cIExpr.argsExpr);
    analyzeExpr(cIExpr.objectInitInvocation);
}
// Ternary: analyse condition, then-branch and else-branch.
public void visit(BLangTernaryExpr ternaryExpr) {
    analyzeExpr(ternaryExpr.expr);
    analyzeExpr(ternaryExpr.thenExpr);
    analyzeExpr(ternaryExpr.elseExpr);
}
public void visit(BLangAwaitExpr awaitExpr) {
    analyzeExpr(awaitExpr.expr);
}
public void visit(BLangBinaryExpr binaryExpr) {
    analyzeExpr(binaryExpr.lhsExpr);
    analyzeExpr(binaryExpr.rhsExpr);
}
public void visit(BLangElvisExpr elvisExpr) {
    analyzeExpr(elvisExpr.lhsExpr);
    analyzeExpr(elvisExpr.rhsExpr);
}
@Override
public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) {
    analyzeExprs(bracedOrTupleExpr.expressions);
}
public void visit(BLangUnaryExpr unaryExpr) {
    analyzeExpr(unaryExpr.expr);
}
public void visit(BLangTypedescExpr accessExpr) {
    /* ignore */
}
public void visit(BLangTypeConversionExpr conversionExpr) {
    analyzeExpr(conversionExpr.expr);
}
public void visit(BLangXMLQName xmlQName) {
    /* ignore */
}
public void visit(BLangXMLAttribute xmlAttribute) {
    analyzeExpr(xmlAttribute.name);
    analyzeExpr(xmlAttribute.value);
}
// XML element literal: analyse tag names, attributes and children.
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    analyzeExpr(xmlElementLiteral.startTagName);
    analyzeExpr(xmlElementLiteral.endTagName);
    analyzeExprs(xmlElementLiteral.attributes);
    analyzeExprs(xmlElementLiteral.children);
}
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    analyzeExprs(xmlTextLiteral.textFragments);
}
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    analyzeExprs(xmlCommentLiteral.textFragments);
}
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    analyzeExprs(xmlProcInsLiteral.dataFragments);
    analyzeExpr(xmlProcInsLiteral.target);
}
public void visit(BLangXMLQuotedString xmlQuotedString) {
    analyzeExprs(xmlQuotedString.textFragments);
}
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    analyzeExprs(stringTemplateLiteral.exprs);
}
// Lambda bodies are analysed when their function node is visited.
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    /* ignore */
}
public void visit(BLangArrowFunction bLangArrowFunction) {
    /* ignore */
}
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    analyzeExpr(xmlAttributeAccessExpr.expr);
    analyzeExpr(xmlAttributeAccessExpr.indexExpr);
}
public void visit(BLangIntRangeExpression intRangeExpression) {
    analyzeExpr(intRangeExpression.startExpr);
    analyzeExpr(intRangeExpression.endExpr);
}
public void visit(BLangValueType valueType) {
    /* ignore */
}
public void visit(BLangArrayType arrayType) {
    /* ignore */
}
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    /* ignore */
}
public void visit(BLangConstrainedType constrainedType) {
    /* ignore */
}
// A user-defined type reference may expose a private type; check it.
public void visit(BLangUserDefinedType userDefinedType) {
    analyseType(userDefinedType.type, userDefinedType.pos);
}
public void visit(BLangTupleTypeNode tupleTypeNode) {
    tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env));
}
public void visit(BLangUnionTypeNode unionTypeNode) {
    unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env));
}
public void visit(BLangFunctionTypeNode functionTypeNode) {
    analyseType(functionTypeNode.type, functionTypeNode.pos);
}
@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
    /* ignore */
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    /* ignore */
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    /* ignore */
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Same pattern-coverage analysis as a match statement, except that an
    // uncovered type is fine when it is assignable to the expression's own
    // (default-clause) type.
    analyzeExpr(bLangMatchExpression.expr);
    List<BType> exprTypes;
    if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
        exprTypes = new ArrayList<>(unionType.memberTypes);
    } else {
        exprTypes = Lists.of(bLangMatchExpression.expr.type);
    }

    List<BType> unmatchedExprTypes = new ArrayList<>();
    for (BType exprType : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternType = pattern.variable.type;
            if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) {
                // Type errors were reported earlier; skip pattern analysis.
                return;
            }
            assignable = this.types.isAssignable(exprType, patternType);
            if (assignable) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            } else if (exprType.tag == TypeTags.ANY) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.JSON && this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD)
                    && this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            }
            // otherwise this pattern cannot match exprType; try the next one.
            // (An empty else-branch previously sat here; removed as dead code.)
        }
        if (!assignable && !this.types.isAssignable(exprType, bLangMatchExpression.type)) {
            unmatchedExprTypes.add(exprType);
        }
    }

    if (!unmatchedExprTypes.isEmpty()) {
        dlog.error(bLangMatchExpression.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN,
                unmatchedExprTypes);
    }

    // Walk patterns back-to-front so a pattern that matches nothing is
    // "unreachable" when a later pattern matched, else simply "unmatched".
    boolean matchedPatternsAvailable = false;
    for (int i = bLangMatchExpression.patternClauses.size() - 1; i >= 0; i--) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) {
            if (matchedPatternsAvailable) {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN);
            } else {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
            }
        } else {
            matchedPatternsAvailable = true;
        }
    }
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // No additional analysis for check expressions.
}
// Visits an expression with the parent link temporarily rewired to this
// analyzer's current parent (restored afterwards), then runs the
// cross-package access check on the node.
private <E extends BLangExpression> void analyzeExpr(E node) {
    if (node == null) {
        return;
    }
    BLangNode myParent = parent;
    node.parent = parent;
    parent = node;
    node.accept(this);
    parent = myParent;
    checkAccess(node);
}
@Override
public void visit(BLangScope scopeNode) {
    // Analyse the scope body, then its compensation function.
    this.checkStatementExecutionValidity(scopeNode);
    scopeNode.getScopeBody().accept(this);
    this.resetLastStatement();
    visit(scopeNode.compensationFunction);
}
@Override
public void visit(BLangCompensate compensateNode) {
    // Only reachability is checked for compensate statements.
    this.checkStatementExecutionValidity(compensateNode);
}
/**
 * This method checks for private symbols being accessed or used outside of package and|or private symbols being
 * used in public fields of objects/records and will fail those occurrences.
 *
 * @param node expression node to analyse
 */
private <E extends BLangExpression> void checkAccess(E node) {
    if (node.type != null) {
        checkAccessSymbol(node.type.tsymbol, node.pos);
    }
    // Invocations additionally reference the invoked symbol itself.
    if (node instanceof BLangInvocation) {
        BLangInvocation invocation = (BLangInvocation) node;
        checkAccessSymbol(invocation.symbol, invocation.pos);
    }
}
/**
 * Reports an error when a private symbol is referenced from outside the
 * package that declares it. Null symbols are ignored.
 */
private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) {
    if (symbol == null) {
        return;
    }
    boolean samePackage = env.enclPkg.symbol.pkgID == symbol.pkgID;
    if (!samePackage && Symbols.isPrivate(symbol)) {
        dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
    }
}
// Visits each expression in order. The index loop re-reads size() every
// iteration, so it tolerates the list growing during analysis.
// NOTE(review): this accepts nodes directly instead of calling analyzeExpr(),
// so the parent link and checkAccess() are skipped for list elements —
// confirm whether that is intentional.
private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.get(i).accept(this);
    }
}
// Opens a new worker-action system for the function/fork-join being entered.
private void initNewWorkerActionSystem() {
    this.workerActionSystemStack.push(new WorkerActionSystem());
}
// Closes the current worker-action system and validates its interactions.
private void finalizeCurrentWorkerActionSystem() {
    WorkerActionSystem was = this.workerActionSystemStack.pop();
    this.validateWorkerInteractions(was);
}
// True when the action is a worker send (otherwise it is a worker receive).
private static boolean isWorkerSend(BLangStatement action) {
    return action.getKind() == NodeKind.WORKER_SEND;
}
// True for a send targeting the fork itself. Callers must first establish
// via isWorkerSend() that the action is a send — the cast is unchecked.
private static boolean isWorkerForkSend(BLangStatement action) {
    return ((BLangWorkerSend) action).isForkJoinSend;
}
// Returns the peer worker id named by a send or receive action.
private String extractWorkerId(BLangStatement action) {
    return isWorkerSend(action)
            ? ((BLangWorkerSend) action).workerIdentifier.value
            : ((BLangWorkerReceive) action).workerIdentifier.value;
}
// Simulates the workers' send/receive actions in lock-step: a send advances
// only when its peer's pending action is the matching receive. If the system
// reaches a state where no machine can advance but some are unfinished, the
// interactions deadlock and an error is reported.
private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) {
    this.validateForkJoinSendsToFork(workerActionSystem);
    BLangStatement currentAction;
    WorkerActionStateMachine currentSM;
    String currentWorkerId;
    boolean systemRunning;
    do {
        systemRunning = false;
        for (Map.Entry<String, WorkerActionStateMachine> entry : workerActionSystem.entrySet()) {
            currentWorkerId = entry.getKey();
            currentSM = entry.getValue();
            if (currentSM.done()) {
                continue;
            }
            currentAction = currentSM.currentAction();
            if (isWorkerSend(currentAction)) {
                if (isWorkerForkSend(currentAction)) {
                    // Fork sends are consumed by the join, not by a peer worker.
                    currentSM.next();
                    systemRunning = true;
                } else {
                    WorkerActionStateMachine otherSM = workerActionSystem.get(this.extractWorkerId(currentAction));
                    if (otherSM.currentIsReceive(currentWorkerId)) {
                        // Matched send/receive pair: type-check and advance both.
                        this.validateWorkerActionParameters((BLangWorkerSend) currentAction,
                                (BLangWorkerReceive) otherSM.currentAction());
                        otherSM.next();
                        currentSM.next();
                        systemRunning = true;
                    }
                }
            }
        }
    } while (systemRunning);
    if (!workerActionSystem.everyoneDone()) {
        this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
    }
}
// Applies the per-worker fork-send check to every worker's state machine.
private void validateForkJoinSendsToFork(WorkerActionSystem workerActionSystem) {
    workerActionSystem.entrySet()
            .forEach(entry -> this.validateForkJoinSendsToFork(entry.getValue()));
}
// A worker may send to the fork at most once; later fork sends are errors.
private void validateForkJoinSendsToFork(WorkerActionStateMachine sm) {
    boolean forkSendSeen = false;
    for (BLangStatement action : sm.actions) {
        if (!isWorkerSend(action) || !isWorkerForkSend(action)) {
            continue;
        }
        if (forkSendSeen) {
            this.dlog.error(action.pos, DiagnosticCode.INVALID_MULTIPLE_FORK_JOIN_SEND);
        }
        forkSendSeen = true;
    }
}
// Reports a deadlocked worker interaction at the first worker's position.
private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
    this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION,
            workerActionSystem.toString());
}
// Type-checks the sent expression against the matching receive's type.
private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
    this.typeChecker.checkExpr(send.expr, send.env, receive.expr.type);
}
/**
 * True when a break/continue would illegally jump out of a transaction: we
 * are inside a transaction and the innermost loop marker says the loop is not
 * wholly contained in the transaction body. The transaction-count check runs
 * first so an empty marker stack can never be peeked (short-circuit &&);
 * the result is unchanged for all states the original could evaluate safely.
 */
private boolean checkNextBreakValidityInTransaction() {
    return transactionCount > 0 && !this.loopWithintransactionCheckStack.peek();
}
// True when a return/done would illegally exit a transaction body: inside a
// transaction, and either no marker was pushed or the marker forbids returns.
private boolean checkReturnValidityInTransaction() {
    return (this.returnWithintransactionCheckStack.empty() || !this.returnWithintransactionCheckStack.peek())
            && transactionCount > 0;
}
// A transaction may not appear inside a transaction handler or a retry block.
private boolean isValidTransactionBlock() {
    return (this.transactionWithinHandlerCheckStack.empty() || !this.transactionWithinHandlerCheckStack.peek()) &&
            !this.withinRetryBlock;
}
/**
 * Enforces the entry-point contract on a function named "main": it must be
 * public and return either nil or int. Other functions are left untouched.
 */
private void validateMainFunction(BLangFunction funcNode) {
    if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
        return;
    }
    if (!Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC);
    }
    int returnTypeTag = funcNode.symbol.retType.tag;
    if (returnTypeTag != TypeTags.NIL && returnTypeTag != TypeTags.INT) {
        this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.INVALID_RETURN_WITH_MAIN,
                funcNode.symbol.retType);
    }
}
// Reports every named argument whose name repeats an earlier argument's name.
private void checkDuplicateNamedArgs(List<BLangExpression> args) {
    List<BLangIdentifier> seenNames = new ArrayList<>();
    for (BLangExpression arg : args) {
        BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
        if (seenNames.contains(namedArg.name)) {
            dlog.error(namedArg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, namedArg.name);
        }
        seenNames.add(namedArg.name);
    }
}
/**
 * This class contains the state machines for a set of workers, keyed by
 * worker id in declaration order.
 */
private static class WorkerActionSystem {

    public Map<String, WorkerActionStateMachine> workerActionStateMachines = new LinkedHashMap<>();
    // The state machine currently being populated and the id of its worker.
    private WorkerActionStateMachine activeStateMachine;
    private String activeWorkerId;

    public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos) {
        this.activeWorkerId = workerId;
        this.activeStateMachine = new WorkerActionStateMachine(pos);
    }

    public void endWorkerActionStateMachine() {
        this.workerActionStateMachines.put(this.activeWorkerId, this.activeStateMachine);
    }

    public void addWorkerAction(BLangStatement action) {
        this.activeStateMachine.actions.add(action);
    }

    public WorkerActionStateMachine get(String workerId) {
        return this.workerActionStateMachines.get(workerId);
    }

    public Set<Map.Entry<String, WorkerActionStateMachine>> entrySet() {
        return this.workerActionStateMachines.entrySet();
    }

    /** True when every worker's state machine has consumed all its actions. */
    public boolean everyoneDone() {
        for (WorkerActionStateMachine sm : this.workerActionStateMachines.values()) {
            if (!sm.done()) {
                return false;
            }
        }
        return true;
    }

    public DiagnosticPos getRootPosition() {
        return this.workerActionStateMachines.values().iterator().next().pos;
    }

    @Override
    public String toString() {
        return this.workerActionStateMachines.toString();
    }
}
/**
 * This class represents a state machine to maintain the state of the
 * send/receive actions of a worker; currentState indexes the next action
 * still to be matched.
 */
private static class WorkerActionStateMachine {

    private static final String WORKER_SM_FINISHED = "FINISHED";

    public int currentState;
    public List<BLangStatement> actions = new ArrayList<>();
    public DiagnosticPos pos;

    public WorkerActionStateMachine(DiagnosticPos pos) {
        this.pos = pos;
    }

    /** True when every action has been consumed. */
    public boolean done() {
        return this.currentState == this.actions.size();
    }

    public BLangStatement currentAction() {
        return this.actions.get(this.currentState);
    }

    /** True when the pending action is a receive from the given worker. */
    public boolean currentIsReceive(String sourceWorkerId) {
        if (this.done()) {
            return false;
        }
        BLangStatement action = this.currentAction();
        if (isWorkerSend(action)) {
            return false;
        }
        return ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId);
    }

    public void next() {
        this.currentState++;
    }

    @Override
    public String toString() {
        if (this.done()) {
            return WORKER_SM_FINISHED;
        }
        BLangStatement action = this.currentAction();
        return isWorkerSend(action)
                ? ((BLangWorkerSend) action).toActionString()
                : ((BLangWorkerReceive) action).toActionString();
    }
}
} | class CodeAnalyzer extends BLangNodeVisitor {
private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY =
new CompilerContext.Key<>();
private int loopCount;
private int transactionCount;
private boolean statementReturns;
private boolean lastStatement;
private boolean withinRetryBlock;
private int forkJoinCount;
private int workerCount;
private SymbolTable symTable;
private Types types;
private BLangDiagnosticLog dlog;
private TypeChecker typeChecker;
private Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>();
private Stack<Boolean> loopWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> returnWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> doneWithintransactionCheckStack = new Stack<>();
private Stack<Boolean> transactionWithinHandlerCheckStack = new Stack<>();
private BLangNode parent;
private Names names;
private SymbolEnv env;
/**
 * Returns the per-compilation CodeAnalyzer, creating and registering one on
 * first use (the constructor stores itself under CODE_ANALYZER_KEY).
 */
public static CodeAnalyzer getInstance(CompilerContext context) {
    CodeAnalyzer codeAnalyzer = context.get(CODE_ANALYZER_KEY);
    return codeAnalyzer != null ? codeAnalyzer : new CodeAnalyzer(context);
}
public CodeAnalyzer(CompilerContext context) {
    // NOTE(review): registers this instance in the context before the fields
    // below are initialised ("this" escapes the constructor); safe only while
    // the context is not read concurrently during construction — confirm.
    context.put(CODE_ANALYZER_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeChecker = TypeChecker.getInstance(context);
    this.names = Names.getInstance(context);
}
// Clears per-function analysis state before visiting a new invokable.
private void resetFunction() {
    this.resetStatementReturns();
}
// Clears the "this statement definitely returns" flag.
private void resetStatementReturns() {
    this.statementReturns = false;
}
// Clears the "this was a terminating statement" flag.
private void resetLastStatement() {
    this.lastStatement = false;
}
// Entry point of the phase: runs code analysis over the whole package.
public BLangPackage analyze(BLangPackage pkgNode) {
    pkgNode.accept(this);
    return pkgNode;
}
@Override
public void visit(BLangPackage pkgNode) {
    // Run code analysis at most once per package.
    if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) {
        return;
    }
    parent = pkgNode;
    SymbolEnv pkgEnv = symTable.pkgEnvMap.get(pkgNode.symbol);
    pkgNode.topLevelNodes.forEach(node -> analyzeNode((BLangNode) node, pkgEnv));
    pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE);
    parent = null;
}
// Visits the node with both the environment and the parent link saved and
// restored around the visit, wiring node.parent for parent-sensitive checks.
private void analyzeNode(BLangNode node, SymbolEnv env) {
    SymbolEnv prevEnv = this.env;
    this.env = env;
    BLangNode myParent = parent;
    node.parent = parent;
    parent = node;
    node.accept(this);
    parent = myParent;
    this.env = prevEnv;
}
@Override
// A compilation unit is analysed by analysing each of its top-level nodes.
public void visit(BLangCompilationUnit compUnitNode) {
    compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, env));
}
public void visit(BLangTypeDefinition typeDefinition) {
    // Object/record bodies are analyzed for field/function visibility issues.
    if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE
            || typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE) {
        analyzeNode(typeDefinition.typeNode, env);
    }
    // Only public, non-finite type definitions can illegally expose a
    // non-public symbol through their definition.
    if (!Symbols.isPublic(typeDefinition.symbol) ||
            typeDefinition.symbol.type != null && TypeKind.FINITE.equals(typeDefinition.symbol.type.getKind())) {
        return;
    }
    analyseType(typeDefinition.symbol.type, typeDefinition.pos);
}
@Override
public void visit(BLangFunction funcNode) {
    // Transactions are not allowed inside transaction handlers; record that
    // we are inside one so visit(BLangTransaction) can reject nested usage.
    if (funcNode.symbol.isTransactionHandler) {
        transactionWithinHandlerCheckStack.push(true);
    }
    // At function top level, return/done do not exit any transaction.
    this.returnWithintransactionCheckStack.push(true);
    this.doneWithintransactionCheckStack.push(true);
    this.validateMainFunction(funcNode);
    SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    this.visitInvocable(funcNode, funcEnv);
    this.returnWithintransactionCheckStack.pop();
    this.doneWithintransactionCheckStack.pop();
    if (funcNode.symbol.isTransactionHandler) {
        transactionWithinHandlerCheckStack.pop();
    }
}
/**
 * Shared analysis for functions/resources: verifies that an invokable with a
 * non-nil return type actually returns on every path, and runs worker
 * interaction validation for its worker action system.
 */
private void visitInvocable(BLangInvokableNode invNode, SymbolEnv invokableEnv) {
    this.resetFunction();
    try {
        this.initNewWorkerActionSystem();
        // Native invokables have no body to analyze.
        if (Symbols.isNative(invNode.symbol)) {
            return;
        }
        boolean invokableReturns = invNode.returnTypeNode.type != symTable.nilType;
        if (invNode.workers.isEmpty()) {
            if (isPublicInvokableNode(invNode)) {
                analyzeNode(invNode.returnTypeNode, invokableEnv);
            }
            /* the body can be null in the case of Object type function declarations */
            if (invNode.body != null) {
                analyzeNode(invNode.body, invokableEnv);
                /* the function returns, but none of the statements surely returns */
                if (invokableReturns && !this.statementReturns) {
                    this.dlog.error(invNode.pos, DiagnosticCode.INVOKABLE_MUST_RETURN,
                            invNode.getKind().toString().toLowerCase());
                }
            }
        } else {
            // With explicit workers, at least one worker must surely return.
            boolean workerReturns = false;
            for (BLangWorker worker : invNode.workers) {
                analyzeNode(worker, invokableEnv);
                workerReturns = workerReturns || this.statementReturns;
                this.resetStatementReturns();
            }
            if (invokableReturns && !workerReturns) {
                this.dlog.error(invNode.pos, DiagnosticCode.ATLEAST_ONE_WORKER_MUST_RETURN,
                        invNode.getKind().toString().toLowerCase());
            }
        }
    } finally {
        // Always pop and validate the worker action system, even on early return.
        this.finalizeCurrentWorkerActionSystem();
    }
}
/**
 * Tells whether the given invokable is visible outside its module: the symbol
 * itself must be public, and its owner must be either the package itself or a
 * public symbol (e.g. a public object).
 */
private boolean isPublicInvokableNode(BLangInvokableNode invNode) {
    if (!Symbols.isPublic(invNode.symbol)) {
        return false;
    }
    return SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind())
            || Symbols.isPublic(invNode.symbol.owner);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    this.forkJoinCount++;
    // Fork/join workers form their own worker action system.
    this.initNewWorkerActionSystem();
    this.checkStatementExecutionValidity(forkJoin);
    forkJoin.workers.forEach(e -> analyzeNode(e, env));
    analyzeNode(forkJoin.joinedBody, env);
    if (forkJoin.timeoutBody != null) {
        // The statement surely returns only if BOTH the join body and the
        // timeout body surely return.
        boolean joinReturns = this.statementReturns;
        this.resetStatementReturns();
        analyzeNode(forkJoin.timeoutBody, env);
        this.statementReturns = joinReturns && this.statementReturns;
    }
    this.checkForkJoinWorkerCount(forkJoin);
    this.finalizeCurrentWorkerActionSystem();
    this.forkJoinCount--;
    analyzeExpr(forkJoin.timeoutExpression);
}

// True while analyzing code nested inside a fork/join construct.
private boolean inForkJoin() {
    return this.forkJoinCount > 0;
}

// For "join some n" clauses, verifies the requested worker count does not
// exceed the number of joined (or, if unspecified, all) workers.
private void checkForkJoinWorkerCount(BLangForkJoin forkJoin) {
    if (forkJoin.joinType == ForkJoinNode.JoinType.SOME) {
        int wc = forkJoin.joinedWorkers.size();
        if (wc == 0) {
            wc = forkJoin.workers.size();
        }
        if (forkJoin.joinedWorkerCount > wc) {
            this.dlog.error(forkJoin.pos, DiagnosticCode.FORK_JOIN_INVALID_WORKER_COUNT);
        }
    }
}

// True while analyzing code nested inside a worker body.
private boolean inWorker() {
    return this.workerCount > 0;
}
@Override
public void visit(BLangWorker worker) {
    this.workerCount++;
    // Open a state machine for this worker so its send/receive actions can be
    // validated against its peers after the enclosing invokable is analyzed.
    this.workerActionSystemStack.peek().startWorkerActionStateMachine(worker.name.value, worker.pos);
    analyzeNode(worker.body, env);
    this.workerActionSystemStack.peek().endWorkerActionStateMachine();
    this.workerCount--;
}

@Override
public void visit(BLangEndpoint endpointNode) {
}
@Override
public void visit(BLangTransaction transactionNode) {
    this.checkStatementExecutionValidity(transactionNode);
    // Transactions are illegal inside transaction handlers and retry blocks.
    if (!isValidTransactionBlock()) {
        this.dlog.error(transactionNode.pos, DiagnosticCode.TRANSACTION_CANNOT_BE_USED_WITHIN_HANDLER);
        return;
    }
    // Inside the transaction body, break/continue/return/done must not escape it.
    this.loopWithintransactionCheckStack.push(false);
    this.returnWithintransactionCheckStack.push(false);
    this.doneWithintransactionCheckStack.push(false);
    this.transactionCount++;
    analyzeNode(transactionNode.transactionBody, env);
    this.transactionCount--;
    this.resetLastStatement();
    if (transactionNode.onRetryBody != null) {
        this.withinRetryBlock = true;
        analyzeNode(transactionNode.onRetryBody, env);
        this.resetStatementReturns();
        this.resetLastStatement();
        this.withinRetryBlock = false;
    }
    this.returnWithintransactionCheckStack.pop();
    this.loopWithintransactionCheckStack.pop();
    this.doneWithintransactionCheckStack.pop();
    analyzeExpr(transactionNode.retryCount);
    analyzeExpr(transactionNode.onCommitFunction);
    analyzeExpr(transactionNode.onAbortFunction);
}
@Override
public void visit(BLangAbort abortNode) {
    // abort is only meaningful inside a transaction.
    if (this.transactionCount == 0) {
        this.dlog.error(abortNode.pos, DiagnosticCode.ABORT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
        return;
    }
    this.lastStatement = true;
}

@Override
public void visit(BLangDone doneNode) {
    // done must not be used to jump out of a transaction body.
    if (checkReturnValidityInTransaction()) {
        this.dlog.error(doneNode.pos, DiagnosticCode.DONE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.lastStatement = true;
}

@Override
public void visit(BLangRetry retryNode) {
    // retry is only meaningful inside a transaction.
    if (this.transactionCount == 0) {
        this.dlog.error(retryNode.pos, DiagnosticCode.FAIL_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
        return;
    }
    this.lastStatement = true;
}
// Flags the given statement as unreachable when the previous statement either
// surely returned or was a terminating statement (break/continue/abort/...).
// The flags are reset so only the first unreachable statement is reported.
private void checkUnreachableCode(BLangStatement stmt) {
    if (this.statementReturns) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetStatementReturns();
    }
    if (lastStatement) {
        this.dlog.error(stmt.pos, DiagnosticCode.UNREACHABLE_CODE);
        this.resetLastStatement();
    }
}

// Common per-statement validity hook, run at the start of each statement visit.
private void checkStatementExecutionValidity(BLangStatement stmt) {
    this.checkUnreachableCode(stmt);
}

@Override
public void visit(BLangBlockStmt blockNode) {
    final SymbolEnv blockEnv = SymbolEnv.createBlockEnv(blockNode, env);
    blockNode.stmts.forEach(e -> analyzeNode(e, blockEnv));
    // A terminating statement only terminates its own block.
    this.resetLastStatement();
}
@Override
public void visit(BLangReturn returnStmt) {
    this.checkStatementExecutionValidity(returnStmt);
    // Resources reply via actions, not return statements.
    if (this.env.enclInvokable.getKind() == NodeKind.RESOURCE) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_STMT_NOT_VALID_IN_RESOURCE);
        return;
    }
    // Workers spawned by fork/join cannot return a value to the caller.
    if (this.inForkJoin() && this.inWorker()) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.FORK_JOIN_WORKER_CANNOT_RETURN);
        return;
    }
    if (checkReturnValidityInTransaction()) {
        this.dlog.error(returnStmt.pos, DiagnosticCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.statementReturns = true;
    analyzeExpr(returnStmt.expr);
}

@Override
public void visit(BLangIf ifStmt) {
    this.checkStatementExecutionValidity(ifStmt);
    analyzeNode(ifStmt.body, env);
    // The if statement surely returns only if both branches surely return;
    // without an else branch it never surely returns.
    boolean ifStmtReturns = this.statementReturns;
    this.resetStatementReturns();
    if (ifStmt.elseStmt != null) {
        analyzeNode(ifStmt.elseStmt, env);
        this.statementReturns = ifStmtReturns && this.statementReturns;
    }
    analyzeExpr(ifStmt.expr);
}
@Override
public void visit(BLangMatch matchStmt) {
    this.returnWithintransactionCheckStack.push(true);
    boolean unmatchedExprTypesAvailable = false;
    analyzeExpr(matchStmt.expr);
    // Pass 1: classify each possible expression type against the pattern
    // clauses (direct match, possible/indirect match, or no match at all).
    List<BType> unmatchedExprTypes = new ArrayList<>();
    for (BType exprType : matchStmt.exprTypes) {
        boolean assignable = false;
        for (BLangMatchStmtPatternClause pattern : matchStmt.patternClauses) {
            BType patternType = pattern.variable.type;
            // Bail out silently on error types: earlier phases already reported.
            if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) {
                return;
            }
            assignable = this.types.isAssignable(exprType, patternType);
            if (assignable) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            } else if (exprType.tag == TypeTags.ANY) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.JSON &&
                    this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD)
                    && this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            } else {
                // no relation between this type and this pattern; keep scanning
            }
        }
        if (!assignable) {
            unmatchedExprTypes.add(exprType);
        }
    }
    // The match statement must be able to match every possible expression type.
    if (!unmatchedExprTypes.isEmpty()) {
        unmatchedExprTypesAvailable = true;
        dlog.error(matchStmt.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN,
                unmatchedExprTypes);
    }
    // Pass 2 (bottom-up): flag patterns that can never match; a dead pattern
    // below a live one is "unmatched", the trailing ones are "unreachable".
    boolean matchedPatternsAvailable = false;
    for (int i = matchStmt.patternClauses.size() - 1; i >= 0; i--) {
        BLangMatchStmtPatternClause pattern = matchStmt.patternClauses.get(i);
        if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) {
            if (matchedPatternsAvailable) {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN);
            } else {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
            }
        } else {
            matchedPatternsAvailable = true;
        }
    }
    // Pass 3: analyze the clause bodies; the statement surely returns only if
    // every pattern body surely returns.
    if (!unmatchedExprTypesAvailable) {
        this.checkStatementExecutionValidity(matchStmt);
        boolean matchStmtReturns = true;
        for (BLangMatchStmtPatternClause patternClause : matchStmt.patternClauses) {
            analyzeNode(patternClause.body, env);
            matchStmtReturns = matchStmtReturns && this.statementReturns;
            this.resetStatementReturns();
        }
        this.statementReturns = matchStmtReturns;
    }
    this.returnWithintransactionCheckStack.pop();
}
@Override
public void visit(BLangForeach foreach) {
    // A loop frame: break/continue inside it stay within any transaction.
    this.loopWithintransactionCheckStack.push(true);
    this.checkStatementExecutionValidity(foreach);
    this.loopCount++;
    foreach.body.stmts.forEach(e -> analyzeNode(e, env));
    this.loopCount--;
    this.resetLastStatement();
    this.loopWithintransactionCheckStack.pop();
    analyzeExpr(foreach.collection);
    analyzeExprs(foreach.varRefs);
}

@Override
public void visit(BLangWhile whileNode) {
    this.loopWithintransactionCheckStack.push(true);
    this.checkStatementExecutionValidity(whileNode);
    this.loopCount++;
    whileNode.body.stmts.forEach(e -> analyzeNode(e, env));
    this.loopCount--;
    this.resetLastStatement();
    this.loopWithintransactionCheckStack.pop();
    analyzeExpr(whileNode.expr);
}

@Override
public void visit(BLangLock lockNode) {
    this.checkStatementExecutionValidity(lockNode);
    lockNode.body.stmts.forEach(e -> analyzeNode(e, env));
}

@Override
public void visit(BLangContinue continueNode) {
    this.checkStatementExecutionValidity(continueNode);
    if (this.loopCount == 0) {
        this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP);
        return;
    }
    // continue may not jump out of a transaction that is inside the loop.
    if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(continueNode.pos, DiagnosticCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.lastStatement = true;
}
public void visit(BLangImportPackage importPkgNode) {
    BPackageSymbol pkgSymbol = importPkgNode.symbol;
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol);
    // No environment => nothing to analyze for this import (e.g. builtin).
    if (pkgEnv == null) {
        return;
    }
    analyzeNode(pkgEnv.node, env);
}

public void visit(BLangXMLNS xmlnsNode) {
    /* ignore */
}

public void visit(BLangService serviceNode) {
    SymbolEnv serviceEnv = SymbolEnv.createServiceEnv(serviceNode, serviceNode.symbol.scope, env);
    serviceNode.resources.forEach(res -> analyzeNode(res, serviceEnv));
}

public void visit(BLangResource resourceNode) {
    SymbolEnv resourceEnv = SymbolEnv.createResourceActionSymbolEnv(resourceNode,
            resourceNode.symbol.scope, env);
    this.visitInvocable(resourceNode, resourceEnv);
}

public void visit(BLangForever foreverStatement) {
    // forever never falls through to the next statement.
    this.lastStatement = true;
}

public void visit(BLangAction actionNode) {
    /* not used, covered with functions */
}

public void visit(BLangObjectTypeNode objectTypeNode) {
    // Public objects: check the types of their public fields, then analyze
    // all attached functions.
    if (objectTypeNode.isFieldAnalyseRequired && Symbols.isPublic(objectTypeNode.symbol)) {
        objectTypeNode.fields.stream()
                .filter(field -> (Symbols.isPublic(field.symbol)))
                .forEach(field -> analyzeNode(field, this.env));
    }
    objectTypeNode.functions.forEach(e -> this.analyzeNode(e, this.env));
}
/**
 * Reports an error at {@code pos} when a module-private type is exposed
 * through a public construct. Null types and types without a symbol are
 * silently skipped.
 */
private void analyseType(BType type, DiagnosticPos pos) {
    BSymbol typeSymbol = (type == null) ? null : type.tsymbol;
    if (typeSymbol != null && Symbols.isPrivate(typeSymbol)) {
        dlog.error(pos, DiagnosticCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, typeSymbol.name);
    }
}
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Public records: check the types of their public fields.
    if (recordTypeNode.isFieldAnalyseRequired && Symbols.isPublic(recordTypeNode.symbol)) {
        recordTypeNode.fields.stream()
                .filter(field -> (Symbols.isPublic(field.symbol)))
                .forEach(field -> analyzeNode(field, this.env));
    }
}

public void visit(BLangVariable varNode) {
    analyzeExpr(varNode.expr);
    if (Objects.isNull(varNode.symbol) || !Symbols.isPublic(varNode.symbol)) {
        return;
    }
    // Public module-level variables (and public object/record fields) must not
    // expose a non-public type.
    int ownerSymTag = this.env.scope.owner.tag;
    if (((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) || (varNode.type != null &&
            varNode.parent != null && NodeKind.FUNCTION.equals(varNode.parent.getKind()))) {
        analyseType(varNode.type, varNode.pos);
    }
}

public void visit(BLangIdentifier identifierNode) {
    /* ignore */
}

public void visit(BLangAnnotation annotationNode) {
    /* ignore */
}

public void visit(BLangAnnotationAttachment annAttachmentNode) {
    /* ignore */
}
public void visit(BLangVariableDef varDefNode) {
    this.checkStatementExecutionValidity(varDefNode);
    analyzeNode(varDefNode.var, env);
}

public void visit(BLangCompoundAssignment compoundAssignment) {
    this.checkStatementExecutionValidity(compoundAssignment);
    analyzeExpr(compoundAssignment.varRef);
    analyzeExpr(compoundAssignment.expr);
}

public void visit(BLangPostIncrement postIncrement) {
    this.checkStatementExecutionValidity(postIncrement);
    analyzeExpr(postIncrement.varRef);
    analyzeExpr(postIncrement.increment);
}

public void visit(BLangAssignment assignNode) {
    this.checkStatementExecutionValidity(assignNode);
    analyzeExpr(assignNode.varRef);
    analyzeExpr(assignNode.expr);
}

@Override
public void visit(BLangTupleDestructure stmt) {
    this.checkStatementExecutionValidity(stmt);
    analyzeExprs(stmt.varRefs);
    analyzeExpr(stmt.expr);
}

public void visit(BLangBreak breakNode) {
    this.checkStatementExecutionValidity(breakNode);
    if (this.loopCount == 0) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
        return;
    }
    // break may not jump out of a transaction that is inside the loop.
    if (checkNextBreakValidityInTransaction()) {
        this.dlog.error(breakNode.pos, DiagnosticCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
        return;
    }
    this.lastStatement = true;
}
public void visit(BLangThrow throwNode) {
    this.checkStatementExecutionValidity(throwNode);
    // A throw terminates the flow like a return for reachability purposes.
    this.statementReturns = true;
    analyzeExpr(throwNode.expr);
}

public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    this.checkStatementExecutionValidity(xmlnsStmtNode);
}

public void visit(BLangExpressionStmt exprStmtNode) {
    this.checkStatementExecutionValidity(exprStmtNode);
    analyzeExpr(exprStmtNode.expr);
    validateExprStatementExpression(exprStmtNode);
}

// Only invocations/await (or nil-typed expressions wrapped by match/check) may
// be used as a stand-alone expression statement.
private void validateExprStatementExpression(BLangExpressionStmt exprStmtNode) {
    BLangExpression expr = exprStmtNode.expr;
    // Unwrap match/check wrappers down to the underlying expression.
    while (expr.getKind() == NodeKind.MATCH_EXPRESSION || expr.getKind() == NodeKind.CHECK_EXPR) {
        if (expr.getKind() == NodeKind.MATCH_EXPRESSION) {
            expr = ((BLangMatchExpression) expr).expr;
        } else if (expr.getKind() == NodeKind.CHECK_EXPR) {
            expr = ((BLangCheckedExpr) expr).expr;
        }
    }
    if (expr.getKind() == NodeKind.INVOCATION || expr.getKind() == NodeKind.AWAIT_EXPR) {
        return;
    }
    if (expr.type == symTable.nilType) {
        dlog.error(exprStmtNode.pos, DiagnosticCode.INVALID_EXPR_STATEMENT);
    }
}
public void visit(BLangTryCatchFinally tryNode) {
    this.checkStatementExecutionValidity(tryNode);
    analyzeNode(tryNode.tryBody, env);
    // The construct surely returns only if the try body AND every catch body
    // surely return (a finally that returns also suffices).
    boolean tryCatchReturns = this.statementReturns;
    this.resetStatementReturns();
    List<BType> caughtTypes = new ArrayList<>();
    for (BLangCatch bLangCatch : tryNode.getCatchBlocks()) {
        // Two catch clauses for the same error type: the later one is dead.
        if (caughtTypes.contains(bLangCatch.getParameter().type)) {
            dlog.error(bLangCatch.getParameter().pos, DiagnosticCode.DUPLICATED_ERROR_CATCH,
                    bLangCatch.getParameter().type);
        }
        caughtTypes.add(bLangCatch.getParameter().type);
        analyzeNode(bLangCatch.body, env);
        tryCatchReturns = tryCatchReturns && this.statementReturns;
        this.resetStatementReturns();
    }
    if (tryNode.finallyBody != null) {
        analyzeNode(tryNode.finallyBody, env);
        this.statementReturns = tryCatchReturns || this.statementReturns;
    } else {
        this.statementReturns = tryCatchReturns;
    }
}

public void visit(BLangCatch catchNode) {
    /* ignore */
}
public void visit(BLangWorkerSend workerSendNode) {
    this.checkStatementExecutionValidity(workerSendNode);
    // Channel sends are not part of worker-to-worker interaction validation.
    if (workerSendNode.isChannel) {
        analyzeExpr(workerSendNode.expr);
        if (workerSendNode.keyExpr != null) {
            analyzeExpr(workerSendNode.keyExpr);
        }
        return;
    }
    if (!this.inWorker()) {
        return;
    }
    // Record this send in the current worker's state machine for matching.
    this.workerActionSystemStack.peek().addWorkerAction(workerSendNode);
    analyzeExpr(workerSendNode.expr);
}

@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    this.checkStatementExecutionValidity(workerReceiveNode);
    // Channel receives are not part of worker-to-worker interaction validation.
    if (workerReceiveNode.isChannel) {
        analyzeExpr(workerReceiveNode.expr);
        if (workerReceiveNode.keyExpr != null) {
            analyzeExpr(workerReceiveNode.keyExpr);
        }
        return;
    }
    if (!this.inWorker()) {
        return;
    }
    // Record this receive in the current worker's state machine for matching.
    this.workerActionSystemStack.peek().addWorkerAction(workerReceiveNode);
    analyzeExpr(workerReceiveNode.expr);
}
public void visit(BLangLiteral literalExpr) {
    /* ignore */
}

public void visit(BLangArrayLiteral arrayLiteral) {
    analyzeExprs(arrayLiteral.exprs);
}

public void visit(BLangTableLiteral tableLiteral) {
    /* ignore */
}

public void visit(BLangSimpleVarRef varRefExpr) {
    /* ignore */
}

public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    /* ignore */
}

public void visit(BLangIndexBasedAccess indexAccessExpr) {
    analyzeExpr(indexAccessExpr.indexExpr);
    analyzeExpr(indexAccessExpr.expr);
    if (indexAccessExpr.indexExpr.type == null || indexAccessExpr.indexExpr.type.tag == TypeTags.ERROR) {
        return;
    }
    // Compile-time bounds check for literal indexes into closed/sealed arrays.
    if (indexAccessExpr.expr.type.tag == TypeTags.ARRAY
            && indexAccessExpr.indexExpr.getKind() == NodeKind.LITERAL) {
        BArrayType bArrayType = (BArrayType) indexAccessExpr.expr.type;
        BLangLiteral indexExpr = (BLangLiteral) indexAccessExpr.indexExpr;
        Long indexVal = (Long) indexExpr.getValue();
        if (bArrayType.state == BArrayState.CLOSED_SEALED && (bArrayType.size <= indexVal)) {
            dlog.error(indexExpr.pos, DiagnosticCode.ARRAY_INDEX_OUT_OF_RANGE, indexVal, bArrayType.size);
        }
    }
}
public void visit(BLangInvocation invocationExpr) {
    analyzeExpr(invocationExpr.expr);
    analyzeExprs(invocationExpr.requiredArgs);
    analyzeExprs(invocationExpr.namedArgs);
    analyzeExprs(invocationExpr.restArgs);
    checkDuplicateNamedArgs(invocationExpr.namedArgs);
    // Warn on calls to functions marked @Deprecated.
    if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) {
        BSymbol funcSymbol = invocationExpr.symbol;
        if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) {
            dlog.warning(invocationExpr.pos, DiagnosticCode.USAGE_OF_DEPRECATED_FUNCTION,
                    names.fromIdNode(invocationExpr.name));
        }
    }
    if (invocationExpr.actionInvocation) {
        validateActionInvocation(invocationExpr.pos, invocationExpr);
    }
}

// An action invocation may only appear in a statement-like position
// (assignment, expression statement, tuple destructure, variable definition),
// possibly wrapped by check/match expressions or an elvis whose lhs is itself
// an action invocation. Anything else is an invalid expression position.
private void validateActionInvocation(DiagnosticPos pos, BLangNode bLangNode) {
    BLangNode parent = bLangNode.parent;
    while (parent != null) {
        final NodeKind kind = parent.getKind();
        if (kind == NodeKind.ASSIGNMENT || kind == NodeKind.EXPRESSION_STATEMENT
                || kind == NodeKind.TUPLE_DESTRUCTURE || kind == NodeKind.VARIABLE) {
            return;
        } else if (kind == NodeKind.CHECK_EXPR || kind == NodeKind.MATCH_EXPRESSION) {
            // Transparent wrappers — keep walking up.
            parent = parent.parent;
            continue;
        } else if (kind == NodeKind.ELVIS_EXPR
                && ((BLangElvisExpr) parent).lhsExpr.getKind() == NodeKind.INVOCATION
                && ((BLangInvocation) ((BLangElvisExpr) parent).lhsExpr).actionInvocation) {
            parent = parent.parent;
            continue;
        }
        break;
    }
    dlog.error(pos, DiagnosticCode.INVALID_ACTION_INVOCATION_AS_EXPR);
}
// The visits below simply recurse into sub-expressions; analyzeExpr/analyzeExprs
// perform the shared parent wiring and accessibility checks.

public void visit(BLangTypeInit cIExpr) {
    analyzeExprs(cIExpr.argsExpr);
    analyzeExpr(cIExpr.objectInitInvocation);
}

public void visit(BLangTernaryExpr ternaryExpr) {
    analyzeExpr(ternaryExpr.expr);
    analyzeExpr(ternaryExpr.thenExpr);
    analyzeExpr(ternaryExpr.elseExpr);
}

public void visit(BLangAwaitExpr awaitExpr) {
    analyzeExpr(awaitExpr.expr);
}

public void visit(BLangBinaryExpr binaryExpr) {
    analyzeExpr(binaryExpr.lhsExpr);
    analyzeExpr(binaryExpr.rhsExpr);
}

public void visit(BLangElvisExpr elvisExpr) {
    analyzeExpr(elvisExpr.lhsExpr);
    analyzeExpr(elvisExpr.rhsExpr);
}

@Override
public void visit(BLangBracedOrTupleExpr bracedOrTupleExpr) {
    analyzeExprs(bracedOrTupleExpr.expressions);
}

public void visit(BLangUnaryExpr unaryExpr) {
    analyzeExpr(unaryExpr.expr);
}

public void visit(BLangTypedescExpr accessExpr) {
    /* ignore */
}

public void visit(BLangTypeConversionExpr conversionExpr) {
    analyzeExpr(conversionExpr.expr);
}

public void visit(BLangXMLQName xmlQName) {
    /* ignore */
}

public void visit(BLangXMLAttribute xmlAttribute) {
    analyzeExpr(xmlAttribute.name);
    analyzeExpr(xmlAttribute.value);
}

public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    analyzeExpr(xmlElementLiteral.startTagName);
    analyzeExpr(xmlElementLiteral.endTagName);
    analyzeExprs(xmlElementLiteral.attributes);
    analyzeExprs(xmlElementLiteral.children);
}

public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    analyzeExprs(xmlTextLiteral.textFragments);
}

public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    analyzeExprs(xmlCommentLiteral.textFragments);
}

public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    analyzeExprs(xmlProcInsLiteral.dataFragments);
    analyzeExpr(xmlProcInsLiteral.target);
}

public void visit(BLangXMLQuotedString xmlQuotedString) {
    analyzeExprs(xmlQuotedString.textFragments);
}

public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    analyzeExprs(stringTemplateLiteral.exprs);
}

public void visit(BLangLambdaFunction bLangLambdaFunction) {
    /* ignore */
}

public void visit(BLangArrowFunction bLangArrowFunction) {
    /* ignore */
}

public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    analyzeExpr(xmlAttributeAccessExpr.expr);
    analyzeExpr(xmlAttributeAccessExpr.indexExpr);
}

public void visit(BLangIntRangeExpression intRangeExpression) {
    analyzeExpr(intRangeExpression.startExpr);
    analyzeExpr(intRangeExpression.endExpr);
}
// Type-node visits: user-defined/function types are checked for illegal
// exposure of non-public symbols; builtin types need no analysis.

public void visit(BLangValueType valueType) {
    /* ignore */
}

public void visit(BLangArrayType arrayType) {
    /* ignore */
}

public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    /* ignore */
}

public void visit(BLangConstrainedType constrainedType) {
    /* ignore */
}

public void visit(BLangUserDefinedType userDefinedType) {
    analyseType(userDefinedType.type, userDefinedType.pos);
}

public void visit(BLangTupleTypeNode tupleTypeNode) {
    tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env));
}

public void visit(BLangUnionTypeNode unionTypeNode) {
    unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeNode(memberType, env));
}

public void visit(BLangFunctionTypeNode functionTypeNode) {
    analyseType(functionTypeNode.type, functionTypeNode.pos);
}

@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
    /* ignore */
}

@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    /* ignore */
}

@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    /* ignore */
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    analyzeExpr(bLangMatchExpression.expr);
    // Collect the set of possible static types of the matched expression.
    List<BType> exprTypes;
    if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
        exprTypes = new ArrayList<>(unionType.memberTypes);
    } else {
        exprTypes = Lists.of(bLangMatchExpression.expr.type);
    }
    // Classify each possible type against the pattern clauses — this mirrors
    // the logic in visit(BLangMatch) for match statements.
    List<BType> unmatchedExprTypes = new ArrayList<>();
    for (BType exprType : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternType = pattern.variable.type;
            // Bail out silently on error types: earlier phases already reported.
            if (exprType.tag == TypeTags.ERROR || patternType.tag == TypeTags.ERROR) {
                return;
            }
            assignable = this.types.isAssignable(exprType, patternType);
            if (assignable) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            } else if (exprType.tag == TypeTags.ANY) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.JSON && this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if ((exprType.tag == TypeTags.OBJECT || exprType.tag == TypeTags.RECORD)
                    && this.types.isAssignable(patternType, exprType)) {
                pattern.matchedTypesIndirect.add(exprType);
            } else if (exprType.tag == TypeTags.BYTE && patternType.tag == TypeTags.INT) {
                pattern.matchedTypesDirect.add(exprType);
                break;
            } else {
                // no relation between this type and this pattern; keep scanning
            }
        }
        // Unlike the match statement, an unmatched type is fine if it is
        // assignable to the match expression's own result type (pass-through).
        if (!assignable && !this.types.isAssignable(exprType, bLangMatchExpression.type)) {
            unmatchedExprTypes.add(exprType);
        }
    }
    if (!unmatchedExprTypes.isEmpty()) {
        dlog.error(bLangMatchExpression.pos, DiagnosticCode.MATCH_STMT_CANNOT_GUARANTEE_A_MATCHING_PATTERN,
                unmatchedExprTypes);
    }
    // Bottom-up pass flagging patterns that can never match.
    boolean matchedPatternsAvailable = false;
    for (int i = bLangMatchExpression.patternClauses.size() - 1; i >= 0; i--) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        if (pattern.matchedTypesDirect.isEmpty() && pattern.matchedTypesIndirect.isEmpty()) {
            if (matchedPatternsAvailable) {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNMATCHED_PATTERN);
            } else {
                dlog.error(pattern.pos, DiagnosticCode.MATCH_STMT_UNREACHABLE_PATTERN);
            }
        } else {
            matchedPatternsAvailable = true;
        }
    }
}

@Override
public void visit(BLangCheckedExpr checkedExpr) {
}
// Visits an expression with parent wiring, then runs the accessibility check
// (non-public symbols referred across packages) on it.
private <E extends BLangExpression> void analyzeExpr(E node) {
    if (node == null) {
        return;
    }
    BLangNode myParent = parent;
    node.parent = parent;
    parent = node;
    node.accept(this);
    parent = myParent;
    checkAccess(node);
}

@Override
public void visit(BLangScope scopeNode) {
    this.checkStatementExecutionValidity(scopeNode);
    scopeNode.getScopeBody().accept(this);
    this.resetLastStatement();
    // Also analyze the compensation function attached to the scope.
    visit(scopeNode.compensationFunction);
}

@Override
public void visit(BLangCompensate compensateNode) {
    this.checkStatementExecutionValidity(compensateNode);
}
/**
 * This method checks for private symbols being accessed or used outside of package and|or private symbols being
 * used in public fields of objects/records and will fail those occurrences.
 *
 * @param node expression node to analyse
 */
private <E extends BLangExpression> void checkAccess(E node) {
    // Check the expression's own type symbol...
    if (node.type != null) {
        checkAccessSymbol(node.type.tsymbol, node.pos);
    }
    // ...and, for invocations, the invoked function's symbol as well.
    if (node instanceof BLangInvocation) {
        BLangInvocation bLangInvocation = (BLangInvocation) node;
        checkAccessSymbol(bLangInvocation.symbol, bLangInvocation.pos);
    }
}
/**
 * Reports an error when the given symbol is private and referenced from a
 * package other than the one it was declared in. Null symbols are ignored.
 */
private void checkAccessSymbol(BSymbol symbol, DiagnosticPos position) {
    if (symbol == null) {
        return;
    }
    boolean sameModule = env.enclPkg.symbol.pkgID == symbol.pkgID;
    if (!sameModule && Symbols.isPrivate(symbol)) {
        dlog.error(position, DiagnosticCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
    }
}
// Visits each expression in the list directly.
// NOTE(review): unlike analyzeExpr, this calls accept() without the parent
// wiring and without checkAccess — presumably intentional, but worth confirming
// since list elements therefore skip the accessibility check.
private <E extends BLangExpression> void analyzeExprs(List<E> nodeList) {
    for (int i = 0; i < nodeList.size(); i++) {
        nodeList.get(i).accept(this);
    }
}

// Opens a fresh worker action system for the invokable/fork-join being analyzed.
private void initNewWorkerActionSystem() {
    this.workerActionSystemStack.push(new WorkerActionSystem());
}

// Closes the current worker action system and validates its interactions.
private void finalizeCurrentWorkerActionSystem() {
    WorkerActionSystem was = this.workerActionSystemStack.pop();
    this.validateWorkerInteractions(was);
}
// True when the action is a worker send (otherwise it is a worker receive).
private static boolean isWorkerSend(BLangStatement action) {
    return action.getKind() == NodeKind.WORKER_SEND;
}

// True when the send targets the fork/join construct itself. Callers must
// ensure the action is a send before calling (unchecked cast).
private static boolean isWorkerForkSend(BLangStatement action) {
    return ((BLangWorkerSend) action).isForkJoinSend;
}
/**
 * Returns the name of the peer worker referenced by the given send or
 * receive action.
 */
private String extractWorkerId(BLangStatement action) {
    return isWorkerSend(action)
            ? ((BLangWorkerSend) action).workerIdentifier.value
            : ((BLangWorkerReceive) action).workerIdentifier.value;
}
/**
 * Simulates the send/receive actions of all workers as communicating state
 * machines: repeatedly advance any machine whose current action can proceed
 * (a fork send, or a send whose peer is currently at the matching receive)
 * until no machine can move. If any machine is left unfinished the
 * interactions are invalid (e.g. deadlock or unmatched send/receive).
 */
private void validateWorkerInteractions(WorkerActionSystem workerActionSystem) {
    this.validateForkJoinSendsToFork(workerActionSystem);
    BLangStatement currentAction;
    WorkerActionStateMachine currentSM;
    String currentWorkerId;
    boolean systemRunning;
    do {
        systemRunning = false;
        for (Map.Entry<String, WorkerActionStateMachine> entry : workerActionSystem.entrySet()) {
            currentWorkerId = entry.getKey();
            currentSM = entry.getValue();
            if (currentSM.done()) {
                continue;
            }
            currentAction = currentSM.currentAction();
            if (isWorkerSend(currentAction)) {
                if (isWorkerForkSend(currentAction)) {
                    // Sends to the fork construct always proceed.
                    currentSM.next();
                    systemRunning = true;
                } else {
                    // A send proceeds only when its peer is at the matching receive.
                    WorkerActionStateMachine otherSM = workerActionSystem.get(this.extractWorkerId(currentAction));
                    if (otherSM.currentIsReceive(currentWorkerId)) {
                        this.validateWorkerActionParameters((BLangWorkerSend) currentAction,
                                (BLangWorkerReceive) otherSM.currentAction());
                        otherSM.next();
                        currentSM.next();
                        systemRunning = true;
                    }
                }
            }
        }
    } while (systemRunning);
    if (!workerActionSystem.everyoneDone()) {
        this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
    }
}
// Runs the per-worker fork-send validation for every worker in the system.
private void validateForkJoinSendsToFork(WorkerActionSystem workerActionSystem) {
    for (Map.Entry<String, WorkerActionStateMachine> entry : workerActionSystem.entrySet()) {
        this.validateForkJoinSendsToFork(entry.getValue());
    }
}

// Each worker may send to the fork construct at most once.
private void validateForkJoinSendsToFork(WorkerActionStateMachine sm) {
    boolean sentToFork = false;
    for (BLangStatement action : sm.actions) {
        if (isWorkerSend(action) && isWorkerForkSend(action)) {
            if (sentToFork) {
                this.dlog.error(action.pos, DiagnosticCode.INVALID_MULTIPLE_FORK_JOIN_SEND);
            } else {
                sentToFork = true;
            }
        }
    }
}

// Reports a single diagnostic summarizing the stuck worker interaction state.
private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
    this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticCode.INVALID_WORKER_INTERACTION,
            workerActionSystem.toString());
}

// Type-checks the sent expression against the type expected by the receive.
private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
    this.typeChecker.checkExpr(send.expr, send.env, receive.expr.type);
}
/**
 * Returns true when a break/continue at the current position would illegally
 * exit a transaction: we are inside a transaction ({@code transactionCount > 0})
 * and the innermost enclosing frame is the transaction body rather than a loop
 * (top of the stack is {@code false}).
 * <p>
 * Guards against an empty stack, mirroring {@link #checkReturnValidityInTransaction()}.
 * break/continue are only analyzed inside loops, which always push a frame, but
 * the previous unconditional {@code peek()} would have thrown
 * {@code EmptyStackException} if that invariant were ever broken.
 */
private boolean checkNextBreakValidityInTransaction() {
    return !this.loopWithintransactionCheckStack.empty()
            && !this.loopWithintransactionCheckStack.peek() && transactionCount > 0;
}
// True when a return/done at the current position would illegally exit a
// transaction body (inside a transaction and no enclosing frame permits return).
private boolean checkReturnValidityInTransaction() {
    return (this.returnWithintransactionCheckStack.empty() || !this.returnWithintransactionCheckStack.peek())
            && transactionCount > 0;
}

// A transaction block is valid unless we are inside a transaction handler
// function or an onRetry block.
private boolean isValidTransactionBlock() {
    return (this.transactionWithinHandlerCheckStack.empty() || !this.transactionWithinHandlerCheckStack.peek()) &&
            !this.withinRetryBlock;
}
// Enforces the contract of the program entry point: main() must be public and
// must return either nil or int.
private void validateMainFunction(BLangFunction funcNode) {
    if (!MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
        return;
    }
    if (!Symbols.isPublic(funcNode.symbol)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.MAIN_SHOULD_BE_PUBLIC);
    }
    if (!(funcNode.symbol.retType.tag == TypeTags.NIL || funcNode.symbol.retType.tag == TypeTags.INT)) {
        this.dlog.error(funcNode.returnTypeNode.pos, DiagnosticCode.INVALID_RETURN_WITH_MAIN,
                funcNode.symbol.retType);
    }
}

// Reports every named argument whose name repeats an earlier one.
// NOTE(review): List.contains makes this O(n^2); a HashSet would be linear but
// relies on BLangIdentifier implementing hashCode consistently with equals —
// verify before changing.
private void checkDuplicateNamedArgs(List<BLangExpression> args) {
    List<BLangIdentifier> existingArgs = new ArrayList<>();
    args.forEach(arg -> {
        BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
        if (existingArgs.contains(namedArg.name)) {
            dlog.error(namedArg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, namedArg.name);
        }
        existingArgs.add(namedArg.name);
    });
}
/**
 * This class contains the state machines for a set of workers.
 */
private static class WorkerActionSystem {
    // Insertion-ordered so validation iterates workers in declaration order.
    public Map<String, WorkerActionStateMachine> workerActionStateMachines = new LinkedHashMap<>();
    // State machine of the worker currently being built (between start/end calls).
    private WorkerActionStateMachine currentSM;
    private String currentWorkerId;

    // Begins recording actions for the named worker.
    public void startWorkerActionStateMachine(String workerId, DiagnosticPos pos) {
        this.currentWorkerId = workerId;
        this.currentSM = new WorkerActionStateMachine(pos);
    }

    // Finishes the current worker's state machine and stores it by worker name.
    public void endWorkerActionStateMachine() {
        this.workerActionStateMachines.put(this.currentWorkerId, this.currentSM);
    }

    // Appends a send/receive action to the worker currently being recorded.
    public void addWorkerAction(BLangStatement action) {
        this.currentSM.actions.add(action);
    }

    public WorkerActionStateMachine get(String workerId) {
        return this.workerActionStateMachines.get(workerId);
    }

    public Set<Map.Entry<String, WorkerActionStateMachine>> entrySet() {
        return this.workerActionStateMachines.entrySet();
    }

    // True when every worker's state machine has consumed all its actions.
    public boolean everyoneDone() {
        return this.workerActionStateMachines.values().stream().allMatch(WorkerActionStateMachine::done);
    }

    // Position of the first worker, used as the anchor for diagnostics.
    public DiagnosticPos getRootPosition() {
        return this.workerActionStateMachines.values().iterator().next().pos;
    }

    @Override
    public String toString() {
        return this.workerActionStateMachines.toString();
    }
}
/**
 * A cursor over the ordered send/receive actions of a single worker; advancing
 * the cursor models executing the worker's next action.
 */
private static class WorkerActionStateMachine {
    private static final String WORKER_SM_FINISHED = "FINISHED";
    public int currentState;
    public List<BLangStatement> actions = new ArrayList<>();
    public DiagnosticPos pos;
    public WorkerActionStateMachine(DiagnosticPos pos) {
        this.pos = pos;
    }
    public boolean done() {
        return this.currentState == this.actions.size();
    }
    public BLangStatement currentAction() {
        return this.actions.get(this.currentState);
    }
    public boolean currentIsReceive(String sourceWorkerId) {
        if (this.done()) {
            return false;
        }
        BLangStatement action = this.currentAction();
        if (isWorkerSend(action)) {
            return false;
        }
        // A receive matches only when it names the given source worker.
        return ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId);
    }
    public void next() {
        this.currentState++;
    }
    @Override
    public String toString() {
        if (this.done()) {
            return WORKER_SM_FINISHED;
        }
        BLangStatement action = this.currentAction();
        return isWorkerSend(action)
                ? ((BLangWorkerSend) action).toActionString()
                : ((BLangWorkerReceive) action).toActionString();
    }
}
} |
I had assumed that when the ContextManager is not null, the stateContext would always be present. Given the internal logic ProxyContext uses to obtain the stateContext, an explicit presence check should be added. | public List<MetricFamilySamples> collect() {
// Nothing to export when the proxy runtime is absent from the classpath or the
// context manager has not been initialized yet.
if (!MetricsUtil.isClassExisted(PROXY_CLASS) || null == ProxyContext.getInstance().getContextManager()) {
    return Collections.emptyList();
}
Optional<GaugeMetricFamily> proxyInfo = FACTORY.createGaugeMetricFamily(MetricIds.PROXY_INFO);
Optional<StateContext> stateContext = ProxyContext.getInstance().getStateContext();
// Guard BOTH optionals before get(): a non-null ContextManager does not
// guarantee that ProxyContext can produce a StateContext.
if (!proxyInfo.isPresent() || !stateContext.isPresent()) {
    return Collections.emptyList();
}
List<MetricFamilySamples> result = new LinkedList<>();
// Export the current proxy state as a labeled gauge sample.
proxyInfo.get().addMetric(Collections.singletonList(PROXY_STATE), PROXY_STATE_MAP.get(stateContext.get().getCurrentState()));
result.add(proxyInfo.get());
return result;
} | if (null == ProxyContext.getInstance().getContextManager() || !proxyInfo.isPresent()) { | public List<MetricFamilySamples> collect() {
// Skip when the proxy runtime is not on the classpath or not yet initialized.
if (!MetricsUtil.isClassExisted(PROXY_CLASS) || null == ProxyContext.getInstance().getContextManager()) {
    return Collections.emptyList();
}
Optional<GaugeMetricFamily> proxyInfo = FACTORY.createGaugeMetricFamily(MetricIds.PROXY_INFO);
Optional<StateContext> stateContext = ProxyContext.getInstance().getStateContext();
// Both optionals must be present before get() is called below.
if (!proxyInfo.isPresent() || !stateContext.isPresent()) {
    return Collections.emptyList();
}
List<MetricFamilySamples> result = new LinkedList<>();
// Export the current proxy state as a labeled gauge sample.
proxyInfo.get().addMetric(Collections.singletonList(PROXY_STATE), PROXY_STATE_MAP.get(stateContext.get().getCurrentState()));
result.add(proxyInfo.get());
return result;
} | class ProxyInfoCollector extends Collector {
// Label value used when adding the proxy_info metric sample.
private static final String PROXY_STATE = "state";
// Checked for existence so this collector is a no-op outside a proxy deployment.
private static final String PROXY_CLASS = "org.apache.shardingsphere.proxy.backend.context.ProxyContext";
private static final PrometheusWrapperFactory FACTORY = new PrometheusWrapperFactory();
// Maps the runtime state to the numeric gauge value exported to Prometheus.
private static final ConcurrentHashMap<StateType, Integer> PROXY_STATE_MAP = new ConcurrentHashMap<>();
static {
    PROXY_STATE_MAP.put(StateType.OK, 1);
    PROXY_STATE_MAP.put(StateType.CIRCUIT_BREAK, 2);
}
@Override
} | class ProxyInfoCollector extends Collector {
// Label value used when adding the proxy_info metric sample.
private static final String PROXY_STATE = "state";
// Checked for existence so this collector is a no-op outside a proxy deployment.
private static final String PROXY_CLASS = "org.apache.shardingsphere.proxy.backend.context.ProxyContext";
private static final PrometheusWrapperFactory FACTORY = new PrometheusWrapperFactory();
// Maps the runtime state to the numeric gauge value exported to Prometheus.
private static final ConcurrentHashMap<StateType, Integer> PROXY_STATE_MAP = new ConcurrentHashMap<>();
static {
    PROXY_STATE_MAP.put(StateType.OK, 1);
    PROXY_STATE_MAP.put(StateType.CIRCUIT_BREAK, 2);
}
@Override
} |
The `isLhsAService` condition is probably unnecessary here as well — can it be dropped from this check too? | public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) {
// A non-isolated object cannot satisfy an isolated object type.
if (Symbols.isFlagOn(lhsType.flags, Flags.ISOLATED) && !Symbols.isFlagOn(rhsType.flags, Flags.ISOLATED)) {
    return false;
}
BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;
BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;
List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs;
List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;
int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);
int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);
// A service object type only accepts service objects.
boolean isLhsAService = Symbols.isService(lhsStructSymbol);
if (isLhsAService && !Symbols.isService(rhsStructSymbol)) {
    return false;
}
// The source must supply at least the fields and methods the target requires.
if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {
    return false;
}
// Any private member on the target rules out structural equivalence.
for (BField bField : lhsType.fields.values()) {
    if (Symbols.isPrivate(bField.symbol)) {
        return false;
    }
}
for (BAttachedFunction func : lhsFuncs) {
    if (Symbols.isPrivate(func.symbol)) {
        return false;
    }
}
// Field-wise check: same visibility region, assignable (covariant) field types.
for (BField lhsField : lhsType.fields.values()) {
    BField rhsField = rhsType.fields.get(lhsField.name.value);
    if (rhsField == null ||
            !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) ||
            !isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
        return false;
    }
}
for (BAttachedFunction lhsFunc : lhsFuncs) {
    if (lhsFunc == lhsStructSymbol.initializerFunc) {
        continue;
    }
    // NOTE(review): resource methods of a service-typed LHS are skipped here;
    // confirm whether the isLhsAService guard is actually needed.
    if (isLhsAService && Symbols.isResource(lhsFunc.symbol)) {
        continue;
    }
    BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);
    if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {
        return false;
    }
    // remote-ness is part of the method's contract and must match exactly.
    if (Symbols.isRemote(lhsFunc.symbol) != Symbols.isRemote(rhsFunc.symbol)) {
        return false;
    }
}
return lhsType.typeIdSet.isAssignableFrom(rhsType.typeIdSet);
} | if (isLhsAService && Symbols.isResource(lhsFunc.symbol)) { | public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) {
// A non-isolated object cannot satisfy an isolated object type.
if (Symbols.isFlagOn(lhsType.flags, Flags.ISOLATED) && !Symbols.isFlagOn(rhsType.flags, Flags.ISOLATED)) {
    return false;
}
BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;
BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;
List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs;
List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;
int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);
int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);
// A service object type only accepts service objects.
boolean isLhsAService = Symbols.isService(lhsStructSymbol);
if (isLhsAService && !Symbols.isService(rhsStructSymbol)) {
    return false;
}
// The source must supply at least the fields and methods the target requires.
if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {
    return false;
}
// Any private member on the target rules out structural equivalence.
for (BField bField : lhsType.fields.values()) {
    if (Symbols.isPrivate(bField.symbol)) {
        return false;
    }
}
for (BAttachedFunction func : lhsFuncs) {
    if (Symbols.isPrivate(func.symbol)) {
        return false;
    }
}
// Field-wise check: same visibility region, assignable (covariant) field types.
for (BField lhsField : lhsType.fields.values()) {
    BField rhsField = rhsType.fields.get(lhsField.name.value);
    if (rhsField == null ||
            !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) ||
            !isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
        return false;
    }
}
for (BAttachedFunction lhsFunc : lhsFuncs) {
    if (lhsFunc == lhsStructSymbol.initializerFunc) {
        continue;
    }
    // NOTE(review): resource methods of a service-typed LHS are skipped here;
    // confirm whether the isLhsAService guard is actually needed.
    if (isLhsAService && Symbols.isResource(lhsFunc.symbol)) {
        continue;
    }
    BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);
    if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {
        return false;
    }
    // remote-ness is part of the method's contract and must match exactly.
    if (Symbols.isRemote(lhsFunc.symbol) != Symbols.isRemote(rhsFunc.symbol)) {
        return false;
    }
}
return lhsType.typeIdSet.isAssignableFrom(rhsType.typeIdSet);
} | class Types {
// Per-CompilerContext cache key so each compilation reuses one Types instance.
private static final CompilerContext.Key<Types> TYPES_KEY =
        new CompilerContext.Key<>();
private final ResolvedTypeBuilder typeBuilder;
private SymbolTable symTable;
private SymbolResolver symResolver;
private BLangDiagnosticLog dlog;
private Names names;
private int finiteTypeCount = 0;
// Union of the built-in xml element/comment/PI/text subtypes.
private BUnionType expandedXMLBuiltinSubtypes;
/**
 * Returns the Types instance registered in the given context, creating and
 * registering one on first use.
 */
public static Types getInstance(CompilerContext context) {
    Types types = context.get(TYPES_KEY);
    if (types == null) {
        types = new Types(context);
    }
    return types;
}
public Types(CompilerContext context) {
    // Register this instance in the context before resolving collaborators.
    context.put(TYPES_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.names = Names.getInstance(context);
    this.expandedXMLBuiltinSubtypes = BUnionType.create(null,
            symTable.xmlElementType, symTable.xmlCommentType,
            symTable.xmlPIType, symTable.xmlTextType);
    this.typeBuilder = new ResolvedTypeBuilder();
}
/**
 * Type-checks each inferred type against the corresponding expected type.
 * Missing expected types default to {@code noType} (no constraint).
 *
 * @param node the expression being checked (used for error positions)
 * @param actualTypes inferred types, one per result
 * @param expTypes expected types; may be shorter than {@code actualTypes}
 * @return the resolved type for each result
 */
public List<BType> checkTypes(BLangExpression node,
                              List<BType> actualTypes,
                              List<BType> expTypes) {
    List<BType> resTypes = new ArrayList<>();
    int index = 0;
    for (BType actualType : actualTypes) {
        BType expType = index < expTypes.size() ? expTypes.get(index) : symTable.noType;
        resTypes.add(checkType(node, actualType, expType));
        index++;
    }
    return resTypes;
}
/**
 * Checks {@code actualType} against {@code expType} with the default
 * incompatible-types diagnostic.
 */
public BType checkType(BLangExpression node,
                       BType actualType,
                       BType expType) {
    return checkType(node, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
/**
 * Checks the expression's actual type against the expected type, records the
 * result on the expression, and sets an implicit cast when required.
 */
public BType checkType(BLangExpression expr,
                       BType actualType,
                       BType expType,
                       DiagnosticCode diagCode) {
    expr.type = checkType(expr.pos, actualType, expType, diagCode);
    if (expr.type.tag == TypeTags.SEMANTIC_ERROR) {
        return expr.type;
    }
    setImplicitCastExpr(expr, actualType, expType);
    return expr.type;
}
/**
 * Core check: propagates existing semantic errors, treats a NONE expected type
 * as unconstrained, and logs {@code diagCode} when the types are incompatible.
 */
public BType checkType(Location pos,
                       BType actualType,
                       BType expType,
                       DiagnosticCode diagCode) {
    if (expType.tag == TypeTags.SEMANTIC_ERROR) {
        return expType;
    } else if (expType.tag == TypeTags.NONE) {
        return actualType;
    } else if (actualType.tag == TypeTags.SEMANTIC_ERROR) {
        return actualType;
    } else if (isAssignable(actualType, expType)) {
        return actualType;
    }
    dlog.error(pos, diagCode, expType, actualType);
    return symTable.semanticError;
}
/**
 * Returns true when the expected type context is JSON: either the type itself
 * is json, or it is a union with a json member.
 */
public boolean isJSONContext(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.JSON) {
                return true;
            }
        }
        return false;
    }
    return type.tag == TypeTags.JSON;
}
/**
 * Returns true when member access on the type is lax-typed: json, xml,
 * xml:Element, a map of a lax type, or a union whose members are all lax.
 */
public boolean isLax(BType type) {
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            return true;
        case TypeTags.MAP:
            return isLax(((BMapType) type).constraint);
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (!isLax(memberType)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether the two types are the same type.
 */
public boolean isSameType(BType source, BType target) {
    return isSameType(source, target, new HashSet<>());
}
private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    // Assume sameness for a pair already under comparison so recursive types terminate.
    TypePair pair = new TypePair(source, target);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);
    return target.accept(sameTypeVisitor, source);
}
/**
 * Returns true for the basic value types: boolean, byte, decimal, float, int
 * (including its signed/unsigned subtypes), and string (including char).
 */
public boolean isValueType(BType type) {
    int tag = type.tag;
    return tag == TypeTags.BOOLEAN
            || tag == TypeTags.BYTE
            || tag == TypeTags.DECIMAL
            || tag == TypeTags.FLOAT
            || tag == TypeTags.INT
            || tag == TypeTags.STRING
            || tag == TypeTags.SIGNED32_INT
            || tag == TypeTags.SIGNED16_INT
            || tag == TypeTags.SIGNED8_INT
            || tag == TypeTags.UNSIGNED32_INT
            || tag == TypeTags.UNSIGNED16_INT
            || tag == TypeTags.UNSIGNED8_INT
            || tag == TypeTags.CHAR_STRING;
}
// True for the basic numeric types. Relies on numeric tags being ordered
// before STRING in TypeTags — NOTE(review): confirm if tag order ever changes.
boolean isBasicNumericType(BType type) {
    return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);
}
// True when at least one value in the finite type's value space is numeric.
boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {
    return finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type));
}
/**
 * Returns true when the type is error, or a union containing (possibly nested)
 * an error member.
 */
public boolean containsErrorType(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (containsErrorType(memberType)) {
                return true;
            }
        }
        return false;
    }
    return type.tag == TypeTags.ERROR;
}
/**
 * Returns true when the type is a list type (array or tuple), or a union whose
 * members are all list types.
 */
public boolean isSubTypeOfList(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isSubTypeOfList(memberType)) {
                return false;
            }
        }
        return true;
    }
    return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);
}
/**
 * Resolves the effective type of a list match pattern against the matched
 * expression's type.
 *
 * @param matchExpr the matched expression, or null when there is none
 * @param listMatchPatternType the tuple type inferred from the pattern itself
 * @return the narrowed pattern type, or noType when the pattern cannot match
 */
public BType resolvePatternTypeFromMatchExpr(BLangExpression matchExpr, BTupleType listMatchPatternType) {
    if (matchExpr == null) {
        return listMatchPatternType;
    }
    BType matchExprType = matchExpr.type;
    BType intersectionType = getTypeIntersection(matchExprType, listMatchPatternType);
    if (intersectionType != symTable.semanticError) {
        return intersectionType;
    }
    if (matchExprType.tag == TypeTags.ANYDATA) {
        // An anydata matched value widens every member (and the rest type) to anydata.
        Collections.fill(listMatchPatternType.tupleTypes, symTable.anydataType);
        if (listMatchPatternType.restType != null) {
            listMatchPatternType.restType = symTable.anydataType;
        }
        return listMatchPatternType;
    }
    return symTable.noType;
}
/**
 * Resolves the effective type of a constant match pattern against the matched
 * expression's type, narrowing to finite/union members where possible.
 *
 * @return the narrowed type, or noType when the pattern cannot match
 */
public BType resolvePatternTypeFromMatchExpr(BLangExpression matchExpr, BLangExpression constPatternExpr) {
    if (matchExpr == null) {
        // No matched expression: fall back to the pattern's own type.
        if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            return ((BLangSimpleVarRef) constPatternExpr).symbol.type;
        } else {
            return constPatternExpr.type;
        }
    }
    BType matchExprType = matchExpr.type;
    BType constMatchPatternExprType = constPatternExpr.type;
    if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef constVarRef = (BLangSimpleVarRef) constPatternExpr;
        if (constVarRef.symbol == null) {
            return symTable.noType;
        }
        BType constVarRefSymbolType = constVarRef.symbol.type;
        if (isAssignable(constVarRefSymbolType, matchExprType)) {
            return constVarRefSymbolType;
        }
        return symTable.noType;
    }
    // From here on the pattern is a literal.
    BLangLiteral constPatternLiteral = (BLangLiteral) constPatternExpr;
    if (containsAnyType(constMatchPatternExprType)) {
        return matchExprType;
    } else if (containsAnyType(matchExprType)) {
        return constMatchPatternExprType;
    }
    // A byte matched type wins over an int literal pattern.
    if (matchExprType.tag == TypeTags.BYTE && constMatchPatternExprType.tag == TypeTags.INT) {
        return matchExprType;
    }
    if (isAssignable(constMatchPatternExprType, matchExprType)) {
        return constMatchPatternExprType;
    }
    if (matchExprType.tag == TypeTags.UNION) {
        // Narrow to the first union member the literal can belong to.
        for (BType memberType : ((BUnionType) matchExprType).getMemberTypes()) {
            if (memberType.tag == TypeTags.FINITE) {
                if (isAssignableToFiniteType(memberType, constPatternLiteral)) {
                    return memberType;
                }
            } else {
                if (isAssignable(constMatchPatternExprType, matchExprType)) {
                    return constMatchPatternExprType;
                }
            }
        }
    } else if (matchExprType.tag == TypeTags.FINITE) {
        if (isAssignableToFiniteType(matchExprType, constPatternLiteral)) {
            return matchExprType;
        }
    }
    return symTable.noType;
}
/**
 * Returns true when the type is `any`, or a union with an `any` member.
 */
private boolean containsAnyType(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.ANY) {
                return true;
            }
        }
        return false;
    }
    return type.tag == TypeTags.ANY;
}
/**
 * Merges two types: an `any`-containing first type is replaced by the second,
 * equal basic types collapse to the first, and anything else becomes a union.
 */
public BType mergeTypes(BType typeFirst, BType typeSecond) {
    if (containsAnyType(typeFirst)) {
        return typeSecond;
    }
    if (isSameBasicType(typeFirst, typeSecond)) {
        return typeFirst;
    }
    return BUnionType.create(null, typeFirst, typeSecond);
}
/**
 * Returns true when the type is a mapping type (map or record), or a union
 * whose members are all mapping types.
 */
public boolean isSubTypeOfMapping(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isSubTypeOfMapping(memberType)) {
                return false;
            }
        }
        return true;
    }
    return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);
}
/**
 * Checks whether the type (or every member of a union) has the given base tag.
 */
public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == baseTypeTag;
    }
    // NOTE(review): unions are accepted unconditionally for xml base tags,
    // without inspecting the members — confirm this is intentional.
    if (TypeTags.isXMLTypeTag(baseTypeTag)) {
        return true;
    }
    return ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag);
}
/**
 * Checks whether source type is assignable to the target type.
 * <p>
 * Source type is assignable to the target type if,
 * 1) the target type is any and the source type is not a value type.
 * 2) there exists an implicit cast symbol from source to target.
 * 3) both types are JSON and the target constraint is no type.
 * 4) both types are array type and both array types are assignable.
 * 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.
 *
 * @param source type.
 * @param target type.
 * @return true if source type is assignable to the target type.
 */
public boolean isAssignable(BType source, BType target) {
    return isAssignable(source, target, new HashSet<>());
}
// Stamping is permitted when the two types are assignable or stamp-equivalent
// in either direction.
boolean isStampingAllowed(BType source, BType target) {
    return (isAssignable(source, target) || isAssignable(target, source) ||
            checkTypeEquivalencyForStamping(source, target) || checkTypeEquivalencyForStamping(target, source));
}
/**
 * Structural equivalence used only by the stamping checks; dispatches on the
 * target's tag (e.g. a json target accepts records and maps as well).
 */
private boolean checkTypeEquivalencyForStamping(BType source, BType target) {
    if (target.tag == TypeTags.RECORD) {
        if (source.tag == TypeTags.RECORD) {
            // Seed the pair so recursion on cyclic record types terminates.
            TypePair pair = new TypePair(source, target);
            Set<TypePair> unresolvedTypes = new HashSet<>();
            unresolvedTypes.add(pair);
            return checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes);
        } else if (source.tag == TypeTags.MAP) {
            int mapConstraintTypeTag = ((BMapType) source).constraint.tag;
            // A sealed record target requires every field to share the map's constraint tag.
            if ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) &&
                    ((BRecordType) target).sealed) {
                for (BField field : ((BStructureType) target).getFields().values()) {
                    if (field.getType().tag != mapConstraintTypeTag) {
                        return false;
                    }
                }
            }
            return true;
        }
    } else if (target.tag == TypeTags.JSON) {
        return source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP;
    } else if (target.tag == TypeTags.MAP) {
        if (source.tag == TypeTags.MAP) {
            return isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint());
        } else if (source.tag == TypeTags.UNION) {
            return checkUnionEquivalencyForStamping(source, target);
        }
    } else if (target.tag == TypeTags.ARRAY) {
        if (source.tag == TypeTags.JSON) {
            return true;
        } else if (source.tag == TypeTags.TUPLE) {
            // Every tuple member must be stampable to the array's element type.
            BType arrayElementType = ((BArrayType) target).eType;
            for (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) {
                if (!isStampingAllowed(tupleMemberType, arrayElementType)) {
                    return false;
                }
            }
            return true;
        } else if (source.tag == TypeTags.ARRAY) {
            return checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType);
        }
    } else if (target.tag == TypeTags.UNION) {
        return checkUnionEquivalencyForStamping(source, target);
    } else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) {
        return checkTupleEquivalencyForStamping(source, target);
    }
    return false;
}
// Record-to-record stamping equivalence: visibility, field counts and sealed-ness
// must line up before the field-wise check runs.
private boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType,
                                                  Set<TypePair> unresolvedTypes) {
    // XOR on the PUBLIC flag: both records must agree on public visibility.
    if (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) {
        return false;
    }
    // A private LHS is only reachable from its own package.
    if (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) {
        return false;
    }
    if (lhsType.fields.size() > rhsType.fields.size()) {
        return false;
    }
    if (lhsType.sealed && !rhsType.sealed) {
        return false;
    }
    return checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes);
}
// Field-wise stamping check: every LHS field must exist on the RHS with a
// stampable type, and extra RHS fields must be stampable to the LHS rest type.
private boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType,
                                                 Set<TypePair> unresolvedTypes) {
    for (BField lhsField : lhsType.fields.values()) {
        BField rhsField = rhsType.fields.get(lhsField.name.value);
        if (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) {
            return false;
        }
    }
    for (BField rhsField : rhsType.fields.values()) {
        BField lhsField = lhsType.fields.get(rhsField.name.value);
        if (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) {
            return false;
        }
    }
    return true;
}
/**
 * Stamping counterpart of union compatibility: every source member must be
 * stampable to at least one target member. Non-union inputs are treated as
 * single-member sets.
 */
private boolean checkUnionEquivalencyForStamping(BType source, BType target) {
    Set<BType> sourceTypes = new LinkedHashSet<>();
    Set<BType> targetTypes = new LinkedHashSet<>();
    if (source.tag == TypeTags.UNION) {
        sourceTypes.addAll(((BUnionType) source).getMemberTypes());
    } else {
        sourceTypes.add(source);
    }
    if (target.tag == TypeTags.UNION) {
        targetTypes.addAll(((BUnionType) target).getMemberTypes());
    } else {
        targetTypes.add(target);
    }
    for (BType sourceMember : sourceTypes) {
        boolean foundMatch = false;
        for (BType targetMember : targetTypes) {
            if (isStampingAllowed(sourceMember, targetMember)) {
                foundMatch = true;
                break;
            }
        }
        if (!foundMatch) {
            return false;
        }
    }
    return true;
}
/**
 * Stamping equivalence for tuples: same arity and pairwise stampable members.
 */
private boolean checkTupleEquivalencyForStamping(BType source, BType target) {
    if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
        return false;
    }
    List<BType> lhsMemberTypes = ((BTupleType) target).tupleTypes;
    List<BType> rhsMemberTypes = ((BTupleType) source).tupleTypes;
    if (lhsMemberTypes.size() != rhsMemberTypes.size()) {
        return false;
    }
    for (int i = 0; i < lhsMemberTypes.size(); i++) {
        if (!isStampingAllowed(rhsMemberTypes.get(i), lhsMemberTypes.get(i))) {
            return false;
        }
    }
    return true;
}
/**
 * Core assignability relation. The dispatch order of the tag checks below is
 * significant. {@code unresolvedTypes} carries the (source, target) pairs
 * currently being checked so recursive/cyclic types terminate.
 */
private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (isSameType(source, target)) {
        return true;
    }
    int sourceTag = source.tag;
    int targetTag = target.tag;
    // A readonly target needs a readonly (or inherently immutable) source,
    // except for parameterized sources.
    if (!Symbols.isFlagOn(source.flags, Flags.PARAMETERIZED) &&
            !isInherentlyImmutableType(target) && Symbols.isFlagOn(target.flags, Flags.READONLY) &&
            !isInherentlyImmutableType(source) && !Symbols.isFlagOn(source.flags, Flags.READONLY)) {
        return false;
    }
    // Intersections are compared via their effective types.
    if (sourceTag == TypeTags.INTERSECTION) {
        return isAssignable(((BIntersectionType) source).effectiveType,
                targetTag != TypeTags.INTERSECTION ? target :
                        ((BIntersectionType) target).effectiveType, unresolvedTypes);
    }
    if (targetTag == TypeTags.INTERSECTION) {
        return isAssignable(source, ((BIntersectionType) target).effectiveType, unresolvedTypes);
    }
    if (sourceTag == TypeTags.PARAMETERIZED_TYPE) {
        return isParameterizedTypeAssignable(source, target, unresolvedTypes);
    }
    if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {
        return true;
    }
    if (TypeTags.isXMLTypeTag(sourceTag) && TypeTags.isXMLTypeTag(targetTag)) {
        return isXMLTypeAssignable(source, target, unresolvedTypes);
    }
    // string:Char and xml:Text widenings.
    if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {
        return true;
    }
    if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.XML_TEXT) {
        return true;
    }
    if (sourceTag == TypeTags.STRING && targetTag == TypeTags.XML_TEXT) {
        return true;
    }
    if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.STRING) {
        return true;
    }
    if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.CHAR_STRING) {
        return true;
    }
    if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {
        return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);
    } else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {
        // error does not belong to any.
        return false;
    }
    if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {
        return true;
    }
    // any/anydata/readonly targets accept broad classes of error-free sources.
    if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {
        return true;
    }
    if (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && source.isAnydata()) {
        return true;
    }
    if (targetTag == TypeTags.READONLY &&
            (isInherentlyImmutableType(source) || Symbols.isFlagOn(source.flags, Flags.READONLY))) {
        return true;
    }
    if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) source;
        return isAssignableRecordType(recordType, target, unresolvedTypes);
    }
    if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) {
        return isAssignableMapType((BMapType) source, (BRecordType) target);
    }
    if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {
        return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),
                unresolvedTypes);
    }
    if (targetTag == TypeTags.TABLE && sourceTag == TypeTags.TABLE) {
        return isAssignableTableType((BTableType) source, (BTableType) target);
    }
    if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {
        return isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes);
    }
    if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {
        return true;
    }
    if (sourceTag == TypeTags.FINITE) {
        return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);
    }
    if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&
            isAssignableToUnionType(source, target, unresolvedTypes)) {
        return true;
    }
    // json accepts json, and arrays/maps/records of json-compatible members.
    if (targetTag == TypeTags.JSON) {
        if (sourceTag == TypeTags.JSON) {
            return true;
        }
        if (sourceTag == TypeTags.ARRAY) {
            return isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
        }
        if (sourceTag == TypeTags.MAP) {
            return isAssignable(((BMapType) source).constraint, target, unresolvedTypes);
        }
        if (sourceTag == TypeTags.RECORD) {
            return isAssignableRecordType((BRecordType) source, target, unresolvedTypes);
        }
    }
    if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {
        // An unconstrained future target accepts any future.
        if (((BFutureType) target).constraint.tag == TypeTags.NONE) {
            return true;
        }
        return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);
    }
    if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {
        if (((BMapType) target).constraint.tag == TypeTags.ANY &&
                ((BMapType) source).constraint.tag != TypeTags.UNION) {
            return true;
        }
        return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);
    }
    // Structural object/record equivalence.
    if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)
            && (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {
        return checkStructEquivalency(source, target, unresolvedTypes);
    }
    if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {
        return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);
    }
    if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {
        return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);
    }
    if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {
        return isTupleTypeAssignable(source, target, unresolvedTypes);
    }
    if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {
        return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());
    }
    return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&
            isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
// A parameterized source is assignable when its resolved (built) type is; two
// parameterized types must additionally share the same parameter index.
private boolean isParameterizedTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    BType resolvedSourceType = typeBuilder.build(source);
    if (target.tag != TypeTags.PARAMETERIZED_TYPE) {
        return isAssignable(resolvedSourceType, target, unresolvedTypes);
    }
    if (((BParameterizedType) source).paramIndex != ((BParameterizedType) target).paramIndex) {
        return false;
    }
    return isAssignable(resolvedSourceType, typeBuilder.build(target), unresolvedTypes);
}
// A record is assignable to map<T> or json when all fields (and, for open
// records, the rest type) are assignable to the target constraint.
private boolean isAssignableRecordType(BRecordType recordType, BType type, Set<TypePair> unresolvedTypes) {
    TypePair pair = new TypePair(recordType, type);
    if (!unresolvedTypes.add(pair)) {
        // Pair already being checked higher up the recursion: assume assignable.
        return true;
    }
    BType targetType;
    switch (type.tag) {
        case TypeTags.MAP:
            targetType = ((BMapType) type).constraint;
            break;
        case TypeTags.JSON:
            targetType = type;
            break;
        default:
            throw new IllegalArgumentException("Incompatible target type: " + type.toString());
    }
    return recordFieldsAssignableToType(recordType, targetType, unresolvedTypes);
}
/**
 * Checks that every field of the record — and the rest type, when the record
 * is open — is assignable to the given target type.
 */
private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType,
                                             Set<TypePair> unresolvedTypes) {
    for (BField field : recordType.fields.values()) {
        if (!isAssignable(field.type, targetType, unresolvedTypes)) {
            return false;
        }
    }
    if (recordType.sealed) {
        return true;
    }
    return isAssignable(recordType.restFieldType, targetType, unresolvedTypes);
}
/**
 * Table assignability: row constraints must be assignable, then the target's
 * key constraint (or key field list) must be satisfied by the source's keys.
 */
private boolean isAssignableTableType(BTableType sourceTableType, BTableType targetTableType) {
    if (!isAssignable(sourceTableType.constraint, targetTableType.constraint)) {
        return false;
    }
    // The target imposes no key requirements.
    if (targetTableType.keyTypeConstraint == null && targetTableType.fieldNameList == null) {
        return true;
    }
    if (targetTableType.keyTypeConstraint != null) {
        if (sourceTableType.keyTypeConstraint != null &&
                (isAssignable(sourceTableType.keyTypeConstraint, targetTableType.keyTypeConstraint))) {
            return true;
        }
        if (sourceTableType.fieldNameList == null) {
            return false;
        }
        // Derive the source's key type from its declared key field names.
        List<BType> fieldTypes = new ArrayList<>();
        sourceTableType.fieldNameList.forEach(field -> fieldTypes
                .add(getTableConstraintField(sourceTableType.constraint, field).type));
        if (fieldTypes.size() == 1) {
            return isAssignable(fieldTypes.get(0), targetTableType.keyTypeConstraint);
        }
        // Multiple key fields form a composite (tuple) key.
        BTupleType tupleType = new BTupleType(fieldTypes);
        return isAssignable(tupleType, targetTableType.keyTypeConstraint);
    }
    return targetTableType.fieldNameList.equals(sourceTableType.fieldNameList);
}
/**
 * Looks up the field named {@code fieldName} in a table's row constraint. For a
 * union constraint the field must exist in every member with mutually
 * assignable types; returns null when no such field can be resolved.
 */
BField getTableConstraintField(BType constraintType, String fieldName) {
    switch (constraintType.tag) {
        case TypeTags.RECORD:
            Map<String, BField> fieldList = ((BRecordType) constraintType).getFields();
            return fieldList.get(fieldName);
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) constraintType;
            Set<BType> memTypes = unionType.getMemberTypes();
            List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                    .filter(Objects::nonNull).collect(Collectors.toList());
            // Every union member must declare the field.
            if (fields.size() != memTypes.size()) {
                return null;
            }
            // The field types must be mutually assignable (i.e. effectively the same).
            if (fields.stream().allMatch(field -> isAssignable(field.type, fields.get(0).type) &&
                    isAssignable(fields.get(0).type, field.type))) {
                return fields.get(0);
            }
            break;
        case TypeTags.INTERSECTION:
            return getTableConstraintField(((BIntersectionType) constraintType).effectiveType, fieldName);
    }
    return null;
}
/**
 * A map is assignable to a record only when the record is open, every declared
 * field is optional with compatible readonly-ness and an assignable type, and
 * the map constraint fits the record's rest field type.
 */
private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) {
    if (targetRecType.sealed) {
        return false;
    }
    for (BField field : targetRecType.fields.values()) {
        boolean optionalField = Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL);
        if (!optionalField
                || hasIncompatibleReadOnlyFlags(field.symbol.flags, sourceMapType.flags)
                || !isAssignable(sourceMapType.constraint, field.type)) {
            return false;
        }
    }
    return isAssignable(sourceMapType.constraint, targetRecType.restFieldType);
}
// True when the target demands readonly but the source does not provide it.
private boolean hasIncompatibleReadOnlyFlags(long targetFlags, long sourceFlags) {
    return Symbols.isFlagOn(targetFlags, Flags.READONLY) && !Symbols.isFlagOn(sourceFlags, Flags.READONLY);
}
// Error assignability: the generic error type accepts any error; otherwise the
// detail types must be assignable and the type-id sets compatible.
private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) {
    if (target == symTable.errorType) {
        return true;
    }
    TypePair pair = new TypePair(source, target);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    return isAssignable(source.detailType, target.detailType, unresolvedTypes)
            && target.typeIdSet.isAssignableFrom(source.typeIdSet);
}
// xml assignability: an unconstrained xml target accepts any xml source; a
// constrained target checks the source (or its constraint) against the
// constraint. Other xml subtypes must match tags exactly.
private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) {
    int sourceTag = sourceType.tag;
    int targetTag = targetType.tag;
    if (targetTag == TypeTags.XML) {
        BXMLType target = (BXMLType) targetType;
        if (target.constraint != null) {
            if (TypeTags.isXMLNonSequenceType(sourceTag)) {
                return isAssignable(sourceType, target.constraint, unresolvedTypes);
            }
            BXMLType source = (BXMLType) sourceType;
            return isAssignable(source.constraint, target.constraint, unresolvedTypes);
        }
        return true;
    }
    return sourceTag == targetTag;
}
/**
 * Tuple-to-tuple assignability: arity/rest-type compatibility first, then
 * member-wise assignability; trailing source members beyond the target's fixed
 * members are checked against the target's rest type.
 */
private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
        return false;
    }
    BTupleType lhsTupleType = (BTupleType) target;
    BTupleType rhsTupleType = (BTupleType) source;
    // A source rest type needs a target rest type to land in.
    if (lhsTupleType.restType == null && rhsTupleType.restType != null) {
        return false;
    }
    if (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {
        return false;
    }
    if (lhsTupleType.restType != null && rhsTupleType.restType != null) {
        if (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) {
            return false;
        }
    }
    if (lhsTupleType.tupleTypes.size() > rhsTupleType.tupleTypes.size()) {
        return false;
    }
    for (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) {
        BType lhsType = (lhsTupleType.tupleTypes.size() > i)
                ? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType;
        if (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,
                                                 Set<TypePair> unresolvedTypes) {
    // A fixed-length target array requires an exact member count and no rest type.
    if (target.state != BArrayState.OPEN
            && (source.restType != null || source.tupleTypes.size() != target.size)) {
        return false;
    }
    // Every tuple member type (including the rest type, if any) must be assignable
    // to the array's element type.
    for (BType memberType : source.tupleTypes) {
        if (!isAssignable(memberType, target.eType, unresolvedTypes)) {
            return false;
        }
    }
    return source.restType == null || isAssignable(source.restType, target.eType, unresolvedTypes);
}
private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,
                                                 Set<TypePair> unresolvedTypes) {
    if (!target.tupleTypes.isEmpty()) {
        // An open array has unknown length, so it can never satisfy fixed member slots.
        if (source.state == BArrayState.OPEN) {
            return false;
        }
        // With a rest type the array only needs at least as many elements as fixed
        // members; without one the lengths must match exactly.
        if (target.restType != null && target.tupleTypes.size() > source.size) {
            return false;
        }
        if (target.restType == null && target.tupleTypes.size() != source.size) {
            return false;
        }
    }
    // The array's element type must be assignable to every member type and the rest type.
    for (BType memberType : target.tupleTypes) {
        if (!isAssignable(source.eType, memberType, unresolvedTypes)) {
            return false;
        }
    }
    return target.restType == null || isAssignable(source.eType, target.restType, unresolvedTypes);
}
private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) {
    BType sourceElementType = source.getElementType();
    // An array is assignable to `json` when its element type is.
    if (target.tag == TypeTags.JSON) {
        return isAssignable(sourceElementType, target, unresolvedTypes);
    }
    if (target.tag != TypeTags.ARRAY) {
        return false;
    }
    BArrayType targetArrayType = (BArrayType) target;
    // A fixed-length target additionally requires matching sizes.
    if (targetArrayType.state != BArrayState.OPEN && targetArrayType.size != source.size) {
        return false;
    }
    return isAssignable(sourceElementType, targetArrayType.getElementType(), unresolvedTypes);
}
// Checks whether function type `source` is assignable to function type `target`.
// Parameters are compared contravariantly (target param assignable to source param),
// except that type-param-annotated target params are compared covariantly; the
// return type is compared covariantly.
private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,
Set<TypePair> unresolvedTypes) {
// An `isolated`/`transactional` mismatch makes the types incompatible regardless of shape.
if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
return false;
}
if (containsTypeParams(target)) {
// Type-param handling requires positional comparison, so arities must match exactly.
if (source.paramTypes.size() != target.paramTypes.size()) {
return false;
}
for (int i = 0; i < source.paramTypes.size(); i++) {
BType sourceParam = source.paramTypes.get(i);
BType targetParam = target.paramTypes.get(i);
boolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);
if (isTypeParam) {
// Type params are matched covariantly so the param binding can be inferred.
if (!isAssignable(sourceParam, targetParam)) {
return false;
}
} else {
// Regular params are contravariant: the source function must accept at
// least everything the target type promises to accept.
if (!isAssignable(targetParam, sourceParam)) {
return false;
}
}
}
if (source.retType == null && target.retType == null) {
return true;
} else if (source.retType == null || target.retType == null) {
return false;
}
// Return type is covariant.
return isAssignable(source.retType, target.retType, unresolvedTypes);
}
// No type params: reuse the generic shape check with contravariant param comparison.
return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));
}
/**
 * Checks whether a type is inherently immutable: all simple value types plus the
 * listed reference types whose values can never be mutated.
 *
 * @param type the type to check
 * @return true if every value of {@code type} is immutable by construction
 */
public boolean isInherentlyImmutableType(BType type) {
    if (isValueType(type)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.XML_TEXT:
        case TypeTags.FINITE:
        case TypeTags.READONLY:
        case TypeTags.NIL:
        case TypeTags.ERROR:
        case TypeTags.INVOKABLE:
        case TypeTags.TYPEDESC:
        case TypeTags.HANDLE:
            return true;
        default:
            return false;
    }
}
// Convenience overloads delegating to the full check below. "Selectively immutable"
// means the type can have a read-only (`& readonly`) intersection even though it is
// not inherently immutable.
boolean isSelectivelyImmutableType(BType type) {
return isSelectivelyImmutableType(type, false, new HashSet<>(), false);
}
// `forceCheck` re-runs the structural analysis even if an immutable variant was
// already computed for the type.
boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, boolean forceCheck) {
return isSelectivelyImmutableType(type, disallowReadOnlyObjects, new HashSet<>(), forceCheck);
}
// `unresolvedTypes` carries the visited set across recursive calls on cyclic types.
public boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes) {
return isSelectivelyImmutableType(type, false, unresolvedTypes, false);
}
private boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes, boolean forceCheck) {
return isSelectivelyImmutableType(type, false, unresolvedTypes, forceCheck);
}
// Structural check for whether `type` admits a read-only intersection. Walks the
// type's components; a structured type qualifies only if all of its components are
// inherently or selectively immutable (unions need just one qualifying member).
private boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, Set<BType> unresolvedTypes,
boolean forceCheck) {
// Inherently immutable types are not *selectively* immutable, and types that can
// never be immutable are excluded by the marker interface check.
if (isInherentlyImmutableType(type) || !(type instanceof SelectivelyImmutableReferenceType)) {
return false;
}
// A previously computed immutable variant answers the question, unless forced.
if (!forceCheck && ((SelectivelyImmutableReferenceType) type).getImmutableType() != null) {
return true;
}
// Already being analyzed: assume true to terminate recursion on cyclic types.
if (!unresolvedTypes.add(type)) {
return true;
}
switch (type.tag) {
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_COMMENT:
case TypeTags.XML_ELEMENT:
case TypeTags.XML_PI:
return true;
case TypeTags.ARRAY:
BType elementType = ((BArrayType) type).eType;
return isInherentlyImmutableType(elementType) ||
isSelectivelyImmutableType(elementType, unresolvedTypes, forceCheck);
case TypeTags.TUPLE:
// Every member type and the rest type (if any) must qualify.
BTupleType tupleType = (BTupleType) type;
for (BType tupMemType : tupleType.tupleTypes) {
if (!isInherentlyImmutableType(tupMemType) &&
!isSelectivelyImmutableType(tupMemType, unresolvedTypes, forceCheck)) {
return false;
}
}
BType tupRestType = tupleType.restType;
if (tupRestType == null) {
return true;
}
return isInherentlyImmutableType(tupRestType) ||
isSelectivelyImmutableType(tupRestType, unresolvedTypes, forceCheck);
case TypeTags.RECORD:
// Every field type and the rest-field type must qualify.
BRecordType recordType = (BRecordType) type;
for (BField field : recordType.fields.values()) {
BType fieldType = field.type;
if (!isInherentlyImmutableType(fieldType) &&
!isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
return false;
}
}
BType recordRestType = recordType.restFieldType;
if (recordRestType == null || recordRestType == symTable.noType) {
return true;
}
return isInherentlyImmutableType(recordRestType) ||
isSelectivelyImmutableType(recordRestType, unresolvedTypes, forceCheck);
case TypeTags.MAP:
BType constraintType = ((BMapType) type).constraint;
return isInherentlyImmutableType(constraintType) ||
isSelectivelyImmutableType(constraintType, unresolvedTypes, forceCheck);
case TypeTags.OBJECT:
BObjectType objectType = (BObjectType) type;
// Classes qualify only when declared `readonly` (and read-only objects are allowed here).
if (Symbols.isFlagOn(objectType.tsymbol.flags, Flags.CLASS) &&
(disallowReadOnlyObjects || !Symbols.isFlagOn(objectType.flags, Flags.READONLY))) {
return false;
}
for (BField field : objectType.fields.values()) {
BType fieldType = field.type;
if (!isInherentlyImmutableType(fieldType) &&
!isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
return false;
}
}
return true;
case TypeTags.TABLE:
BType tableConstraintType = ((BTableType) type).constraint;
return isInherentlyImmutableType(tableConstraintType) ||
isSelectivelyImmutableType(tableConstraintType, unresolvedTypes, forceCheck);
case TypeTags.UNION:
// A union qualifies if at least one member has a read-only intersection.
boolean readonlyIntersectionExists = false;
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (isInherentlyImmutableType(memberType) ||
isSelectivelyImmutableType(memberType, disallowReadOnlyObjects, unresolvedTypes,
forceCheck)) {
readonlyIntersectionExists = true;
}
}
return readonlyIntersectionExists;
case TypeTags.INTERSECTION:
return isSelectivelyImmutableType(((BIntersectionType) type).effectiveType, false, unresolvedTypes,
forceCheck);
}
return false;
}
private boolean containsTypeParams(BInvokableType type) {
    // A function type contains type params if any parameter type (recursing into
    // function-typed parameters) or the return type is a type param.
    for (BType paramType : type.paramTypes) {
        if (paramType.tag == TypeTags.FUNCTION_POINTER) {
            if (containsTypeParams((BInvokableType) paramType)) {
                return true;
            }
        } else if (TypeParamAnalyzer.isTypeParam(paramType)) {
            return true;
        }
    }
    if (type.retType.tag == TypeTags.FUNCTION_POINTER) {
        return containsTypeParams((BInvokableType) type.retType);
    }
    return TypeParamAnalyzer.isTypeParam(type.retType);
}
// Checks same-ness of two function types by delegating to the generic shape check
// with strict same-type equality per parameter.
private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) {
return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);
}
// Compares the shapes of two function types: parameter-by-parameter (via the supplied
// predicate), rest parameter, and return type.
private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,
Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) {
if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
return false;
}
if (source.paramTypes.size() != target.paramTypes.size()) {
return false;
}
for (int i = 0; i < source.paramTypes.size(); i++) {
if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {
return false;
}
}
// Both must have a rest parameter or neither may.
if ((source.restType != null && target.restType == null) ||
target.restType != null && source.restType == null) {
return false;
} else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {
return false;
}
if (source.retType == null && target.retType == null) {
return true;
} else if (source.retType == null || target.retType == null) {
return false;
}
// NOTE(review): the return type is compared with isAssignable rather than the
// supplied `equality` predicate, so same-type checks via this method are not
// symmetric in the return position — confirm this asymmetry is intended.
return isAssignable(source.retType, target.retType, unresolvedTypes);
}
private boolean hasIncompatibleIsolatedFlags(BInvokableType source, BInvokableType target) {
    // An `isolated` target demands an `isolated` source; the reverse is fine.
    boolean targetIsolated = Symbols.isFlagOn(target.flags, Flags.ISOLATED);
    boolean sourceIsolated = Symbols.isFlagOn(source.flags, Flags.ISOLATED);
    return targetIsolated && !sourceIsolated;
}
private boolean hasIncompatibleTransactionalFlags(BInvokableType source, BInvokableType target) {
    // A `transactional` source may only be used where the target is also `transactional`.
    boolean sourceTransactional = Symbols.isFlagOn(source.flags, Flags.TRANSACTIONAL);
    boolean targetTransactional = Symbols.isFlagOn(target.flags, Flags.TRANSACTIONAL);
    return sourceTransactional && !targetTransactional;
}
/**
 * Checks whether two array types are the same: same element type, and either both
 * open or both fixed with equal sizes.
 */
public boolean isSameArrayType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (target.tag != TypeTags.ARRAY || source.tag != TypeTags.ARRAY) {
        return false;
    }
    BArrayType targetArray = (BArrayType) target;
    BArrayType sourceArray = (BArrayType) source;
    boolean sameElementTypes = isSameType(targetArray.eType, sourceArray.eType, unresolvedTypes);
    if (targetArray.state == BArrayState.OPEN) {
        return (sourceArray.state == BArrayState.OPEN) && sameElementTypes;
    }
    return sameElementTypes && checkSealedArraySizeEquality(sourceArray, targetArray);
}
// Fixed-length (sealed) arrays match only when their declared sizes are equal.
public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {
return lhsArrayType.size == rhsArrayType.size;
}
// Entry point for structural equivalence of object/record types; starts with a
// fresh visited-pair set for cycle detection.
public boolean checkStructEquivalency(BType rhsType, BType lhsType) {
return checkStructEquivalency(rhsType, lhsType, new HashSet<>());
}
private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) {
    // Assume equivalence for a pair already under analysis, terminating recursion
    // over cyclic type references. Set.add returns false when already present.
    TypePair pair = new TypePair(rhsType, lhsType);
    if (!unresolvedTypes.add(pair)) {
        return true;
    }
    // Only object-vs-object and record-vs-record comparisons are meaningful.
    if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {
        return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);
    }
    if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
        return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);
    }
    return false;
}
// Counts an object's attached methods, excluding the initializer since it is not a
// user-callable method for equivalence purposes.
private int getObjectFuncCount(BObjectTypeSymbol sym) {
if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {
return sym.attachedFuncs.size() - 1;
}
return sym.attachedFuncs.size();
}
// Structural equivalence for records: a closed (sealed) LHS cannot accept an open
// RHS; open records additionally require rest-field assignability; then every field
// is checked pairwise.
public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) {
if (lhsType.sealed && !rhsType.sealed) {
return false;
}
if (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) {
return false;
}
return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);
}
// Infers and sets the loop-variable type (`varType`), the iterator `next()` record
// type (`resultType`) and its nilable variant for a `foreach` statement, based on
// the static type of the iterated collection.
public void setForeachTypedBindingPatternType(BLangForeach foreachNode) {
BType collectionType = foreachNode.collection.type;
BType varType;
switch (collectionType.tag) {
case TypeTags.STRING:
varType = symTable.stringType;
break;
case TypeTags.ARRAY:
BArrayType arrayType = (BArrayType) collectionType;
varType = arrayType.eType;
break;
case TypeTags.TUPLE:
// The loop variable is the union of all member types (plus rest type, if any).
BTupleType tupleType = (BTupleType) collectionType;
LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
if (tupleType.restType != null) {
tupleTypes.add(tupleType.restType);
}
varType = tupleTypes.size() == 1 ?
tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
break;
case TypeTags.MAP:
BMapType bMapType = (BMapType) collectionType;
varType = bMapType.constraint;
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) collectionType;
varType = inferRecordFieldType(recordType);
break;
case TypeTags.XML:
varType = BUnionType.create(null, symTable.xmlType, symTable.stringType);
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) collectionType;
varType = tableType.constraint;
break;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) collectionType;
if (streamType.constraint.tag == TypeTags.NONE) {
varType = symTable.anydataType;
break;
}
varType = streamType.constraint;
// An error-bearing stream cannot be iterated with a plain value binding.
if (streamType.error != null) {
BType actualType = BUnionType.create(null, varType, streamType.error);
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
varType, actualType);
}
break;
case TypeTags.OBJECT:
// Objects are iterable only when they expose a conforming iterator protocol.
BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
if (nextMethodReturnType != null) {
foreachNode.resultType = getRecordType(nextMethodReturnType);
BType valueType = (foreachNode.resultType != null)
? ((BRecordType) foreachNode.resultType).fields.get("value").type : null;
BType errorType = getErrorType(nextMethodReturnType);
if (errorType != null) {
BType actualType = BUnionType.create(null, valueType, errorType);
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
valueType, actualType);
}
foreachNode.nillableResultType = nextMethodReturnType;
foreachNode.varType = valueType;
return;
}
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
// NOTE(review): no break above — after reporting the error this falls through
// to the SEMANTIC_ERROR handling below; appears intentional, confirm.
case TypeTags.SEMANTIC_ERROR:
foreachNode.varType = symTable.semanticError;
foreachNode.resultType = symTable.semanticError;
foreachNode.nillableResultType = symTable.semanticError;
return;
default:
foreachNode.varType = symTable.semanticError;
foreachNode.resultType = symTable.semanticError;
foreachNode.nillableResultType = symTable.semanticError;
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
collectionType);
return;
}
// For built-in iterables, derive the result/nilable-result types from the lang-lib
// `iterator()` method's `next()` return type.
BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
BUnionType nextMethodReturnType =
(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
foreachNode.varType = varType;
foreachNode.resultType = getRecordType(nextMethodReturnType);
foreachNode.nillableResultType = nextMethodReturnType;
}
// Query-expression analogue of setForeachTypedBindingPatternType: infers and sets
// the binding-variable type and iterator result types for a `from`/`join` input clause.
public void setInputClauseTypedBindingPatternType(BLangInputClause bLangInputClause) {
if (bLangInputClause.collection == null) {
// Nothing to infer when the collection expression itself failed to parse/resolve.
return;
}
BType collectionType = bLangInputClause.collection.type;
BType varType;
switch (collectionType.tag) {
case TypeTags.STRING:
varType = symTable.stringType;
break;
case TypeTags.ARRAY:
BArrayType arrayType = (BArrayType) collectionType;
varType = arrayType.eType;
break;
case TypeTags.TUPLE:
// The binding variable is the union of all member types (plus rest type, if any).
BTupleType tupleType = (BTupleType) collectionType;
LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
if (tupleType.restType != null) {
tupleTypes.add(tupleType.restType);
}
varType = tupleTypes.size() == 1 ?
tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
break;
case TypeTags.MAP:
BMapType bMapType = (BMapType) collectionType;
varType = bMapType.constraint;
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) collectionType;
varType = inferRecordFieldType(recordType);
break;
case TypeTags.XML:
varType = BUnionType.create(null, symTable.xmlType, symTable.stringType);
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) collectionType;
varType = tableType.constraint;
break;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) collectionType;
if (streamType.constraint.tag == TypeTags.NONE) {
varType = symTable.anydataType;
break;
}
varType = streamType.constraint;
break;
case TypeTags.OBJECT:
// Objects are iterable only when they expose a conforming iterator protocol.
BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
if (nextMethodReturnType != null) {
bLangInputClause.resultType = getRecordType(nextMethodReturnType);
bLangInputClause.nillableResultType = nextMethodReturnType;
bLangInputClause.varType = ((BRecordType) bLangInputClause.resultType).fields.get("value").type;
return;
}
dlog.error(bLangInputClause.collection.pos,
DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
// NOTE(review): no break above — after reporting the error this falls through
// to the SEMANTIC_ERROR handling below; appears intentional, confirm.
case TypeTags.SEMANTIC_ERROR:
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
return;
default:
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
collectionType);
return;
}
// For built-in iterables, derive the result/nilable-result types from the lang-lib
// `iterator()` method's `next()` return type.
BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
BUnionType nextMethodReturnType =
(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
bLangInputClause.varType = varType;
bLangInputClause.resultType = getRecordType(nextMethodReturnType);
bLangInputClause.nillableResultType = nextMethodReturnType;
}
// Looks up the object's `iterator()` method and, if present, derives the `next()`
// return union from it; returns null when the object is not iterable.
public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {
BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;
for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {
if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
return getVarTypeFromIteratorFunc(func);
}
}
return null;
}
// Validates that the candidate `iterator()` method takes no parameters before
// inspecting its return type; returns null otherwise.
private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {
if (!candidateIteratorFunc.type.paramTypes.isEmpty()) {
return null;
}
BType returnType = candidateIteratorFunc.type.retType;
return getVarTypeFromIteratorFuncReturnType(returnType);
}
/**
 * Given the return type of an `iterator()` method, finds the iterator object's
 * `next` method and derives its return union; returns null when the return type
 * is not an object or has no `next` method.
 */
public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {
    if (returnType.tag != TypeTags.OBJECT) {
        return null;
    }
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {
            return getVarTypeFromNextFunc(attachedFunc);
        }
    }
    return null;
}
private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {
    // A conforming `next` method takes no parameters.
    if (!nextFunc.type.paramTypes.isEmpty()) {
        return null;
    }
    // The return type must have the expected `record {| T value; |}? (|error)` shape.
    BType returnType = nextFunc.type.retType;
    return checkNextFuncReturnType(returnType) ? (BUnionType) returnType : null;
}
// Validates that a `next()` return type has the iterator protocol shape: a union
// that contains nil, optionally errors, and exactly one record member of the form
// expected by checkRecordTypeInNextFuncReturnType.
private boolean checkNextFuncReturnType(BType returnType) {
if (returnType.tag != TypeTags.UNION) {
return false;
}
List<BType> types = new ArrayList<>(((BUnionType) returnType).getMemberTypes());
// Nil must be a member (iteration completion); removeIf doubles as the presence check.
if (!types.removeIf(type -> type.tag == TypeTags.NIL)) {
return false;
}
// Error members are permitted; drop them before checking what remains.
types.removeIf(type -> type.tag == TypeTags.ERROR);
if (types.size() != 1) {
return false;
}
if (types.get(0).tag != TypeTags.RECORD) {
return false;
}
BRecordType recordType = (BRecordType) types.get(0);
return checkRecordTypeInNextFuncReturnType(recordType);
}
private boolean checkRecordTypeInNextFuncReturnType(BRecordType recordType) {
    // The iterator result record must be closed and contain exactly one field: `value`.
    if (!recordType.sealed || recordType.fields.size() != 1) {
        return false;
    }
    return recordType.fields.containsKey(BLangCompilerConstants.VALUE_FIELD);
}
private BRecordType getRecordType(BUnionType type) {
    // Returns the first record-typed member of the union, or null when none exists.
    for (BType memberType : type.getMemberTypes()) {
        if (memberType.tag == TypeTags.RECORD) {
            return (BRecordType) memberType;
        }
    }
    return null;
}
/**
 * Returns the first error-typed member found in the union, searching nested
 * unions depth-first; null when the union contains no error member.
 */
public BErrorType getErrorType(BUnionType type) {
    for (BType memberType : type.getMemberTypes()) {
        if (memberType.tag == TypeTags.ERROR) {
            return (BErrorType) memberType;
        }
        if (memberType.tag == TypeTags.UNION) {
            BErrorType nested = getErrorType((BUnionType) memberType);
            if (nested != null) {
                return nested;
            }
        }
    }
    return null;
}
// Returns the declared return type of the iterator object's `next` method.
// Callers must pass an iterator type known to define `next`; a missing method
// triggers the requireNonNull failure.
public BType getResultTypeOfNextInvocation(BObjectType iteratorType) {
BAttachedFunction nextFunc = getAttachedFuncFromObject(iteratorType, BLangCompilerConstants.NEXT_FUNC);
return Objects.requireNonNull(nextFunc).type.retType;
}
/**
 * Finds the attached method with the given name on an object type.
 *
 * @param objectType the object type to search
 * @param funcName the method name to look for
 * @return the matching attached function, or null when the object has none
 */
public BAttachedFunction getAttachedFuncFromObject(BObjectType objectType, String funcName) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if (funcName.equals(attachedFunc.funcName.value)) {
            return attachedFunc;
        }
    }
    return null;
}
// Infers a single member type covering all of a record's field types (plus the rest
// field type for open records), collapsing fields that are subtypes of what has been
// accumulated so far. Returns a union when multiple distinct types remain.
public BType inferRecordFieldType(BRecordType recordType) {
Map<String, BField> fields = recordType.fields;
BUnionType unionType = BUnionType.create(null);
if (!recordType.sealed) {
unionType.add(recordType.restFieldType);
}
for (BField field : fields.values()) {
// Skip fields already covered by the accumulated union.
if (isAssignable(field.type, unionType)) {
continue;
}
// If the accumulated union is itself covered by this field's type, restart
// the union from scratch with only this broader type.
if (isAssignable(unionType, field.type)) {
unionType = BUnionType.create(null);
}
unionType.add(field.type);
}
if (unionType.getMemberTypes().size() > 1) {
// Multiple distinct member types: give the synthesized union a type symbol.
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
Names.EMPTY, recordType.tsymbol.pkgID, null,
recordType.tsymbol.owner, symTable.builtinPos, VIRTUAL);
return unionType;
}
return unionType.getMemberTypes().iterator().next();
}
/**
* Enum to represent type test result.
*
* @since 1.2.0
*/
enum TypeTestResult {
NOT_FOUND, // no statically known answer; caller must continue with further analysis
TRUE, // the type test is statically known to succeed
FALSE // the type test is statically known to fail
}
// Determines statically whether a value of `actualType` can be widened to
// `targetType` among compiler-known (built-in) types. Returns TRUE/FALSE when the
// answer is known at compile time, and NOT_FOUND when further analysis is needed.
TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {
int targetTag = targetType.tag;
int actualTag = actualType.tag;
// Both are basic (pre-JSON-tag) types: rule out cross-kind widenings.
if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
// Numeric types never widen to boolean or string.
if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.BOOLEAN:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.STRING:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {
return TypeTestResult.FALSE;
}
break;
}
}
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.STRING:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
case TypeTags.CHAR_STRING:
// Simple values always widen to the top-like types.
if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY ||
targetTag == TypeTags.READONLY) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.ANYDATA:
case TypeTags.TYPEDESC:
if (targetTag == TypeTags.ANY) {
return TypeTestResult.TRUE;
}
break;
default:
}
// A builtin int subtype never needs widening to itself.
if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {
return TypeTestResult.FALSE;
}
// Both sides are int subtypes (or byte): consult the subtype lattice.
if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)
&& (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {
return checkBuiltInIntSubtypeWidenPossible(actualType, targetType);
}
if (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {
return TypeTestResult.TRUE;
}
return TypeTestResult.NOT_FOUND;
}
// Encodes the widening lattice of the builtin integer subtypes: each case lists the
// subtypes that are fully contained in the target's value range, e.g. every
// Signed8/Unsigned8/byte value fits in Signed16.
private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {
int actualTag = actualType.tag;
switch (targetType.tag) {
case TypeTags.INT:
// All integer subtypes and byte widen to int.
if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED32_INT:
if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||
actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED16_INT:
if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED32_INT:
if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED16_INT:
if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.BYTE:
// byte and Unsigned8 have the same value space.
if (actualTag == TypeTags.UNSIGNED8_INT) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED8_INT:
if (actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
}
return TypeTestResult.NOT_FOUND;
}
// Checks whether an implicit (compiler-inserted) conversion exists from `actualType`
// to `targetType`. Note: the method name's spelling ("Implicity") is kept as-is
// since it is part of the public interface.
public boolean isImplicityCastable(BType actualType, BType targetType) {
/* The word Builtin refers for Compiler known types. */
BType newTargetType = targetType;
if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {
// A value assigned into a union/finite target is first boxed to `any`.
newTargetType = symTable.anyType;
} else if (targetType.tag == TypeTags.INTERSECTION) {
newTargetType = ((BIntersectionType) targetType).effectiveType;
}
TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);
if (result != TypeTestResult.NOT_FOUND) {
return result == TypeTestResult.TRUE;
}
// Finite/union-of-finite sources unbox to the basic value target types.
if (isValueType(targetType) &&
(actualType.tag == TypeTags.FINITE ||
(actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()
.anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {
return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT
|| targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;
} else if (targetType.tag == TypeTags.ERROR
&& (actualType.tag == TypeTags.UNION
&& isAllErrorMembers((BUnionType) actualType))) {
// A union whose members are all errors converts to `error`.
return true;
} else if (targetType.tag == TypeTags.STRING && actualType.tag == TypeTags.XML_TEXT) {
return true;
}
return false;
}
// Checks whether an explicit type cast (`<T> expr`) from `sourceType` to `targetType`
// is permitted, considering direct assignability, numeric conversion, and
// overlapping members of union/finite types.
public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType) {
// An error-bearing source cannot be cast to a target with no error component.
if (getTypeIntersection(sourceType, symTable.errorType) != symTable.semanticError
&& getTypeIntersection(targetType, symTable.errorType) == symTable.semanticError) {
return false;
}
if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||
sourceType == targetType) {
return true;
}
// Widening or narrowing casts are always allowed.
if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {
return true;
}
if (isNumericConversionPossible(expr, sourceType, targetType)) {
return true;
}
// Otherwise the cast is valid when the source and target overlap: some member/value
// of one side is assignable to the other.
boolean validTypeCast = false;
if (sourceType.tag == TypeTags.UNION) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType.tag == TypeTags.UNION) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (sourceType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (validTypeCast) {
// Value-typed sources are boxed to `any` before the runtime cast.
if (isValueType(sourceType)) {
setImplicitCastExpr(expr, sourceType, symTable.anyType);
}
return true;
}
return false;
}
// Checks whether a numeric conversion (int/float/decimal) can be applied as part of
// a cast from `sourceType` to `targetType`, inserting the boxing conversion on the
// expression when the source is numeric.
boolean isNumericConversionPossible(BLangExpression expr, BType sourceType,
BType targetType) {
final boolean isSourceNumericType = isBasicNumericType(sourceType);
final boolean isTargetNumericType = isBasicNumericType(targetType);
if (isSourceNumericType && isTargetNumericType) {
return true;
}
if (targetType.tag == TypeTags.UNION) {
// A union target is ambiguous for numeric conversion if it contains more than
// one distinct basic numeric type.
HashSet<Integer> typeTags = new HashSet<>();
for (BType bType : ((BUnionType) targetType).getMemberTypes()) {
if (isBasicNumericType(bType)) {
typeTags.add(bType.tag);
if (typeTags.size() > 1) {
return false;
}
}
}
}
if (!isTargetNumericType && targetType.tag != TypeTags.UNION) {
return false;
}
if (isSourceNumericType) {
// Box the numeric value so the runtime conversion can inspect it.
setImplicitCastExpr(expr, sourceType, symTable.anyType);
return true;
}
// Non-numeric sources qualify only when they may contain numeric values.
switch (sourceType.tag) {
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.JSON:
return true;
case TypeTags.UNION:
for (BType memType : ((BUnionType) sourceType).getMemberTypes()) {
if (isBasicNumericType(memType) ||
(memType.tag == TypeTags.FINITE &&
finiteTypeContainsNumericTypeValues((BFiniteType) memType))) {
return true;
}
}
break;
case TypeTags.FINITE:
if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {
return true;
}
break;
}
return false;
}
private boolean isAllErrorMembers(BUnionType actualType) {
    // True only when every member of the union is assignable to `error`.
    for (BType memberType : actualType.getMemberTypes()) {
        if (!isAssignable(memberType, symTable.errorType)) {
            return false;
        }
    }
    return true;
}
// Wraps `expr` in a compiler-generated conversion node to `expType` when an implicit
// cast from `actualType` is permitted; otherwise leaves the expression untouched.
public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {
if (!isImplicityCastable(actualType, expType)) {
return;
}
BLangTypeConversionExpr implicitConversionExpr =
(BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
implicitConversionExpr.pos = expr.pos;
// Chain onto any existing implicit conversion instead of replacing it.
implicitConversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;
implicitConversionExpr.type = expType;
implicitConversionExpr.targetType = expType;
implicitConversionExpr.internal = true;
expr.impConversionExpr = implicitConversionExpr;
}
/**
 * Returns the innermost element type of a (possibly multi-dimensional) array type;
 * non-array types are returned unchanged.
 */
public BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
// Checks whether an object type satisfies the listener contract by validating its
// attached methods against the listener validation model.
public boolean checkListenerCompatibility(BType type) {
if (type.tag != TypeTags.OBJECT) {
return false;
}
BObjectType rhsType = (BObjectType) type;
List<BAttachedFunction> rhsFuncs = ((BStructureTypeSymbol) rhsType.tsymbol).attachedFuncs;
ListenerValidationModel listenerValidationModel = new ListenerValidationModel(this, symTable);
return listenerValidationModel.checkMethods(rhsFuncs);
}
/**
 * Checks whether a type is usable as an error detail: a map or an open (non-sealed)
 * record that is assignable to the built-in detail type.
 */
public boolean isValidErrorDetailType(BType detailType) {
    switch (detailType.tag) {
        case TypeTags.MAP:
            return isAssignable(detailType, symTable.detailType);
        case TypeTags.RECORD:
            // Sealed records are rejected before checking assignability.
            if (isSealed((BRecordType) detailType)) {
                return false;
            }
            return isAssignable(detailType, symTable.detailType);
        default:
            return false;
    }
}
// True when the record is closed (declares no rest field).
private boolean isSealed(BRecordType recordType) {
return recordType.sealed;
}
// True when the field's type admits nil.
private boolean isNullable(BType fieldType) {
return fieldType.isNullable();
}
private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {
// Pairs currently under comparison; used to assume same-ness for recursive types.
Set<TypePair> unresolvedTypes;
BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType t, BType s) {
// Identical instances are trivially the same type.
if (t == s) {
return true;
}
switch (t.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
// Distinct instances of a basic type are only treated as the same when at
// least one side is a type parameter (type-param matching context).
return t.tag == s.tag
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
case TypeTags.ANY:
case TypeTags.ANYDATA:
// `any`/`anydata` additionally require matching readonly-ness.
return t.tag == s.tag && hasSameReadonlyFlag(s, t)
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
default:
break;
}
return false;
}
// The following reference types are compared by instance identity only.
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BMapType t, BType s) {
    // Maps are the same when both are maps with matching readonly-ness and the
    // same constraint type.
    if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
        return false;
    }
    BMapType sourceMapType = (BMapType) s;
    return isSameType(sourceMapType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFutureType t, BType s) {
// Futures compare by constraint tag only (not a deep constraint comparison).
return s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag;
}
@Override
public Boolean visit(BXMLType t, BType s) {
// XML falls back to the built-in-ref identity comparison.
return visit((BBuiltInRefType) t, s);
}
@Override
public Boolean visit(BJSONType t, BType s) {
// Any two json types with matching readonly-ness are the same.
return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
}
@Override
public Boolean visit(BArrayType t, BType s) {
return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BObjectType t, BType s) {
    // Object types use nominal identity: same defining package and same name.
    if (t == s) {
        return true;
    }
    if (s.tag != TypeTags.OBJECT) {
        return false;
    }
    boolean samePackage = t.tsymbol.pkgID.equals(s.tsymbol.pkgID);
    return samePackage && t.tsymbol.name.equals(s.tsymbol.name);
}
@Override
public Boolean visit(BRecordType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
return false;
}
BRecordType source = (BRecordType) s;
if (source.fields.size() != t.fields.size()) {
return false;
}
for (BField sourceField : source.fields.values()) {
if (t.fields.containsKey(sourceField.name.value)) {
BField targetField = t.fields.get(sourceField.name.value);
if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) &&
hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
(!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
continue;
}
}
return false;
}
return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes);
}
private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
public Boolean visit(BTupleType t, BType s) {
if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
return false;
}
BTupleType source = (BTupleType) s;
if (source.tupleTypes.size() != t.tupleTypes.size()) {
return false;
}
for (int i = 0; i < source.tupleTypes.size(); i++) {
if (t.getTupleTypes().get(i) == symTable.noType) {
continue;
}
if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BStreamType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BTableType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType tUnionType, BType s) {
if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
return false;
}
BUnionType sUnionType = (BUnionType) s;
if (sUnionType.getMemberTypes().size()
!= tUnionType.getMemberTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes());
Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes());
boolean notSameType = sourceTypes
.stream()
.map(sT -> targetTypes
.stream()
.anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))
.anyMatch(foundSameType -> !foundSameType);
return !notSameType;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
return false;
}
BIntersectionType sIntersectionType = (BIntersectionType) s;
if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
for (BType sourceType : sourceTypes) {
boolean foundSameType = false;
for (BType targetType : targetTypes) {
if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
foundSameType = true;
break;
}
}
if (!foundSameType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BErrorType t, BType s) {
if (s.tag != TypeTags.ERROR) {
return false;
}
BErrorType source = (BErrorType) s;
if (!source.typeIdSet.equals(t.typeIdSet)) {
return false;
}
if (source.detailType == t.detailType) {
return true;
}
return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
}
@Override
public Boolean visit(BTypedescType t, BType s) {
if (s.tag != TypeTags.TYPEDESC) {
return false;
}
BTypedescType sType = ((BTypedescType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFiniteType t, BType s) {
return s == t;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
return false;
}
BParameterizedType sType = (BParameterizedType) s;
return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
}
};
/**
 * Checks record field equivalency: every LHS field must have a compatible RHS field
 * (assignable type, compatible readonly flags, no required-vs-optional mismatch),
 * and any extra RHS fields must be assignable to the LHS rest field type.
 */
private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
    Map<String, BField> remainingRhsFields = new LinkedHashMap<>(rhsType.fields);
    for (BField lhsField : lhsType.fields.values()) {
        BField matchingRhsField = remainingRhsFields.remove(lhsField.name.value);
        if (matchingRhsField == null) {
            return false;
        }
        if (hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, matchingRhsField.symbol.flags)) {
            return false;
        }
        // A required LHS field cannot be satisfied by an optional RHS field.
        if (Symbols.isOptional(matchingRhsField.symbol) && !Symbols.isOptional(lhsField.symbol)) {
            return false;
        }
        if (!isAssignable(matchingRhsField.type, lhsField.type, unresolvedTypes)) {
            return false;
        }
    }
    // RHS fields without an LHS counterpart must fit the LHS rest field type.
    for (BField extraRhsField : remainingRhsFields.values()) {
        if (!isAssignable(extraRhsField.type, lhsType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Finds the first RHS attached function with the same name as {@code lhsFunc} whose
 * function type is assignable to the LHS function type; returns null when none matches.
 */
private BAttachedFunction getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc,
                                                   Set<TypePair> unresolvedTypes) {
    for (BAttachedFunction rhsFunc : rhsFuncList) {
        if (lhsFunc.funcName.equals(rhsFunc.funcName)
                && isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes)) {
            return rhsFunc;
        }
    }
    return null;
}
/**
 * Checks whether two symbols live in the same visibility region:
 * private symbols must share package and owner, public matches public,
 * and module-private matches module-private within the same package.
 */
private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {
    if (Symbols.isPrivate(lhsSym)) {
        if (!Symbols.isPrivate(rhsSym)) {
            return false;
        }
        return lhsSym.pkgID.equals(rhsSym.pkgID) && lhsSym.owner.name.equals(rhsSym.owner.name);
    }
    if (Symbols.isPublic(lhsSym)) {
        return Symbols.isPublic(rhsSym);
    }
    // Module-private LHS: RHS must also be module-private and in the same package.
    boolean rhsIsModulePrivate = !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym);
    return rhsIsModulePrivate && lhsSym.pkgID.equals(rhsSym.pkgID);
}
/**
 * Checks union assignability: every (non-never) effective member of the source must be
 * assignable to some effective member of the target, with fallbacks for finite source
 * members (checked against the whole target) and xml (checked via its built-in subtypes).
 */
private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    Set<BType> sourceMembers = new LinkedHashSet<>();
    Set<BType> targetMembers = new LinkedHashSet<>();
    if (source.tag == TypeTags.UNION) {
        sourceMembers.addAll(getEffectiveMemberTypes((BUnionType) source));
    } else {
        sourceMembers.add(source);
    }
    if (target.tag == TypeTags.UNION) {
        targetMembers.addAll(getEffectiveMemberTypes((BUnionType) target));
    } else {
        targetMembers.add(target);
    }
    for (BType sourceMember : sourceMembers) {
        // `never` has no values, so it is trivially assignable.
        if (sourceMember.tag == TypeTags.NEVER) {
            continue;
        }
        boolean assignableToSomeTarget = false;
        for (BType targetMember : targetMembers) {
            if (isAssignable(sourceMember, targetMember, unresolvedTypes)) {
                assignableToSomeTarget = true;
                break;
            }
        }
        if (assignableToSomeTarget) {
            continue;
        }
        // A finite source member may still be assignable to the target as a whole.
        if (sourceMember.tag == TypeTags.FINITE && isAssignable(sourceMember, target, unresolvedTypes)) {
            continue;
        }
        // `xml` is assignable when all of its built-in subtypes are.
        if (sourceMember.tag == TypeTags.XML
                && isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)) {
            continue;
        }
        return false;
    }
    return true;
}
/**
 * Returns the union's member types with intersections replaced by their effective
 * types; effective types that are themselves unions are flattened recursively.
 */
private Set<BType> getEffectiveMemberTypes(BUnionType unionType) {
    Set<BType> effectiveTypes = new LinkedHashSet<>();
    for (BType member : unionType.getMemberTypes()) {
        if (member.tag != TypeTags.INTERSECTION) {
            effectiveTypes.add(member);
            continue;
        }
        BType effectiveType = ((BIntersectionType) member).effectiveType;
        if (effectiveType.tag == TypeTags.UNION) {
            effectiveTypes.addAll(getEffectiveMemberTypes((BUnionType) effectiveType));
        } else {
            effectiveTypes.add(effectiveType);
        }
    }
    return effectiveTypes;
}
/**
 * Checks whether every value in the finite type's value space is assignable to the
 * target type (value-by-value for finite targets, member-by-member for unions).
 */
private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) {
    if (targetType.tag == TypeTags.FINITE) {
        for (BLangExpression expression : finiteType.getValueSpace()) {
            if (!isAssignableToFiniteType(targetType, (BLangLiteral) expression)) {
                return false;
            }
        }
        return true;
    }
    if (targetType.tag == TypeTags.UNION) {
        List<BType> targetMemberTypes = getAllTypes(targetType);
        for (BLangExpression valueExpr : finiteType.getValueSpace()) {
            boolean matched = false;
            for (BType targetMemType : targetMemberTypes) {
                // NOTE(review): for non-finite members the original checks against the whole
                // union (`targetType`), not the member — preserved here; confirm intended.
                boolean assignable = targetMemType.tag == TypeTags.FINITE
                        ? isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr)
                        : isAssignable(valueExpr.type, targetType, unresolvedTypes);
                if (assignable) {
                    matched = true;
                    break;
                }
            }
            if (!matched) {
                return false;
            }
        }
        return true;
    }
    for (BLangExpression expression : finiteType.getValueSpace()) {
        if (!isAssignable(expression.type, targetType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether the given literal matches some member of the finite type's value
 * space: nil members match only nil literals, other members by value equivalence.
 */
boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {
    if (type.tag != TypeTags.FINITE) {
        return false;
    }
    BFiniteType finiteType = (BFiniteType) type;
    for (BLangExpression memberExpr : finiteType.getValueSpace()) {
        BLangLiteral memberLiteral = (BLangLiteral) memberExpr;
        if (memberLiteral.value == null) {
            // Nil member of the value space: only a nil literal matches.
            if (literalExpr.value == null) {
                return true;
            }
            continue;
        }
        if (checkLiteralAssignabilityBasedOnType(memberLiteral, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Method to check the literal assignability based on the types of the literals. For numeric literals the
 * assignability depends on the equivalency of the literals. The candidate literal could either be a simple
 * literal or a constant. In case of a constant, it is assignable to the base literal if and only if both
 * literals have the same type and equivalent values.
 *
 * @param baseLiteral      Literal based on which we check the assignability.
 * @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.
 * @return true if assignable; false otherwise.
 */
boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {
    // Different literal kinds (e.g. numeric vs string) are never equivalent.
    if (baseLiteral.getKind() != candidateLiteral.getKind()) {
        return false;
    }
    Object baseValue = baseLiteral.value;
    Object candidateValue = candidateLiteral.value;
    int candidateTypeTag = candidateLiteral.type.tag;
    // In each numeric case, a non-constant int candidate may implicitly widen/narrow
    // to the base type when its value fits the base type's range.
    switch (baseLiteral.type.tag) {
        case TypeTags.BYTE:
            if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&
                    !candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.INT:
            if (candidateTypeTag == TypeTags.INT) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED8_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.FLOAT:
            String baseValueStr = String.valueOf(baseValue);
            String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr;
            // A decimal-discriminated literal (e.g. `1.0d`) is never a float value.
            if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {
                return false;
            }
            double baseDoubleVal = Double.parseDouble(baseValueStr);
            double candidateDoubleVal;
            if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                candidateDoubleVal = ((Long) candidateValue).doubleValue();
                return baseDoubleVal == candidateDoubleVal;
            } else if (candidateTypeTag == TypeTags.FLOAT) {
                candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));
                return baseDoubleVal == candidateDoubleVal;
            }
            break;
        case TypeTags.DECIMAL:
            BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);
            BigDecimal candidateDecimalVal;
            if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);
                return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
            } else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||
                    candidateTypeTag == TypeTags.DECIMAL) {
                // A float-discriminated candidate (e.g. `1.0f`) is never a decimal value.
                if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {
                    return false;
                }
                candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);
                return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
            }
            break;
        default:
            // Non-numeric literals (string, boolean, nil): plain value equality.
            // Objects.equals guards against NPE for nil literals whose value is null.
            return Objects.equals(baseValue, candidateValue);
    }
    return false;
}
// Range checks for literal values against Ballerina's constrained integer subtypes.
// BUGFIX: the byte and the 16-/8-bit checks previously truncated the long via
// intValue() before comparing, so out-of-int-range values (e.g. 2^32, which
// truncates to 0) wrongly passed. Compare the full long value instead, making
// these consistent with the existing 32-bit checks below.
boolean isByteLiteralValue(Long longObject) {
    return (longObject >= BBYTE_MIN_VALUE && longObject <= BBYTE_MAX_VALUE);
}

boolean isSigned32LiteralValue(Long longObject) {
    return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);
}

boolean isSigned16LiteralValue(Long longObject) {
    return (longObject >= SIGNED16_MIN_VALUE && longObject <= SIGNED16_MAX_VALUE);
}

boolean isSigned8LiteralValue(Long longObject) {
    return (longObject >= SIGNED8_MIN_VALUE && longObject <= SIGNED8_MAX_VALUE);
}

boolean isUnsigned32LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);
}

boolean isUnsigned16LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED16_MAX_VALUE);
}

boolean isUnsigned8LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED8_MAX_VALUE);
}

// A string is a char literal value when it contains exactly one Unicode code point.
boolean isCharLiteralValue(String literal) {
    return (literal.codePoints().count() == 1);
}
/**
 * Retrieves a type representing all the values in the value space of a finite type
 * that are assignable to the target type.
 *
 * @param finiteType the finite type
 * @param targetType the target type
 * @return a new finite type if at least one value in the value space of the specified finiteType is
 *         assignable to targetType (the same finite type if all are assignable), else semanticError
 */
BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {
    // Fast path: the whole finite type is assignable, so reuse it unchanged.
    if (isAssignable(finiteType, targetType)) {
        return finiteType;
    }
    Set<BLangExpression> matchingValues = new LinkedHashSet<>();
    for (BLangExpression expr : finiteType.getValueSpace()) {
        boolean matches = isAssignable(expr.type, targetType)
                || isAssignableToFiniteType(targetType, (BLangLiteral) expr);
        if (!matches && targetType.tag == TypeTags.UNION) {
            // A value also matches when some finite member of a target union accepts it.
            for (BType memType : ((BUnionType) targetType).getMemberTypes()) {
                if (memType.tag == TypeTags.FINITE
                        && isAssignableToFiniteType(memType, (BLangLiteral) expr)) {
                    matches = true;
                    break;
                }
            }
        }
        if (matches) {
            matchingValues.add(expr);
        }
    }
    if (matchingValues.isEmpty()) {
        return symTable.semanticError;
    }
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,
            names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
            finiteType.tsymbol.pkgID, null,
            finiteType.tsymbol.owner, finiteType.tsymbol.pos,
            VIRTUAL);
    BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);
    finiteTypeSymbol.type = intersectingFiniteType;
    return intersectingFiniteType;
}
/**
 * Retrieves a type representing all the member types of a union type that are
 * assignable to the target type.
 *
 * @param unionType  the union type
 * @param targetType the target type
 * @return a single type or a new union type if at least one member type of the union type is
 *         assignable to targetType, else semanticError
 */
BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType) {
    List<BType> intersection = new LinkedList<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.FINITE) {
            // Finite members contribute the subset of their value space that matches.
            BType finiteTypeWithMatches =
                    getTypeForFiniteTypeValuesAssignableToType((BFiniteType) memberType, targetType);
            if (finiteTypeWithMatches != symTable.semanticError) {
                intersection.add(finiteTypeWithMatches);
            }
        } else if (isAssignable(memberType, targetType)) {
            intersection.add(memberType);
        }
    }
    if (intersection.isEmpty()) {
        return symTable.semanticError;
    }
    if (intersection.size() == 1) {
        return intersection.get(0);
    }
    return BUnionType.create(null, new LinkedHashSet<>(intersection));
}
/**
 * Checks whether values of the two types could ever compare equal.
 * Equality is only defined over pure (anydata-like) types; mutual assignability
 * short-circuits, otherwise the expanded member type sets are intersected.
 */
boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {
    if (!lhsType.isPureType() || !rhsType.isPureType()) {
        return false;
    }
    if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {
        return true;
    }
    return equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsType),
                                      expandAndGetMemberTypesRecursive(rhsType));
}
/**
 * Checks whether an equality intersection exists between the two expanded member
 * type sets: anydata on one side intersects with any non-error on the other, any
 * same-type pair intersects, and structured types fall back to a shape-based check.
 */
private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    if (lhsTypes.contains(symTable.anydataType) && hasNonErrorMember(rhsTypes)) {
        return true;
    }
    if (rhsTypes.contains(symTable.anydataType) && hasNonErrorMember(lhsTypes)) {
        return true;
    }
    for (BType lhsMember : lhsTypes) {
        for (BType rhsMember : rhsTypes) {
            if (isSameType(lhsMember, rhsMember)) {
                return true;
            }
        }
    }
    return equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);
}

// True when at least one member of the set is not an error type.
private boolean hasNonErrorMember(Set<BType> types) {
    for (BType type : types) {
        if (type.tag != TypeTags.ERROR) {
            return true;
        }
    }
    return false;
}
/**
 * Retrieves member types of the specified type, expanding maps/arrays of/constrained by union
 * types to individual maps/arrays.
 *
 * e.g., (string|int)[] would cause three entries as string[], int[], (string|int)[]
 *
 * @param bType the type for which member types needs to be identified
 * @return a set containing all the retrieved member types
 */
public Set<BType> expandAndGetMemberTypesRecursive(BType bType) {
    Set<BType> memberTypes = new LinkedHashSet<>();
    switch (bType.tag) {
        case TypeTags.BYTE:
        case TypeTags.INT:
            // int and byte values can compare equal, so both participate.
            memberTypes.add(symTable.intType);
            memberTypes.add(symTable.byteType);
            break;
        case TypeTags.FINITE:
            for (BLangExpression value : ((BFiniteType) bType).getValueSpace()) {
                memberTypes.add(value.type);
            }
            break;
        case TypeTags.UNION:
            for (BType member : ((BUnionType) bType).getMemberTypes()) {
                memberTypes.addAll(expandAndGetMemberTypesRecursive(member));
            }
            break;
        case TypeTags.ARRAY: {
            BArrayType arrayType = (BArrayType) bType;
            BType elementType = arrayType.getElementType();
            // Fixed-size arrays also contribute their unsized counterpart.
            if (arrayType.getSize() != -1) {
                memberTypes.add(new BArrayType(elementType));
            }
            if (elementType.tag == TypeTags.UNION) {
                for (BType expandedElement : expandAndGetMemberTypesRecursive(elementType)) {
                    memberTypes.add(new BArrayType(expandedElement));
                }
            }
            memberTypes.add(bType);
            break;
        }
        case TypeTags.MAP: {
            BType constraintType = ((BMapType) bType).getConstraint();
            if (constraintType.tag == TypeTags.UNION) {
                for (BType expandedConstraint : expandAndGetMemberTypesRecursive(constraintType)) {
                    memberTypes.add(new BMapType(TypeTags.MAP, expandedConstraint, symTable.mapType.tsymbol));
                }
            }
            memberTypes.add(bType);
            break;
        }
        default:
            memberTypes.add(bType);
    }
    return memberTypes;
}
/**
 * Checks whether two tuples of equal arity have pairwise equality intersections
 * between their member types.
 */
private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {
    List<BType> lhsMemberTypes = lhsType.getTupleTypes();
    List<BType> rhsMemberTypes = rhsType.getTupleTypes();
    if (lhsMemberTypes.size() != rhsMemberTypes.size()) {
        return false;
    }
    for (int index = 0; index < lhsMemberTypes.size(); index++) {
        if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(index)),
                expandAndGetMemberTypesRecursive(rhsMemberTypes.get(index)))) {
            return false;
        }
    }
    return true;
}
/**
 * Fallback equality-intersection check for structured types: returns true when some
 * LHS member could compare equal to some RHS member based on their shapes
 * (tuple/array element-wise, map/record field-wise, json compatibility, struct
 * equivalency). Returns false when no such pairing exists.
 */
private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    for (BType lhsMemberType : lhsTypes) {
        switch (lhsMemberType.tag) {
            case TypeTags.INT:
            case TypeTags.STRING:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
            case TypeTags.BOOLEAN:
            case TypeTags.NIL:
                // Simple values intersect with json on the RHS.
                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }
                break;
            case TypeTags.JSON:
                if (jsonEqualityIntersectionExists(rhsTypes)) {
                    return true;
                }
                break;
            case TypeTags.TUPLE:
                // Tuple vs tuple: pairwise member intersection.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {
                    return true;
                }
                // Tuple vs array: every tuple member must intersect the element type.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,
                                        (BTupleType) lhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.ARRAY:
                // Array vs array: element-type intersection.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),
                                        expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {
                    return true;
                }
                // Array vs tuple: symmetric to the tuple case above.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,
                                        (BTupleType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.MAP:
                // Map vs map: constraint intersection.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),
                                        expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {
                    return true;
                }
                // A map not constrained to errors intersects with json.
                if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&
                        rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }
                // Map vs record: every required record field must fit the constraint.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                mapRecordEqualityIntersectionExists((BMapType) lhsMemberType,
                                        (BRecordType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.OBJECT:
            case TypeTags.RECORD:
                // Structurally equivalent structs (in either direction) intersect.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) ||
                                checkStructEquivalency(lhsMemberType, rhsMemberType))) {
                    return true;
                }
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                recordEqualityIntersectionExists((BRecordType) lhsMemberType,
                                        (BRecordType) rhsMemberType))) {
                    return true;
                }
                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&
                        jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {
                    return true;
                }
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                mapRecordEqualityIntersectionExists((BMapType) rhsMemberType,
                                        (BRecordType) lhsMemberType))) {
                    return true;
                }
                break;
        }
    }
    return false;
}
/**
 * Checks whether every tuple member type has an equality intersection with the
 * array's element type.
 */
private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {
    Set<BType> arrayElementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);
    for (BType tupleMemberType : tupleType.tupleTypes) {
        if (!equalityIntersectionExists(arrayElementTypes, expandAndGetMemberTypesRecursive(tupleMemberType))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether an equality intersection exists between two record types, i.e.,
 * whether values of the two records could compare equal. Mutually present fields
 * must have intersecting types; a required field missing on the other side must
 * intersect with that side's rest field type (and fails outright if that side is
 * sealed). An absent optional field is simply skipped.
 */
private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {
    Map<String, BField> lhsFields = lhsType.fields;
    Map<String, BField> rhsFields = rhsType.fields;
    List<Name> matchedFieldNames = new ArrayList<>();
    for (BField lhsField : lhsFields.values()) {
        if (rhsFields.containsKey(lhsField.name.value)) {
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(
                                                    rhsFields.get(lhsField.name.value).type))) {
                return false;
            }
            matchedFieldNames.add(lhsField.getName());
        } else {
            if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {
                // BUGFIX: was `break`, which aborted the loop and skipped validation
                // of ALL remaining LHS fields once a single optional field had no RHS
                // counterpart. An absent optional field is fine, but the remaining
                // fields still need checking, so only skip this one.
                continue;
            }
            if (rhsType.sealed) {
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(rhsType.restFieldType))) {
                return false;
            }
        }
    }
    for (BField rhsField : rhsFields.values()) {
        if (matchedFieldNames.contains(rhsField.getName())) {
            continue;
        }
        if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {
            if (lhsType.sealed) {
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),
                                            expandAndGetMemberTypesRecursive(lhsType.restFieldType))) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether a map and a record could compare equal: every required record
 * field must have an equality intersection with the map's constraint type.
 */
private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {
    Set<BType> mapConstraintTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());
    for (BField field : recordType.fields.values()) {
        if (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {
            continue;
        }
        if (!equalityIntersectionExists(mapConstraintTypes, expandAndGetMemberTypesRecursive(field.type))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether any member of the type set could compare equal to a json value:
 * non-error-constrained maps, records whose required fields are non-error, and
 * anything directly assignable to json.
 */
private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) {
    for (BType type : typeSet) {
        switch (type.tag) {
            case TypeTags.MAP:
                // A map intersects with json unless it is constrained to errors.
                if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {
                    return true;
                }
                break;
            case TypeTags.RECORD:
                if (allRequiredFieldsNonError((BRecordType) type)) {
                    return true;
                }
                break;
            default:
                if (isAssignable(type, symTable.jsonType)) {
                    return true;
                }
        }
    }
    return false;
}

// True when every required field of the record has a non-error type.
private boolean allRequiredFieldsNonError(BRecordType recordType) {
    for (BField field : recordType.fields.values()) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)
                && isAssignable(field.type, symTable.errorType)) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the type remaining after removing {@code typeToRemove} from
 * {@code originalType}; only unions and finite types can actually be narrowed,
 * any other type is returned unchanged.
 */
public BType getRemainingType(BType originalType, BType typeToRemove) {
    if (originalType.tag == TypeTags.UNION) {
        return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.FINITE) {
        return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
    }
    return originalType;
}
/**
 * Computes the intersection of {@code lhsType} with each flattened member of
 * {@code rhsType}, collecting the non-empty per-member intersections into a single
 * type (or a union of them). Returns semanticError when the types are disjoint.
 */
BType getTypeIntersection(BType lhsType, BType rhsType) {
    LinkedHashSet<BType> intersection = new LinkedHashSet<>();
    for (BType narrowingType : getAllTypes(rhsType)) {
        BType memberIntersection = intersectSingleType(lhsType, narrowingType);
        if (memberIntersection != null) {
            intersection.add(memberIntersection);
        }
    }
    if (intersection.isEmpty()) {
        return lhsType.tag == TypeTags.NULL_SET ? lhsType : symTable.semanticError;
    }
    if (intersection.contains(symTable.semanticError)) {
        return symTable.semanticError;
    }
    if (intersection.size() == 1) {
        return intersection.iterator().next();
    }
    return BUnionType.create(null, intersection);
}

// Intersection of lhsType with one narrowing type; null when they are disjoint.
private BType intersectSingleType(BType lhsType, BType type) {
    if (isAssignable(type, lhsType)) {
        return type;
    }
    if (isAssignable(lhsType, type)) {
        return lhsType;
    }
    if (lhsType.tag == TypeTags.FINITE) {
        BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) lhsType, type);
        if (intersectionType != symTable.semanticError) {
            return intersectionType;
        }
    } else if (type.tag == TypeTags.FINITE) {
        BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) type, lhsType);
        if (intersectionType != symTable.semanticError) {
            return intersectionType;
        }
    } else if (lhsType.tag == TypeTags.UNION) {
        BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) lhsType, type);
        if (intersectionType != symTable.semanticError) {
            return intersectionType;
        }
    } else if (type.tag == TypeTags.UNION) {
        BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) type, lhsType);
        if (intersectionType != symTable.semanticError) {
            return intersectionType;
        }
    } else if (type.tag == TypeTags.NULL_SET) {
        return type;
    }
    return null;
}
/**
 * Narrows a union by removing all members assignable to any of the remove types;
 * finite members are additionally narrowed value-by-value rather than dropped
 * wholesale. Returns nullSet when nothing remains.
 */
private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) {
    List<BType> remainingTypes = getAllTypes(originalType);
    for (BType removeType : removeTypes) {
        remainingTypes.removeIf(type -> isAssignable(type, removeType));
    }
    List<BType> finiteMembers = new ArrayList<>();
    List<BType> narrowedFiniteMembers = new ArrayList<>();
    for (BType remainingType : remainingTypes) {
        if (remainingType.tag != TypeTags.FINITE) {
            continue;
        }
        finiteMembers.add(remainingType);
        BType narrowed = getRemainingType((BFiniteType) remainingType, removeTypes);
        if (narrowed != symTable.semanticError) {
            narrowedFiniteMembers.add(narrowed);
        }
    }
    remainingTypes.removeAll(finiteMembers);
    remainingTypes.addAll(narrowedFiniteMembers);
    if (remainingTypes.isEmpty()) {
        return symTable.nullSet;
    }
    if (remainingTypes.size() == 1) {
        return remainingTypes.get(0);
    }
    return BUnionType.create(null, new LinkedHashSet<>(remainingTypes));
}
/**
 * Narrows a finite type by removing every value covered by any of the remove
 * types, producing a fresh anonymous finite type; returns semanticError when no
 * values remain.
 */
private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) {
    Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>();
    for (BLangExpression valueExpr : originalType.getValueSpace()) {
        if (!isValueCoveredByAny(valueExpr, removeTypes)) {
            remainingValueSpace.add(valueExpr);
        }
    }
    if (remainingValueSpace.isEmpty()) {
        return symTable.semanticError;
    }
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,
            names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
            originalType.tsymbol.pkgID, null,
            originalType.tsymbol.owner, originalType.tsymbol.pos,
            VIRTUAL);
    BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);
    finiteTypeSymbol.type = intersectingFiniteType;
    return intersectingFiniteType;
}

// True when the value expression is covered by at least one of the given types.
private boolean isValueCoveredByAny(BLangExpression valueExpr, List<BType> types) {
    for (BType type : types) {
        if (isAssignable(valueExpr.type, type)
                || isAssignableToFiniteType(type, (BLangLiteral) valueExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns the "safe" variant of a type: nullable basic types get a non-nil copy,
 * and unions get nil and/or error members removed as requested. Any other type is
 * returned unchanged.
 */
public BType getSafeType(BType type, boolean liftNil, boolean liftError) {
    switch (type.tag) {
        case TypeTags.JSON:
            return new BJSONType(type.tag, type.tsymbol, false);
        case TypeTags.ANY:
            return new BAnyType(type.tag, type.tsymbol, false);
        case TypeTags.ANYDATA:
            return new BAnydataType(type.tag, type.tsymbol, false);
        case TypeTags.READONLY:
            return new BReadonlyType(type.tag, type.tsymbol, false);
        default:
            break;
    }
    if (type.tag != TypeTags.UNION) {
        return type;
    }
    // Copy the union and drop the lifted members.
    LinkedHashSet<BType> memberCopy = new LinkedHashSet<>(((BUnionType) type).getMemberTypes());
    BUnionType liftedType = BUnionType.create(null, memberCopy);
    if (liftNil) {
        liftedType.remove(symTable.nilType);
    }
    if (liftError) {
        liftedType.remove(symTable.errorType);
    }
    // A single remaining member collapses to that member.
    Set<BType> remainingMembers = liftedType.getMemberTypes();
    if (remainingMembers.size() == 1) {
        return remainingMembers.iterator().next();
    }
    return liftedType;
}
/**
 * Flattens a (possibly nested) union into the list of its leaf member types;
 * a non-union type yields a singleton list.
 */
public List<BType> getAllTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    List<BType> flattened = new ArrayList<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        flattened.addAll(getAllTypes(memberType));
    }
    return flattened;
}
/**
 * Checks whether the given type is permitted for a constant declaration: simple
 * value types, maps of allowed types, and finite types over allowed value types.
 */
public boolean isAllowedConstantType(BType type) {
    switch (type.tag) {
        case TypeTags.BOOLEAN:
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.NIL:
            return true;
        case TypeTags.MAP:
            // Constant maps are allowed when their constraint type is itself allowed.
            return isAllowedConstantType(((BMapType) type).constraint);
        case TypeTags.FINITE: {
            // Inspect the type of the finite type's first value-space member.
            BLangExpression finiteValue = ((BFiniteType) type).getValueSpace().iterator().next();
            return isAllowedConstantType(finiteValue.type);
        }
        default:
            return false;
    }
}
/**
 * Checks whether the given literal fits the target type: exact tag matches pass
 * directly; int literals may narrow to byte and the signed/unsigned int subtypes
 * when the value is in range, or widen to float/decimal; string literals of a
 * single code point fit the char-string type.
 */
public boolean isValidLiteral(BLangLiteral literal, BType targetType) {
    BType literalType = literal.type;
    if (literalType.tag == targetType.tag) {
        return true;
    }
    boolean isIntLiteral = literalType.tag == TypeTags.INT;
    switch (targetType.tag) {
        case TypeTags.BYTE:
            return isIntLiteral && isByteLiteralValue((Long) literal.value);
        case TypeTags.DECIMAL:
            return literalType.tag == TypeTags.FLOAT || isIntLiteral;
        case TypeTags.FLOAT:
            return isIntLiteral;
        case TypeTags.SIGNED32_INT:
            return isIntLiteral && isSigned32LiteralValue((Long) literal.value);
        case TypeTags.SIGNED16_INT:
            return isIntLiteral && isSigned16LiteralValue((Long) literal.value);
        case TypeTags.SIGNED8_INT:
            return isIntLiteral && isSigned8LiteralValue((Long) literal.value);
        case TypeTags.UNSIGNED32_INT:
            return isIntLiteral && isUnsigned32LiteralValue((Long) literal.value);
        case TypeTags.UNSIGNED16_INT:
            return isIntLiteral && isUnsigned16LiteralValue((Long) literal.value);
        case TypeTags.UNSIGNED8_INT:
            return isIntLiteral && isUnsigned8LiteralValue((Long) literal.value);
        case TypeTags.CHAR_STRING:
            return literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value);
        default:
            return false;
    }
}
/**
 * Validates that the return type of the given function is a subtype of `error?`
 * containing `()`; logs the given diagnostic on the return type node otherwise.
 *
 * @param function       The function whose return type should be validated
 * @param diagnosticCode The code to log if the return type is invalid
 */
public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {
    BType returnType = function.returnTypeNode.type;
    if (returnType.tag == TypeTags.NIL) {
        return;
    }
    if (returnType.tag == TypeTags.UNION && returnType.isNullable()) {
        boolean onlyNilAndError = true;
        for (BType memberType : ((BUnionType) returnType).getMemberTypes()) {
            if (memberType.tag != TypeTags.NIL && memberType.tag != TypeTags.ERROR) {
                onlyNilAndError = false;
                break;
            }
        }
        if (onlyNilAndError) {
            return;
        }
    }
    dlog.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());
}
/**
 * Type vector of size two, holding a source and a target type. Used as a memo key so that
 * mutually-recursive type checks terminate.
 *
 * @since 0.982.0
 */
private static class TypePair {
    BType sourceType;
    BType targetType;

    public TypePair(BType sourceType, BType targetType) {
        this.sourceType = sourceType;
        this.targetType = targetType;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof TypePair) {
            TypePair that = (TypePair) obj;
            return sourceType.equals(that.sourceType) && targetType.equals(that.targetType);
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Objects.hash(sourceType, targetType);
    }
}
/**
 * A functional interface for parameterizing the type of type checking that needs to be done on the
 * source and target types.
 *
 * @since 0.995.0
 */
@FunctionalInterface // single abstract method; annotation lets the compiler enforce it for lambda use
private interface TypeEqualityPredicate {
    boolean test(BType source, BType target, Set<TypePair> unresolvedTypes);
}
/**
 * Returns true when the given type has an implicit "filler" value, i.e. a default member value
 * used to fill unassigned slots (e.g. when growing a fixed-length array).
 *
 * @param type the type under test
 * @return true if the type has a filler value
 */
public boolean hasFillerValue(BType type) {
switch (type.tag) {
// Simple/basic types all have language-defined fillers (0, 0.0, "", false, (), ...).
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.NIL:
case TypeTags.TABLE:
case TypeTags.ANYDATA:
case TypeTags.MAP:
case TypeTags.ANY:
return true;
// Structured types delegate to the per-shape overloads below.
case TypeTags.ARRAY:
return checkFillerValue((BArrayType) type);
case TypeTags.FINITE:
return checkFillerValue((BFiniteType) type);
case TypeTags.UNION:
return checkFillerValue((BUnionType) type);
case TypeTags.OBJECT:
return checkFillerValue((BObjectType) type);
case TypeTags.RECORD:
return checkFillerValue((BRecordType) type);
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) type;
return tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));
default:
// Integer subtypes (Signed8/16/32, Unsigned8/16/32) fill with 0 as well.
if (TypeTags.isIntegerTypeTag(type.tag)) {
return true;
}
return false;
}
}
/**
 * A class type has a filler value only when it can be instantiated with no arguments:
 * either no {@code init} method at all, or an {@code init} returning nil whose parameters
 * are all defaultable.
 */
private boolean checkFillerValue(BObjectType type) {
// Abstract object types (non-class) are never fillable.
if ((type.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
return false;
}
BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;
if (initFunction == null) {
return true;
}
// An init that can return error cannot be invoked implicitly.
if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {
return false;
}
for (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) {
if (!bVarSymbol.defaultableParam) {
return false;
}
}
return true;
}
/**
 * Checks whether a finite type has a filler value.
 * Singleton: the lone member value is itself a valid filler.
 * Otherwise: all member values must share one basic type, and the implicit default value of
 * that basic type must be a member of the value space (nil membership short-circuits to true).
 * Precondition: the value space contains at least one element.
 *
 * @param type the finite type under test
 * @return whether the type has a valid filler value
 */
private boolean checkFillerValue(BFiniteType type) {
if (type.isNullable()) {
return true;
}
if (type.getValueSpace().size() == 1) {
return true;
}
Iterator iterator = type.getValueSpace().iterator();
BLangExpression firstElement = (BLangExpression) iterator.next();
boolean defaultFillValuePresent = isImplicitDefaultValue(firstElement);
while (iterator.hasNext()) {
BLangExpression value = (BLangExpression) iterator.next();
// All members must share the same basic type for a single filler to exist.
if (!isSameBasicType(value.type, firstElement.type)) {
return false;
}
if (!defaultFillValuePresent && isImplicitDefaultValue(value)) {
defaultFillValuePresent = true;
}
}
return defaultFillValuePresent;
}
/**
 * Returns true when the value space contains the implicit default value of its basic type
 * (0, 0.0, "", false or nil).
 */
private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) {
    return valueSpace.stream().anyMatch(this::isImplicitDefaultValue);
}
/**
 * A union has a filler value when it is nullable (filler is nil), or when all members collapse
 * to one basic type that itself has a filler — and, if finite members are present, their value
 * spaces must actually contain that implicit default value.
 */
private boolean checkFillerValue(BUnionType type) {
if (type.isNullable()) {
return true;
}
Set<BType> memberTypes = new HashSet<>();
boolean hasFillerValue = false;
boolean defaultValuePresent = false;
boolean finiteTypePresent = false;
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.FINITE) {
// Flatten finite members into the set of their constituent value types.
Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace());
memberTypes.addAll(uniqueValues);
if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) {
defaultValuePresent = true;
}
finiteTypePresent = true;
} else {
memberTypes.add(member);
}
if (!hasFillerValue && hasFillerValue(member)) {
hasFillerValue = true;
}
}
if (!hasFillerValue) {
return false;
}
// Every (flattened) member must share the same basic type for a single filler to exist.
Iterator<BType> iterator = memberTypes.iterator();
BType firstMember = iterator.next();
while (iterator.hasNext()) {
if (!isSameBasicType(firstMember, iterator.next())) {
return false;
}
}
if (finiteTypePresent) {
return defaultValuePresent;
}
return true;
}
/**
 * True when the two types are the same type, or when both are (possibly distinct)
 * integer subtypes — which share the int basic type.
 */
private boolean isSameBasicType(BType source, BType target) {
    if (isSameType(source, target)) {
        return true;
    }
    return TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag);
}
/** Collects the distinct static types of the expressions in a finite type's value space. */
private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) {
    Set<BType> distinctTypes = new HashSet<>();
    for (BLangExpression valueExpr : valueSpace) {
        distinctTypes.add(valueExpr.type);
    }
    return distinctTypes;
}
/**
 * Returns true when the expression is a literal equal to the implicit default value of its
 * basic type: 0, "", 0.0, false or nil. Non-literal expressions are never defaults.
 */
private boolean isImplicitDefaultValue(BLangExpression expression) {
if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {
BLangLiteral literalExpression = (BLangLiteral) expression;
BType literalExprType = literalExpression.type;
Object value = literalExpression.getValue();
switch (literalExprType.getKind()) {
case INT:
case BYTE:
return value.equals(Long.valueOf(0));
case STRING:
return value == null || value.equals("");
case DECIMAL:
case FLOAT:
// Decimal/float literal values are carried as strings at this stage, hence the string compare.
return value.equals(String.valueOf(0.0));
case BOOLEAN:
return value.equals(Boolean.valueOf(false));
case NIL:
return true;
default:
return false;
}
}
return false;
}
/**
 * A record has a filler value (the empty record) iff no field is mandatory:
 * every field is either optional or carries a default (i.e. is not flagged REQUIRED).
 */
private boolean checkFillerValue(BRecordType type) {
    for (BField field : type.fields.values()) {
        boolean optional = Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL);
        if (!optional && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            return false;
        }
    }
    return true;
}
/**
 * Open arrays (size == -1) fill with the empty array; fixed-length arrays additionally need
 * a fillable element type.
 */
private boolean checkFillerValue(BArrayType type) {
    return type.size == -1 || hasFillerValue(type.eType);
}
/**
 * Get result type of the query output.
 *
 * @param type type of query expression.
 * @return result type: the constraint/element type for stream/table/array (or the first such
 *         member for a union), string/xml members as-is, otherwise the input type unchanged.
 */
public BType resolveExprType(BType type) {
switch (type.tag) {
case TypeTags.STREAM:
return ((BStreamType) type).constraint;
case TypeTags.TABLE:
return ((BTableType) type).constraint;
case TypeTags.ARRAY:
return ((BArrayType) type).eType;
case TypeTags.UNION:
// Return the resolved type of the first matching member, if any.
List<BType> exprTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
for (BType returnType : exprTypes) {
switch (returnType.tag) {
case TypeTags.STREAM:
return ((BStreamType) returnType).constraint;
case TypeTags.TABLE:
return ((BTableType) returnType).constraint;
case TypeTags.ARRAY:
return ((BArrayType) returnType).eType;
case TypeTags.STRING:
case TypeTags.XML:
return returnType;
}
}
// Deliberate fall-through: a union with no matching member resolves to itself.
default:
return type;
}
}
/** True for the simple basic type tags: nil, boolean, the numeric types and the string subtypes. */
private boolean isSimpleBasicType(int tag) {
    if (tag == TypeTags.BYTE || tag == TypeTags.FLOAT || tag == TypeTags.DECIMAL
            || tag == TypeTags.BOOLEAN || tag == TypeTags.NIL) {
        return true;
    }
    return TypeTags.isIntegerTypeTag(tag) || TypeTags.isStringTypeTag(tag);
}
/**
 * Check whether a type is an ordered type (usable with sort/comparison).
 * Simple basic types are ordered; arrays are ordered when their element type is; a union is
 * ordered only in the {@code T?} shape — at most two members, one of which is nil.
 *
 * @param type type.
 * @return boolean whether the type is an ordered type or not.
 */
public boolean isOrderedType(BType type) {
switch (type.tag) {
case TypeTags.UNION:
Set<BType> memberTypes = ((BUnionType) type).getMemberTypes();
for (BType memType : memberTypes) {
if (!isOrderedType(memType)) {
return false;
}
}
// Only T? (member + nil) unions are ordered.
return memberTypes.size() <= 2 && memberTypes.contains(symTable.nilType);
case TypeTags.ARRAY:
BType elementType = ((BArrayType) type).eType;
return isOrderedType(elementType);
default:
return isSimpleBasicType(type.tag);
}
}
/** True when the type is a simple basic type, or a union whose members are all simple basic types. */
public boolean isUnionOfSimpleBasicTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return isSimpleBasicType(type.tag);
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isSimpleBasicType(memberType.tag)) {
            return false;
        }
    }
    return true;
}
/**
 * Returns true when the type is a subtype of {@code readonly|isolated object {}}: inherently
 * immutable, flagged readonly, an isolated object, or a union of such types.
 */
public boolean isSubTypeOfReadOnlyOrIsolatedObjectUnion(BType type) {
if (isInherentlyImmutableType(type) || Symbols.isFlagOn(type.flags, Flags.READONLY)) {
return true;
}
int tag = type.tag;
if (tag == TypeTags.OBJECT) {
return isIsolated(type);
}
if (tag != TypeTags.UNION) {
return false;
}
// Unions qualify only when every member does.
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isSubTypeOfReadOnlyOrIsolatedObjectUnion(memberType)) {
return false;
}
}
return true;
}
// True when the type carries the ISOLATED flag (isolated object types).
private boolean isIsolated(BType type) {
return Symbols.isFlagOn(type.flags, Flags.ISOLATED);
}
/**
 * Validates that an object type structurally satisfies the listener contract: public
 * {@code attach}, {@code detach}, {@code start}, {@code gracefulStop} and {@code immediateStop}
 * methods with the expected signatures, each returning a subtype of {@code error?}.
 */
private static class ListenerValidationModel {
    private final Types types;
    private final SymbolTable symtable;
    // string|string[]|(), the allowed type of attach()'s second (service-name) parameter.
    private final BType serviceNameType;
    boolean attachFound;
    boolean detachFound;
    boolean startFound;
    boolean gracefulStopFound;
    boolean immediateStopFound;

    public ListenerValidationModel(Types types, SymbolTable symTable) {
        this.types = types;
        this.symtable = symTable;
        this.serviceNameType =
                BUnionType.create(null, symtable.stringType, symtable.arrayStringType, symtable.nilType);
    }

    /** True once every required listener method has been seen with a valid signature. */
    boolean isValidListener() {
        return attachFound && detachFound && startFound && gracefulStopFound && immediateStopFound;
    }

    /**
     * Scans the attached functions, recording each required listener method found; bails out
     * early when a required method exists but has an invalid signature.
     */
    private boolean checkMethods(List<BAttachedFunction> rhsFuncs) {
        for (BAttachedFunction func : rhsFuncs) {
            switch (func.funcName.value) {
                case "attach":
                    if (!checkAttachMethod(func)) {
                        return false;
                    }
                    break;
                case "detach":
                    if (!checkDetachMethod(func)) {
                        return false;
                    }
                    break;
                case "start":
                    // NOTE(review): an invalid `start` returns true here, unlike the other
                    // methods which return false — preserved as-is, but worth confirming.
                    if (!checkStartMethod(func)) {
                        return true;
                    }
                    break;
                case "gracefulStop":
                    if (!checkGracefulStop(func)) {
                        return false;
                    }
                    break;
                case "immediateStop":
                    if (!checkImmediateStop(func)) {
                        return false;
                    }
                    break;
            }
        }
        return isValidListener();
    }

    /**
     * True when the function takes no parameters at all — no required/defaultable params and
     * no rest parameter.
     * Fix: the rest check previously used {@code restType != symtable.noType}, which rejected
     * genuinely parameter-less functions and accepted ones declaring a rest parameter.
     */
    private boolean emptyParamList(BAttachedFunction func) {
        return func.type.paramTypes.isEmpty()
                && (func.type.restType == null || func.type.restType == symtable.noType);
    }

    /** True when the method is public and returns a subtype of {@code error?}. */
    private boolean publicAndReturnsErrorOrNil(BAttachedFunction func) {
        if (!Symbols.isPublic(func.symbol)) {
            return false;
        }
        if (!types.isAssignable(func.type.retType, symtable.errorOrNilType)) {
            return false;
        }
        return true;
    }

    /** Combined check: public, parameter-less, returning {@code error?}. */
    private boolean isPublicNoParamReturnsErrorOrNil(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (!emptyParamList(func)) {
            return false;
        }
        return true;
    }

    private boolean checkImmediateStop(BAttachedFunction func) {
        return immediateStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkGracefulStop(BAttachedFunction func) {
        return gracefulStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkStartMethod(BAttachedFunction func) {
        return startFound = publicAndReturnsErrorOrNil(func);
    }

    /** detach(service) — one service-object parameter. */
    private boolean checkDetachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 1) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        boolean isMatchingSignature = firstParamType.tag == TypeTags.OBJECT
                && Symbols.isService(firstParamType.tsymbol);
        return detachFound = isMatchingSignature;
    }

    /** attach(service, name) — a service object plus a string|string[]|() name. */
    private boolean checkAttachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 2) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        if (firstParamType.tag != TypeTags.OBJECT) {
            return false;
        }
        if (!Symbols.isService(firstParamType.tsymbol)) {
            return false;
        }
        BType secondParamType = func.type.paramTypes.get(1);
        boolean sameType = types.isAssignable(secondParamType, this.serviceNameType);
        return attachFound = sameType;
    }

    /** True when the type is an object type flagged as a service. */
    private boolean isServiceObject(BType type) {
        if (type.tag != TypeTags.OBJECT) {
            return false;
        }
        return Symbols.isService(type.tsymbol);
    }
}
} | class Types {
// Context key under which the per-compilation Types singleton is registered.
private static final CompilerContext.Key<Types> TYPES_KEY =
new CompilerContext.Key<>();
// Resolves parameterized (dependently-typed) signatures to concrete types.
private final ResolvedTypeBuilder typeBuilder;
private SymbolTable symTable;
private SymbolResolver symResolver;
private BLangDiagnosticLog dlog;
private Names names;
// NOTE(review): presumably a counter for naming synthesized finite types — use site not visible here.
private int finiteTypeCount = 0;
// Union of the built-in xml subtypes: Element|Comment|ProcessingInstruction|Text.
private BUnionType expandedXMLBuiltinSubtypes;
/**
 * Returns the per-context Types singleton, creating it on first use.
 * The constructor registers the new instance under TYPES_KEY, so no put() is needed here.
 */
public static Types getInstance(CompilerContext context) {
Types types = context.get(TYPES_KEY);
if (types == null) {
types = new Types(context);
}
return types;
}
/**
 * Creates a Types instance bound to the given compiler context and registers it under
 * TYPES_KEY so getInstance() returns this instance from now on.
 */
public Types(CompilerContext context) {
context.put(TYPES_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.names = Names.getInstance(context);
// Element|Comment|ProcessingInstruction|Text — the expansion of the xml built-in subtypes.
this.expandedXMLBuiltinSubtypes = BUnionType.create(null,
symTable.xmlElementType, symTable.xmlCommentType,
symTable.xmlPIType, symTable.xmlTextType);
this.typeBuilder = new ResolvedTypeBuilder();
}
/**
 * Checks each actual type against the expected type at the same index, logging diagnostics
 * against the given expression. Missing expected entries default to noType (no check).
 *
 * @return the list of checked (possibly error) types, one per actual type
 */
public List<BType> checkTypes(BLangExpression node,
                              List<BType> actualTypes,
                              List<BType> expTypes) {
    List<BType> resTypes = new ArrayList<>(actualTypes.size());
    for (int i = 0; i < actualTypes.size(); i++) {
        BType expType = i < expTypes.size() ? expTypes.get(i) : symTable.noType;
        resTypes.add(checkType(node, actualTypes.get(i), expType));
    }
    return resTypes;
}
// Convenience overload that reports incompatibilities with the generic INCOMPATIBLE_TYPES code.
public BType checkType(BLangExpression node,
BType actualType,
BType expType) {
return checkType(node, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
/**
 * Checks the expression's actual type against the expected type, stores the result on the
 * expression, and — when the check succeeded — records an implicit cast if one is needed.
 */
public BType checkType(BLangExpression expr,
BType actualType,
BType expType,
DiagnosticCode diagCode) {
expr.type = checkType(expr.pos, actualType, expType, diagCode);
if (expr.type.tag == TypeTags.SEMANTIC_ERROR) {
return expr.type;
}
// Only reached on success: attach an implicit conversion where actual != expected.
setImplicitCastExpr(expr, actualType, expType);
return expr.type;
}
/**
 * Core type check: propagates existing semantic errors, treats an expected type of NONE as
 * "no expectation", and otherwise requires assignability. On failure, logs the given
 * diagnostic at the position and returns the semantic-error type.
 */
public BType checkType(Location pos,
                       BType actualType,
                       BType expType,
                       DiagnosticCode diagCode) {
    if (expType.tag == TypeTags.SEMANTIC_ERROR) {
        return expType;
    }
    if (expType.tag == TypeTags.NONE) {
        return actualType;
    }
    if (actualType.tag == TypeTags.SEMANTIC_ERROR) {
        return actualType;
    }
    if (isAssignable(actualType, expType)) {
        return actualType;
    }
    dlog.error(pos, diagCode, expType, actualType);
    return symTable.semanticError;
}
/** True when the type is json, or a union with at least one json member. */
public boolean isJSONContext(BType type) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == TypeTags.JSON;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (memberType.tag == TypeTags.JSON) {
            return true;
        }
    }
    return false;
}
/**
 * True when the type supports lax member access: json, xml, xml:Element, a map of a lax
 * constraint, or a union whose members are all lax.
 */
public boolean isLax(BType type) {
    int tag = type.tag;
    if (tag == TypeTags.JSON || tag == TypeTags.XML || tag == TypeTags.XML_ELEMENT) {
        return true;
    }
    if (tag == TypeTags.MAP) {
        return isLax(((BMapType) type).constraint);
    }
    if (tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isLax(memberType)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
// Entry point for structural same-type checks; starts with an empty memo set.
public boolean isSameType(BType source, BType target) {
return isSameType(source, target, new HashSet<>());
}
/**
 * Structural same-type check with cycle handling: a pair already in {@code unresolvedTypes}
 * is optimistically assumed equal (coinductive treatment of recursive types).
 */
private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
// Visitor dispatches on the target type, comparing against the source.
BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);
return target.accept(sameTypeVisitor, source);
}
/** True for value types: boolean, the numeric types, string and all their built-in subtypes. */
public boolean isValueType(BType type) {
    int tag = type.tag;
    return tag == TypeTags.BOOLEAN || tag == TypeTags.BYTE || tag == TypeTags.DECIMAL
            || tag == TypeTags.FLOAT || tag == TypeTags.INT || tag == TypeTags.STRING
            || tag == TypeTags.SIGNED32_INT || tag == TypeTags.SIGNED16_INT
            || tag == TypeTags.SIGNED8_INT || tag == TypeTags.UNSIGNED32_INT
            || tag == TypeTags.UNSIGNED16_INT || tag == TypeTags.UNSIGNED8_INT
            || tag == TypeTags.CHAR_STRING;
}
// True for int/float/decimal and the integer subtypes.
// NOTE(review): relies on all basic numeric tags being numerically smaller than
// TypeTags.STRING — confirm against the TypeTags ordering before reordering tags.
boolean isBasicNumericType(BType type) {
return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);
}
/** True when any member of the finite type's value space has a basic numeric type. */
boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {
    for (BLangExpression valueExpr : finiteType.getValueSpace()) {
        if (isBasicNumericType(valueExpr.type)) {
            return true;
        }
    }
    return false;
}
/** True when the type is error, or a union containing (transitively) an error member. */
public boolean containsErrorType(BType type) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == TypeTags.ERROR;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (containsErrorType(memberType)) {
            return true;
        }
    }
    return false;
}
/** True when the type is a list type (array or tuple), or a union of list types. */
public boolean isSubTypeOfList(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isSubTypeOfList(memberType)) {
                return false;
            }
        }
        return true;
    }
    return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);
}
/**
 * Resolves the static type of a list match pattern against the matched expression's type:
 * the intersection if one exists; for an anydata matched expression the pattern's member/rest
 * types are widened to anydata in place; otherwise noType (pattern can never match).
 */
public BType resolvePatternTypeFromMatchExpr(BLangExpression matchExpr, BTupleType listMatchPatternType) {
// No match expression (e.g. on-fail binding) — the pattern type stands alone.
if (matchExpr == null) {
return listMatchPatternType;
}
BType matchExprType = matchExpr.type;
BType intersectionType = getTypeIntersection(matchExprType, listMatchPatternType);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
if (matchExprType.tag == TypeTags.ANYDATA) {
// Mutates the pattern type: every tuple slot (and the rest type) becomes anydata.
Collections.fill(listMatchPatternType.tupleTypes, symTable.anydataType);
if (listMatchPatternType.restType != null) {
listMatchPatternType.restType = symTable.anydataType;
}
return listMatchPatternType;
}
return symTable.noType;
}
/**
 * Resolves the static type of a const match pattern (a literal or a constant reference)
 * against the matched expression's type. Returns noType when the pattern can never match.
 */
public BType resolvePatternTypeFromMatchExpr(BLangExpression matchExpr, BLangExpression constPatternExpr) {
// No match expression: the pattern's own (symbol) type stands alone.
if (matchExpr == null) {
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) constPatternExpr).symbol.type;
} else {
return constPatternExpr.type;
}
}
BType matchExprType = matchExpr.type;
BType constMatchPatternExprType = constPatternExpr.type;
// Constant-reference patterns: the referenced symbol's type must be assignable to the matched type.
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef constVarRef = (BLangSimpleVarRef) constPatternExpr;
if (constVarRef.symbol == null) {
return symTable.noType;
}
BType constVarRefSymbolType = constVarRef.symbol.type;
if (isAssignable(constVarRefSymbolType, matchExprType)) {
return constVarRefSymbolType;
}
return symTable.noType;
}
// From here on the pattern is a literal.
BLangLiteral constPatternLiteral = (BLangLiteral) constPatternExpr;
if (containsAnyType(constMatchPatternExprType)) {
return matchExprType;
} else if (containsAnyType(matchExprType)) {
return constMatchPatternExprType;
}
// byte matched against an int literal keeps the byte type.
if (matchExprType.tag == TypeTags.BYTE && constMatchPatternExprType.tag == TypeTags.INT) {
return matchExprType;
}
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
if (matchExprType.tag == TypeTags.UNION) {
// Prefer a finite member that contains the literal; otherwise fall back to assignability.
for (BType memberType : ((BUnionType) matchExprType).getMemberTypes()) {
if (memberType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(memberType, constPatternLiteral)) {
return memberType;
}
} else {
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
}
}
} else if (matchExprType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(matchExprType, constPatternLiteral)) {
return matchExprType;
}
}
return symTable.noType;
}
/** True when the type is `any`, or a union with an `any` member (one level deep). */
private boolean containsAnyType(BType type) {
    if (type.tag == TypeTags.ANY) {
        return true;
    }
    if (type.tag != TypeTags.UNION) {
        return false;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (memberType.tag == TypeTags.ANY) {
            return true;
        }
    }
    return false;
}
/**
 * Merges two types: `any` absorbs into the second type, equal basic types collapse to the
 * first, anything else becomes their union.
 */
public BType mergeTypes(BType typeFirst, BType typeSecond) {
    if (containsAnyType(typeFirst)) {
        return typeSecond;
    }
    return isSameBasicType(typeFirst, typeSecond)
            ? typeFirst
            : BUnionType.create(null, typeFirst, typeSecond);
}
/** True when the type is a mapping type (map or record), or a union of mapping types. */
public boolean isSubTypeOfMapping(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isSubTypeOfMapping(memberType)) {
                return false;
            }
        }
        return true;
    }
    return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);
}
/**
 * True when the type (or every member of a union) has exactly the given base type tag.
 */
public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {
if (type.tag != TypeTags.UNION) {
return type.tag == baseTypeTag;
}
// NOTE(review): any union is accepted for XML base tags without checking members —
// looks over-permissive; confirm whether this shortcut is intended.
if (TypeTags.isXMLTypeTag(baseTypeTag)) {
return true;
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag);
}
/**
 * Checks whether source type is assignable to the target type.
 * <p>
 * Source type is assignable to the target type if,
 * 1) the target type is any and the source type is not a value type.
 * 2) there exists an implicit cast symbol from source to target.
 * 3) both types are JSON and the target constraint is no type.
 * 4) both types are array type and both array types are assignable.
 * 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.
 *
 * @param source type.
 * @param target type.
 * @return true if source type is assignable to the target type.
 */
public boolean isAssignable(BType source, BType target) {
// Fresh memo set per top-level query; the recursive overload fills it to handle cyclic types.
return isAssignable(source, target, new HashSet<>());
}
/**
 * Stamping is allowed when either direction is assignable, or either direction passes the
 * looser stamping-equivalency check.
 */
boolean isStampingAllowed(BType source, BType target) {
    if (isAssignable(source, target) || isAssignable(target, source)) {
        return true;
    }
    return checkTypeEquivalencyForStamping(source, target)
            || checkTypeEquivalencyForStamping(target, source);
}
/**
 * Looser, stamping-specific structural equivalence check, dispatched on the target type's tag.
 * Falls back to false for any combination not listed.
 */
private boolean checkTypeEquivalencyForStamping(BType source, BType target) {
if (target.tag == TypeTags.RECORD) {
if (source.tag == TypeTags.RECORD) {
// Seed the memo set with this pair so recursive record types terminate.
TypePair pair = new TypePair(source, target);
Set<TypePair> unresolvedTypes = new HashSet<>();
unresolvedTypes.add(pair);
return checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes);
} else if (source.tag == TypeTags.MAP) {
int mapConstraintTypeTag = ((BMapType) source).constraint.tag;
// A sealed record target restricts which map constraints can stamp into it.
if ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) &&
((BRecordType) target).sealed) {
for (BField field : ((BStructureType) target).getFields().values()) {
if (field.getType().tag != mapConstraintTypeTag) {
return false;
}
}
}
return true;
}
} else if (target.tag == TypeTags.JSON) {
return source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP;
} else if (target.tag == TypeTags.MAP) {
if (source.tag == TypeTags.MAP) {
return isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint());
} else if (source.tag == TypeTags.UNION) {
return checkUnionEquivalencyForStamping(source, target);
}
} else if (target.tag == TypeTags.ARRAY) {
if (source.tag == TypeTags.JSON) {
return true;
} else if (source.tag == TypeTags.TUPLE) {
// Each tuple member must stamp into the array's element type.
BType arrayElementType = ((BArrayType) target).eType;
for (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) {
if (!isStampingAllowed(tupleMemberType, arrayElementType)) {
return false;
}
}
return true;
} else if (source.tag == TypeTags.ARRAY) {
return checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType);
}
} else if (target.tag == TypeTags.UNION) {
return checkUnionEquivalencyForStamping(source, target);
} else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) {
return checkTupleEquivalencyForStamping(source, target);
}
return false;
}
/**
 * Record-to-record stamping equivalence: matching visibility, rhs not narrower than lhs,
 * sealedness compatible, and field-wise stampability.
 */
private boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType,
Set<TypePair> unresolvedTypes) {
// XOR of the flag words: true iff exactly one of the two records is public.
if (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) {
return false;
}
// Private records only match within their own package.
if (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) {
return false;
}
if (lhsType.fields.size() > rhsType.fields.size()) {
return false;
}
// An open rhs cannot satisfy a sealed lhs.
if (lhsType.sealed && !rhsType.sealed) {
return false;
}
return checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes);
}
/**
 * Field-wise stamping check: every lhs field must exist on the rhs with a stampable type,
 * and every extra rhs field must stamp into the lhs record's rest-field type.
 */
private boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType,
Set<TypePair> unresolvedTypes) {
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsType.fields.get(lhsField.name.value);
if (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) {
return false;
}
}
for (BField rhsField : rhsType.fields.values()) {
BField lhsField = lhsType.fields.get(rhsField.name.value);
// Fields absent on the lhs fall under its rest-field type.
if (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) {
return false;
}
}
return true;
}
/**
 * Union stamping check: every member of the (possibly singleton) source set must stamp into
 * at least one member of the (possibly singleton) target set.
 */
private boolean checkUnionEquivalencyForStamping(BType source, BType target) {
    Set<BType> sourceTypes = new LinkedHashSet<>();
    if (source.tag == TypeTags.UNION) {
        sourceTypes.addAll(((BUnionType) source).getMemberTypes());
    } else {
        sourceTypes.add(source);
    }
    Set<BType> targetTypes = new LinkedHashSet<>();
    if (target.tag == TypeTags.UNION) {
        targetTypes.addAll(((BUnionType) target).getMemberTypes());
    } else {
        targetTypes.add(target);
    }
    return sourceTypes.stream()
            .allMatch(s -> targetTypes.stream().anyMatch(t -> isStampingAllowed(s, t)));
}
/** Tuple stamping check: equal arity and member-wise stampability, position by position. */
private boolean checkTupleEquivalencyForStamping(BType source, BType target) {
    if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
        return false;
    }
    List<BType> targetMembers = ((BTupleType) target).tupleTypes;
    List<BType> sourceMembers = ((BTupleType) source).tupleTypes;
    if (targetMembers.size() != sourceMembers.size()) {
        return false;
    }
    for (int i = 0; i < targetMembers.size(); i++) {
        if (!isStampingAllowed(sourceMembers.get(i), targetMembers.get(i))) {
            return false;
        }
    }
    return true;
}
/**
 * Core assignability check, dispatched on the (source, target) tag pair. The order of the
 * checks below is significant — special cases (intersections, parameterized types, readonly,
 * finite sources, unions) must run before the generic structural cases.
 */
private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (isSameType(source, target)) {
return true;
}
int sourceTag = source.tag;
int targetTag = target.tag;
// A readonly target only accepts immutable/readonly sources (parameterized sources exempt).
if (!Symbols.isFlagOn(source.flags, Flags.PARAMETERIZED) &&
!isInherentlyImmutableType(target) && Symbols.isFlagOn(target.flags, Flags.READONLY) &&
!isInherentlyImmutableType(source) && !Symbols.isFlagOn(source.flags, Flags.READONLY)) {
return false;
}
// Intersections are compared via their effective types.
if (sourceTag == TypeTags.INTERSECTION) {
return isAssignable(((BIntersectionType) source).effectiveType,
targetTag != TypeTags.INTERSECTION ? target :
((BIntersectionType) target).effectiveType, unresolvedTypes);
}
if (targetTag == TypeTags.INTERSECTION) {
return isAssignable(source, ((BIntersectionType) target).effectiveType, unresolvedTypes);
}
if (sourceTag == TypeTags.PARAMETERIZED_TYPE) {
return isParameterizedTypeAssignable(source, target, unresolvedTypes);
}
// Built-in subtype widenings: byte -> int, string subtypes, xml subtypes.
if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {
return true;
}
if (TypeTags.isXMLTypeTag(sourceTag) && TypeTags.isXMLTypeTag(targetTag)) {
return isXMLTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {
return true;
}
if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.XML_TEXT) {
return true;
}
if (sourceTag == TypeTags.STRING && targetTag == TypeTags.XML_TEXT) {
return true;
}
if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.STRING) {
return true;
}
if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.CHAR_STRING) {
return true;
}
if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {
return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);
} else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {
// error is deliberately excluded from any.
return false;
}
if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {
return true;
}
// Top-like targets: any, anydata, readonly.
if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {
return true;
}
if (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && source.isAnydata()) {
return true;
}
if (targetTag == TypeTags.READONLY &&
(isInherentlyImmutableType(source) || Symbols.isFlagOn(source.flags, Flags.READONLY))) {
return true;
}
if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {
BRecordType recordType = (BRecordType) source;
return isAssignableRecordType(recordType, target, unresolvedTypes);
}
if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) {
return isAssignableMapType((BMapType) source, (BRecordType) target);
}
if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {
return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),
unresolvedTypes);
}
if (targetTag == TypeTags.TABLE && sourceTag == TypeTags.TABLE) {
return isAssignableTableType((BTableType) source, (BTableType) target);
}
if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {
return isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes);
}
if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {
return true;
}
// Finite sources and unions must be handled before the structural cases below.
if (sourceTag == TypeTags.FINITE) {
return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);
}
if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&
isAssignableToUnionType(source, target, unresolvedTypes)) {
return true;
}
if (targetTag == TypeTags.JSON) {
if (sourceTag == TypeTags.JSON) {
return true;
}
if (sourceTag == TypeTags.ARRAY) {
return isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.MAP) {
return isAssignable(((BMapType) source).constraint, target, unresolvedTypes);
}
if (sourceTag == TypeTags.RECORD) {
return isAssignableRecordType((BRecordType) source, target, unresolvedTypes);
}
}
if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {
// An unconstrained future target accepts any future.
if (((BFutureType) target).constraint.tag == TypeTags.NONE) {
return true;
}
return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);
}
if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {
if (((BMapType) target).constraint.tag == TypeTags.ANY &&
((BMapType) source).constraint.tag != TypeTags.UNION) {
return true;
}
return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);
}
if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)
&& (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {
return checkStructEquivalency(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {
return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);
}
if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {
return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);
}
if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {
return isTupleTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {
// Function types get a fresh memo set — their check does not share record/object cycles.
return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());
}
return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&
isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
/**
 * Assignability for parameterized (dependently-typed) sources: resolve the source via the
 * type builder; when the target is also parameterized, their parameter indices must match
 * and the resolved types must be assignable.
 */
private boolean isParameterizedTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
BType resolvedSourceType = typeBuilder.build(source);
if (target.tag != TypeTags.PARAMETERIZED_TYPE) {
return isAssignable(resolvedSourceType, target, unresolvedTypes);
}
// Both parameterized: they must refer to the same parameter position.
if (((BParameterizedType) source).paramIndex != ((BParameterizedType) target).paramIndex) {
return false;
}
return isAssignable(resolvedSourceType, typeBuilder.build(target), unresolvedTypes);
}
/**
 * Record-to-map / record-to-json assignability: every field (and the rest type of an open
 * record) must be assignable to the map constraint, or to json itself.
 *
 * @throws IllegalArgumentException if the target is neither a map nor json
 */
private boolean isAssignableRecordType(BRecordType recordType, BType type, Set<TypePair> unresolvedTypes) {
// Memoize the pair first; re-encountering it means a cycle, which is treated as assignable.
TypePair pair = new TypePair(recordType, type);
if (!unresolvedTypes.add(pair)) {
return true;
}
BType targetType;
switch (type.tag) {
case TypeTags.MAP:
targetType = ((BMapType) type).constraint;
break;
case TypeTags.JSON:
targetType = type;
break;
default:
throw new IllegalArgumentException("Incompatible target type: " + type.toString());
}
return recordFieldsAssignableToType(recordType, targetType, unresolvedTypes);
}
/**
 * True when every field of the record — and, for open records, the rest-field type —
 * is assignable to the given target type.
 */
private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType,
                                             Set<TypePair> unresolvedTypes) {
    for (BField field : recordType.fields.values()) {
        if (!isAssignable(field.type, targetType, unresolvedTypes)) {
            return false;
        }
    }
    // Sealed records have no rest fields to check.
    return recordType.sealed || isAssignable(recordType.restFieldType, targetType, unresolvedTypes);
}
/**
 * Checks whether a source table type is assignable to a target table type.
 *
 * The row (constraint) types must be assignable, after which the target's key
 * specification — a key type constraint or an explicit key field name list — is
 * validated against the source's key specification.
 */
private boolean isAssignableTableType(BTableType sourceTableType, BTableType targetTableType) {
    if (!isAssignable(sourceTableType.constraint, targetTableType.constraint)) {
        return false;
    }
    // No key requirement on the target: the constraint check alone decides.
    if (targetTableType.keyTypeConstraint == null && targetTableType.fieldNameList == null) {
        return true;
    }
    if (targetTableType.keyTypeConstraint != null) {
        if (sourceTableType.keyTypeConstraint != null &&
                (isAssignable(sourceTableType.keyTypeConstraint, targetTableType.keyTypeConstraint))) {
            return true;
        }
        if (sourceTableType.fieldNameList == null) {
            return false;
        }
        // Derive the source key type from its key field names: a single key field is
        // compared directly, multiple fields form a tuple key.
        List<BType> fieldTypes = new ArrayList<>();
        sourceTableType.fieldNameList.forEach(field -> fieldTypes
                .add(getTableConstraintField(sourceTableType.constraint, field).type));
        if (fieldTypes.size() == 1) {
            return isAssignable(fieldTypes.get(0), targetTableType.keyTypeConstraint);
        }
        BTupleType tupleType = new BTupleType(fieldTypes);
        return isAssignable(tupleType, targetTableType.keyTypeConstraint);
    }
    // Target specifies key fields by name: the lists must match exactly.
    return targetTableType.fieldNameList.equals(sourceTableType.fieldNameList);
}
/**
 * Resolves the field with the given name from a table's row (constraint) type.
 *
 * For a record the field is looked up directly. For a union, every member must
 * contribute a field of that name and all such field types must be equivalent
 * (mutually assignable); the first is returned. Intersections delegate to their
 * effective type. Returns {@code null} when the field cannot be resolved.
 */
BField getTableConstraintField(BType constraintType, String fieldName) {
    switch (constraintType.tag) {
        case TypeTags.RECORD:
            Map<String, BField> fieldList = ((BRecordType) constraintType).getFields();
            return fieldList.get(fieldName);
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) constraintType;
            Set<BType> memTypes = unionType.getMemberTypes();
            List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                    .filter(Objects::nonNull).collect(Collectors.toList());
            // Every union member must define the field.
            if (fields.size() != memTypes.size()) {
                return null;
            }
            // All member field types must be equivalent (assignable both ways).
            if (fields.stream().allMatch(field -> isAssignable(field.type, fields.get(0).type) &&
                    isAssignable(fields.get(0).type, field.type))) {
                return fields.get(0);
            }
            break;
        case TypeTags.INTERSECTION:
            return getTableConstraintField(((BIntersectionType) constraintType).effectiveType, fieldName);
    }
    return null;
}
/**
 * Checks whether a map type is assignable to a record target. Since a map value may
 * be absent for any key, every target field must be optional, have compatible
 * readonly-ness, and accept the map's constraint; the rest field must accept it too.
 */
private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) {
    if (targetRecType.sealed) {
        // A closed record cannot absorb arbitrary map keys.
        return false;
    }
    BType constraint = sourceMapType.constraint;
    for (BField targetField : targetRecType.fields.values()) {
        long fieldFlags = targetField.symbol.flags;
        if (!Symbols.isFlagOn(fieldFlags, Flags.OPTIONAL)
                || hasIncompatibleReadOnlyFlags(fieldFlags, sourceMapType.flags)
                || !isAssignable(constraint, targetField.type)) {
            return false;
        }
    }
    return isAssignable(constraint, targetRecType.restFieldType);
}

/**
 * Incompatible when the target demands readonly but the source does not guarantee it.
 */
private boolean hasIncompatibleReadOnlyFlags(long targetFlags, long sourceFlags) {
    if (!Symbols.isFlagOn(targetFlags, Flags.READONLY)) {
        return false;
    }
    return !Symbols.isFlagOn(sourceFlags, Flags.READONLY);
}
/**
 * Checks error-type assignability: anything is assignable to the built-in error
 * type; otherwise the detail types must be assignable and the target's type-id set
 * must accept the source's.
 */
private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) {
    if (target == symTable.errorType) {
        return true;
    }
    // Pairs already being resolved are assumed assignable (recursive error types).
    if (!unresolvedTypes.add(new TypePair(source, target))) {
        return true;
    }
    boolean detailAssignable = isAssignable(source.detailType, target.detailType, unresolvedTypes);
    return detailAssignable && target.typeIdSet.isAssignableFrom(source.typeIdSet);
}

/**
 * Checks xml-type assignability. A sequence target ({@code xml} / {@code xml<T>})
 * checks the source (or its constraint) against the target constraint; non-sequence
 * xml targets require an exact tag match.
 */
private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) {
    if (targetType.tag != TypeTags.XML) {
        return sourceType.tag == targetType.tag;
    }
    BXMLType target = (BXMLType) targetType;
    if (target.constraint == null) {
        // Unconstrained xml accepts any xml value.
        return true;
    }
    if (TypeTags.isXMLNonSequenceType(sourceType.tag)) {
        // Singleton xml kinds are checked directly against the constraint.
        return isAssignable(sourceType, target.constraint, unresolvedTypes);
    }
    BXMLType source = (BXMLType) sourceType;
    return isAssignable(source.constraint, target.constraint, unresolvedTypes);
}
/**
 * Checks tuple-to-tuple assignability: fixed members are checked pairwise, with the
 * target's rest type absorbing any extra source members.
 */
private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
        return false;
    }
    BTupleType lhsTupleType = (BTupleType) target;
    BTupleType rhsTupleType = (BTupleType) source;
    // A source rest element needs a target rest element to map onto.
    if (lhsTupleType.restType == null && rhsTupleType.restType != null) {
        return false;
    }
    // Without a target rest element, member counts must match exactly.
    if (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {
        return false;
    }
    if (lhsTupleType.restType != null && rhsTupleType.restType != null) {
        if (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) {
            return false;
        }
    }
    // The target cannot demand more fixed members than the source provides.
    if (lhsTupleType.tupleTypes.size() > rhsTupleType.tupleTypes.size()) {
        return false;
    }
    for (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) {
        // Source members beyond the target's fixed members check against its rest type.
        BType lhsType = (lhsTupleType.tupleTypes.size() > i)
                ? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType;
        if (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether a tuple is assignable to an array: a fixed-size array requires an
 * exact member count and no rest element, and every tuple member (and the rest
 * type, if present) must be assignable to the array's element type.
 */
private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,
                                                 Set<TypePair> unresolvedTypes) {
    if (target.state != BArrayState.OPEN
            && (source.restType != null || source.tupleTypes.size() != target.size)) {
        return false;
    }
    for (BType memberType : source.tupleTypes) {
        if (!isAssignable(memberType, target.eType, unresolvedTypes)) {
            return false;
        }
    }
    return source.restType == null || isAssignable(source.restType, target.eType, unresolvedTypes);
}

/**
 * Checks whether an array is assignable to a tuple: fixed tuple members require a
 * fixed-length source of compatible size, and the array element type must be
 * assignable to every tuple member type (and the rest type, if present).
 */
private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,
                                                 Set<TypePair> unresolvedTypes) {
    if (!target.tupleTypes.isEmpty()) {
        if (source.state == BArrayState.OPEN) {
            // An open array has no fixed length and cannot satisfy fixed members.
            return false;
        }
        int fixedMemberCount = target.tupleTypes.size();
        if (target.restType != null
                ? fixedMemberCount > source.size
                : fixedMemberCount != source.size) {
            return false;
        }
    }
    for (BType memberType : target.tupleTypes) {
        if (!isAssignable(source.eType, memberType, unresolvedTypes)) {
            return false;
        }
    }
    return target.restType == null || isAssignable(source.eType, target.restType, unresolvedTypes);
}

/**
 * Checks array-to-array (or array-to-json) assignability. A fixed-length target
 * additionally requires matching sizes.
 */
private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) {
    BType sourceElementType = source.getElementType();
    if (target.tag == TypeTags.JSON) {
        return isAssignable(sourceElementType, target, unresolvedTypes);
    }
    if (target.tag != TypeTags.ARRAY) {
        return false;
    }
    BArrayType targetArrayType = (BArrayType) target;
    if (targetArrayType.state != BArrayState.OPEN && targetArrayType.size != source.size) {
        return false;
    }
    return isAssignable(sourceElementType, targetArrayType.getElementType(), unresolvedTypes);
}
/**
 * Checks function-type assignability.
 *
 * Isolated/transactional flags must first be compatible. When the target contains
 * type parameters (lang-lib style signatures), parameters are matched positionally:
 * a type-param target parameter is checked source-to-target, an ordinary parameter
 * target-to-source (contravariantly). Otherwise the generic function shape check is
 * used with a swapped (contravariant) parameter predicate.
 */
private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,
                                         Set<TypePair> unresolvedTypes) {
    if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
        return false;
    }
    if (containsTypeParams(target)) {
        if (source.paramTypes.size() != target.paramTypes.size()) {
            return false;
        }
        for (int i = 0; i < source.paramTypes.size(); i++) {
            BType sourceParam = source.paramTypes.get(i);
            BType targetParam = target.paramTypes.get(i);
            boolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);
            if (isTypeParam) {
                // Type-param positions bind the source type, so check covariantly.
                if (!isAssignable(sourceParam, targetParam)) {
                    return false;
                }
            } else {
                // Ordinary parameters are contravariant.
                if (!isAssignable(targetParam, sourceParam)) {
                    return false;
                }
            }
        }
        if (source.retType == null && target.retType == null) {
            return true;
        } else if (source.retType == null || target.retType == null) {
            return false;
        }
        // Return types are covariant.
        return isAssignable(source.retType, target.retType, unresolvedTypes);
    }
    // (s, t) swapped: parameters of the target must accept parameters of the source.
    return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));
}
/**
 * Checks whether a type is inherently immutable: all simple value types plus a
 * fixed set of reference types whose values can never be mutated.
 */
public boolean isInherentlyImmutableType(BType type) {
    if (isValueType(type)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.XML_TEXT:
        case TypeTags.FINITE:
        case TypeTags.READONLY:
        case TypeTags.NIL:
        case TypeTags.ERROR:
        case TypeTags.INVOKABLE:
        case TypeTags.TYPEDESC:
        case TypeTags.HANDLE:
            return true;
        default:
            return false;
    }
}
// Convenience overloads delegating to the main implementation below with default
// arguments (no readonly-object restriction, fresh visited set, no forced re-check).

boolean isSelectivelyImmutableType(BType type) {
    return isSelectivelyImmutableType(type, false, new HashSet<>(), false);
}

boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, boolean forceCheck) {
    return isSelectivelyImmutableType(type, disallowReadOnlyObjects, new HashSet<>(), forceCheck);
}

public boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes) {
    return isSelectivelyImmutableType(type, false, unresolvedTypes, false);
}

private boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes, boolean forceCheck) {
    return isSelectivelyImmutableType(type, false, unresolvedTypes, forceCheck);
}
/**
 * Checks whether a type is selectively immutable, i.e. a mutable structured type
 * that can have a corresponding read-only ({@code & readonly}) variant.
 *
 * @param type                    the type to check
 * @param disallowReadOnlyObjects whether readonly classes should still be rejected
 * @param unresolvedTypes         types already being checked, to terminate recursion
 * @param forceCheck              re-evaluate even if an immutable variant already exists
 */
private boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, Set<BType> unresolvedTypes,
                                           boolean forceCheck) {
    // Inherently immutable types need no read-only variant; types that are not
    // selectively-immutable candidates can never have one.
    if (isInherentlyImmutableType(type) || !(type instanceof SelectivelyImmutableReferenceType)) {
        return false;
    }
    // An already-computed immutable variant answers the question, unless forced.
    if (!forceCheck && ((SelectivelyImmutableReferenceType) type).getImmutableType() != null) {
        return true;
    }
    // Types already under evaluation are assumed immutable-capable (recursive types).
    if (!unresolvedTypes.add(type)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.ANY:
        case TypeTags.ANYDATA:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.XML_COMMENT:
        case TypeTags.XML_ELEMENT:
        case TypeTags.XML_PI:
            return true;
        case TypeTags.ARRAY:
            BType elementType = ((BArrayType) type).eType;
            return isInherentlyImmutableType(elementType) ||
                    isSelectivelyImmutableType(elementType, unresolvedTypes, forceCheck);
        case TypeTags.TUPLE:
            // Every member (and the rest type, if any) must support immutability.
            BTupleType tupleType = (BTupleType) type;
            for (BType tupMemType : tupleType.tupleTypes) {
                if (!isInherentlyImmutableType(tupMemType) &&
                        !isSelectivelyImmutableType(tupMemType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }
            BType tupRestType = tupleType.restType;
            if (tupRestType == null) {
                return true;
            }
            return isInherentlyImmutableType(tupRestType) ||
                    isSelectivelyImmutableType(tupRestType, unresolvedTypes, forceCheck);
        case TypeTags.RECORD:
            // Every field (and the rest field type, if meaningful) must support it.
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields.values()) {
                BType fieldType = field.type;
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }
            BType recordRestType = recordType.restFieldType;
            if (recordRestType == null || recordRestType == symTable.noType) {
                return true;
            }
            return isInherentlyImmutableType(recordRestType) ||
                    isSelectivelyImmutableType(recordRestType, unresolvedTypes, forceCheck);
        case TypeTags.MAP:
            BType constraintType = ((BMapType) type).constraint;
            return isInherentlyImmutableType(constraintType) ||
                    isSelectivelyImmutableType(constraintType, unresolvedTypes, forceCheck);
        case TypeTags.OBJECT:
            BObjectType objectType = (BObjectType) type;
            // Classes qualify only when marked readonly and readonly objects are allowed.
            if (Symbols.isFlagOn(objectType.tsymbol.flags, Flags.CLASS) &&
                    (disallowReadOnlyObjects || !Symbols.isFlagOn(objectType.flags, Flags.READONLY))) {
                return false;
            }
            for (BField field : objectType.fields.values()) {
                BType fieldType = field.type;
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }
            return true;
        case TypeTags.TABLE:
            BType tableConstraintType = ((BTableType) type).constraint;
            return isInherentlyImmutableType(tableConstraintType) ||
                    isSelectivelyImmutableType(tableConstraintType, unresolvedTypes, forceCheck);
        case TypeTags.UNION:
            // A union qualifies if at least one member has a readonly intersection.
            boolean readonlyIntersectionExists = false;
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (isInherentlyImmutableType(memberType) ||
                        isSelectivelyImmutableType(memberType, disallowReadOnlyObjects, unresolvedTypes,
                                forceCheck)) {
                    readonlyIntersectionExists = true;
                }
            }
            return readonlyIntersectionExists;
        case TypeTags.INTERSECTION:
            return isSelectivelyImmutableType(((BIntersectionType) type).effectiveType, false, unresolvedTypes,
                    forceCheck);
    }
    return false;
}
/**
 * Checks whether an invokable type mentions a type parameter in any of its
 * parameters or its return type, descending into nested function pointers.
 */
private boolean containsTypeParams(BInvokableType type) {
    for (BType paramType : type.paramTypes) {
        boolean paramHasTypeParam = paramType.tag == TypeTags.FUNCTION_POINTER
                ? containsTypeParams((BInvokableType) paramType)
                : TypeParamAnalyzer.isTypeParam(paramType);
        if (paramHasTypeParam) {
            return true;
        }
    }
    if (type.retType.tag == TypeTags.FUNCTION_POINTER) {
        return containsTypeParams((BInvokableType) type.retType);
    }
    return TypeParamAnalyzer.isTypeParam(type.retType);
}
// Function-type sameness: identical shape with members compared via isSameType.
private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) {
    return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);
}

/**
 * Shared shape check for function types: flag compatibility, positional parameter
 * comparison via the supplied predicate, rest-type and return-type checks.
 */
private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,
                                          Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) {
    if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
        return false;
    }
    if (source.paramTypes.size() != target.paramTypes.size()) {
        return false;
    }
    for (int i = 0; i < source.paramTypes.size(); i++) {
        if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {
            return false;
        }
    }
    // Both sides must have a rest type, or neither.
    if ((source.restType != null && target.restType == null) ||
            target.restType != null && source.restType == null) {
        return false;
    } else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {
        return false;
    }
    if (source.retType == null && target.retType == null) {
        return true;
    } else if (source.retType == null || target.retType == null) {
        return false;
    }
    // NOTE(review): return types are compared with isAssignable rather than the
    // supplied equality predicate, so even the "same type" variant checks returns
    // covariantly — confirm this asymmetry is intended.
    return isAssignable(source.retType, target.retType, unresolvedTypes);
}
// A target demanding "isolated" cannot be satisfied by a non-isolated source.
private boolean hasIncompatibleIsolatedFlags(BInvokableType source, BInvokableType target) {
    boolean targetIsolated = Symbols.isFlagOn(target.flags, Flags.ISOLATED);
    return targetIsolated && !Symbols.isFlagOn(source.flags, Flags.ISOLATED);
}

// A transactional source requires a transactional target.
private boolean hasIncompatibleTransactionalFlags(BInvokableType source, BInvokableType target) {
    boolean sourceTransactional = Symbols.isFlagOn(source.flags, Flags.TRANSACTIONAL);
    return sourceTransactional && !Symbols.isFlagOn(target.flags, Flags.TRANSACTIONAL);
}
/**
 * Checks whether two types are the same array type: same element type, and either
 * both open or both fixed with equal lengths.
 */
public boolean isSameArrayType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (source.tag != TypeTags.ARRAY || target.tag != TypeTags.ARRAY) {
        return false;
    }
    BArrayType rhsArrayType = (BArrayType) source;
    BArrayType lhsArrayType = (BArrayType) target;
    boolean sameElementTypes = isSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes);
    if (lhsArrayType.state != BArrayState.OPEN) {
        return sameElementTypes && checkSealedArraySizeEquality(rhsArrayType, lhsArrayType);
    }
    // An open array only matches another open array.
    return sameElementTypes && rhsArrayType.state == BArrayState.OPEN;
}

/** Two sealed (fixed-length) arrays have equal size exactly when lengths match. */
public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {
    return rhsArrayType.size == lhsArrayType.size;
}

/** Checks structural equivalence between two object or record types. */
public boolean checkStructEquivalency(BType rhsType, BType lhsType) {
    return checkStructEquivalency(rhsType, lhsType, new HashSet<>());
}

private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) {
    // Pairs already under resolution are assumed equivalent to terminate recursion.
    if (!unresolvedTypes.add(new TypePair(rhsType, lhsType))) {
        return true;
    }
    if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {
        return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);
    }
    if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
        return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);
    }
    return false;
}

/** Counts user-visible attached functions, excluding the generated initializer. */
private int getObjectFuncCount(BObjectTypeSymbol sym) {
    int funcCount = sym.attachedFuncs.size();
    if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {
        funcCount--;
    }
    return funcCount;
}

/**
 * Checks record equivalence: a closed target rejects open sources, open sources
 * must have an assignable rest field type, and all fields must be equivalent.
 */
public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) {
    if (lhsType.sealed && !rhsType.sealed) {
        return false;
    }
    if (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) {
        return false;
    }
    return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);
}
/**
 * Sets the typed binding pattern type for a foreach statement from the static type
 * of its collection expression.
 *
 * On success {@code foreachNode.varType} holds the per-iteration member type,
 * {@code resultType} the iterator's record result type, and
 * {@code nillableResultType} the full return type of the iterator's next() method.
 * On failure all three are set to semanticError and a diagnostic is logged.
 */
public void setForeachTypedBindingPatternType(BLangForeach foreachNode) {
    BType collectionType = foreachNode.collection.type;
    BType varType;
    switch (collectionType.tag) {
        case TypeTags.STRING:
            // Iterating a string yields its members as strings.
            varType = symTable.stringType;
            break;
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) collectionType;
            varType = arrayType.eType;
            break;
        case TypeTags.TUPLE:
            // The member type is the union of all tuple member types (plus rest).
            BTupleType tupleType = (BTupleType) collectionType;
            LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
            if (tupleType.restType != null) {
                tupleTypes.add(tupleType.restType);
            }
            varType = tupleTypes.size() == 1 ?
                    tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
            break;
        case TypeTags.MAP:
            BMapType bMapType = (BMapType) collectionType;
            varType = bMapType.constraint;
            break;
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) collectionType;
            varType = inferRecordFieldType(recordType);
            break;
        case TypeTags.XML:
            varType = BUnionType.create(null, symTable.xmlType, symTable.stringType);
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) collectionType;
            varType = tableType.constraint;
            break;
        case TypeTags.STREAM:
            BStreamType streamType = (BStreamType) collectionType;
            if (streamType.constraint.tag == TypeTags.NONE) {
                varType = symTable.anydataType;
                break;
            }
            varType = streamType.constraint;
            if (streamType.error != null) {
                // A failing stream may also produce its error type per iteration.
                BType actualType = BUnionType.create(null, varType, streamType.error);
                dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                        varType, actualType);
            }
            break;
        case TypeTags.OBJECT:
            // User-defined iterable object: derive types from its iterator contract.
            BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
            if (nextMethodReturnType != null) {
                foreachNode.resultType = getRecordType(nextMethodReturnType);
                BType valueType = (foreachNode.resultType != null)
                        ? ((BRecordType) foreachNode.resultType).fields.get("value").type : null;
                BType errorType = getErrorType(nextMethodReturnType);
                if (errorType != null) {
                    BType actualType = BUnionType.create(null, valueType, errorType);
                    dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                            valueType, actualType);
                }
                foreachNode.nillableResultType = nextMethodReturnType;
                foreachNode.varType = valueType;
                return;
            }
            dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
            // Intentional fall-through: invalid iterables share the error handling below.
        case TypeTags.SEMANTIC_ERROR:
            foreachNode.varType = symTable.semanticError;
            foreachNode.resultType = symTable.semanticError;
            foreachNode.nillableResultType = symTable.semanticError;
            return;
        default:
            foreachNode.varType = symTable.semanticError;
            foreachNode.resultType = symTable.semanticError;
            foreachNode.nillableResultType = symTable.semanticError;
            dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
                    collectionType);
            return;
    }
    // Built-in collections: resolve the lang-lib iterator and its next() return type.
    BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    BUnionType nextMethodReturnType =
            (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
    foreachNode.varType = varType;
    foreachNode.resultType = getRecordType(nextMethodReturnType);
    foreachNode.nillableResultType = nextMethodReturnType;
}
/**
 * Sets the typed binding pattern type for a query input (from) clause from the
 * static type of its collection expression. Mirrors the foreach handling, minus the
 * stream error diagnostics.
 */
public void setInputClauseTypedBindingPatternType(BLangInputClause bLangInputClause) {
    if (bLangInputClause.collection == null) {
        // Nothing to infer for an incomplete clause (parser error recovery).
        return;
    }
    BType collectionType = bLangInputClause.collection.type;
    BType varType;
    switch (collectionType.tag) {
        case TypeTags.STRING:
            varType = symTable.stringType;
            break;
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) collectionType;
            varType = arrayType.eType;
            break;
        case TypeTags.TUPLE:
            // The member type is the union of all tuple member types (plus rest).
            BTupleType tupleType = (BTupleType) collectionType;
            LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
            if (tupleType.restType != null) {
                tupleTypes.add(tupleType.restType);
            }
            varType = tupleTypes.size() == 1 ?
                    tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
            break;
        case TypeTags.MAP:
            BMapType bMapType = (BMapType) collectionType;
            varType = bMapType.constraint;
            break;
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) collectionType;
            varType = inferRecordFieldType(recordType);
            break;
        case TypeTags.XML:
            varType = BUnionType.create(null, symTable.xmlType, symTable.stringType);
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) collectionType;
            varType = tableType.constraint;
            break;
        case TypeTags.STREAM:
            BStreamType streamType = (BStreamType) collectionType;
            if (streamType.constraint.tag == TypeTags.NONE) {
                varType = symTable.anydataType;
                break;
            }
            varType = streamType.constraint;
            break;
        case TypeTags.OBJECT:
            // User-defined iterable object: derive types from its iterator contract.
            BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
            if (nextMethodReturnType != null) {
                bLangInputClause.resultType = getRecordType(nextMethodReturnType);
                bLangInputClause.nillableResultType = nextMethodReturnType;
                bLangInputClause.varType = ((BRecordType) bLangInputClause.resultType).fields.get("value").type;
                return;
            }
            dlog.error(bLangInputClause.collection.pos,
                    DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
            // Intentional fall-through: invalid iterables share the error handling below.
        case TypeTags.SEMANTIC_ERROR:
            bLangInputClause.varType = symTable.semanticError;
            bLangInputClause.resultType = symTable.semanticError;
            bLangInputClause.nillableResultType = symTable.semanticError;
            return;
        default:
            bLangInputClause.varType = symTable.semanticError;
            bLangInputClause.resultType = symTable.semanticError;
            bLangInputClause.nillableResultType = symTable.semanticError;
            dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
                    collectionType);
            return;
    }
    // Built-in collections: resolve the lang-lib iterator and its next() return type.
    BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    BUnionType nextMethodReturnType =
            (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
    bLangInputClause.varType = varType;
    bLangInputClause.resultType = getRecordType(nextMethodReturnType);
    bLangInputClause.nillableResultType = nextMethodReturnType;
}
/**
 * Returns the iteration member type for an iterable object by locating its
 * iterator() method, or null if the object is not a valid iterable.
 */
public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            return getVarTypeFromIteratorFunc(attachedFunc);
        }
    }
    return null;
}

/** A valid iterator() method takes no parameters; inspect its return type if so. */
private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {
    if (candidateIteratorFunc.type.paramTypes.isEmpty()) {
        return getVarTypeFromIteratorFuncReturnType(candidateIteratorFunc.type.retType);
    }
    return null;
}

/**
 * Given an iterator() return type, finds the iterator object's next() method and
 * derives the iteration type from it; null when the shape does not match.
 */
public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {
    if (returnType.tag != TypeTags.OBJECT) {
        return null;
    }
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {
            return getVarTypeFromNextFunc(attachedFunc);
        }
    }
    return null;
}

/** Returns next()'s union return type when it matches the iterator contract. */
private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {
    if (!nextFunc.type.paramTypes.isEmpty()) {
        // next() must not take parameters.
        return null;
    }
    BType returnType = nextFunc.type.retType;
    return checkNextFuncReturnType(returnType) ? (BUnionType) returnType : null;
}
/**
 * Checks that a next() return type has the iterator shape
 * {@code record {| T value; |}|error?}: a union containing nil, optionally error
 * members, and exactly one record member of the required shape.
 */
private boolean checkNextFuncReturnType(BType returnType) {
    if (returnType.tag != TypeTags.UNION) {
        return false;
    }
    List<BType> types = new ArrayList<>(((BUnionType) returnType).getMemberTypes());
    // Nil must be present (the iteration-complete value).
    if (!types.removeIf(type -> type.tag == TypeTags.NIL)) {
        return false;
    }
    // Error members are permitted but not required.
    types.removeIf(type -> type.tag == TypeTags.ERROR);
    if (types.size() != 1) {
        return false;
    }
    if (types.get(0).tag != TypeTags.RECORD) {
        return false;
    }
    BRecordType recordType = (BRecordType) types.get(0);
    return checkRecordTypeInNextFuncReturnType(recordType);
}

// The record member must be closed with exactly one field, named "value".
private boolean checkRecordTypeInNextFuncReturnType(BRecordType recordType) {
    if (!recordType.sealed) {
        return false;
    }
    if (recordType.fields.size() != 1) {
        return false;
    }
    return recordType.fields.containsKey(BLangCompilerConstants.VALUE_FIELD);
}
/** Returns the first record member of the union, or null if none exists. */
private BRecordType getRecordType(BUnionType type) {
    for (BType memberType : type.getMemberTypes()) {
        if (memberType.tag == TypeTags.RECORD) {
            return (BRecordType) memberType;
        }
    }
    return null;
}

/** Returns the first error member found in the union, searching nested unions too. */
public BErrorType getErrorType(BUnionType type) {
    for (BType memberType : type.getMemberTypes()) {
        if (memberType.tag == TypeTags.ERROR) {
            return (BErrorType) memberType;
        }
        if (memberType.tag == TypeTags.UNION) {
            BErrorType nestedErrorType = getErrorType((BUnionType) memberType);
            if (nestedErrorType != null) {
                return nestedErrorType;
            }
        }
    }
    return null;
}

/** Returns the declared return type of the iterator object's next() method. */
public BType getResultTypeOfNextInvocation(BObjectType iteratorType) {
    BAttachedFunction nextFunc = getAttachedFuncFromObject(iteratorType, BLangCompilerConstants.NEXT_FUNC);
    // A valid iterator always has next(); fail fast if it does not.
    return Objects.requireNonNull(nextFunc).type.retType;
}

/** Finds an attached function of the object by name, or null when absent. */
public BAttachedFunction getAttachedFuncFromObject(BObjectType objectType, String funcName) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if (funcName.equals(attachedFunc.funcName.value)) {
            return attachedFunc;
        }
    }
    return null;
}
/**
 * Infers the member type seen when iterating a record's fields: the union of all
 * field types (plus the rest field type for open records), reduced so that types
 * already covered by broader members are dropped.
 */
public BType inferRecordFieldType(BRecordType recordType) {
    Map<String, BField> fields = recordType.fields;
    BUnionType unionType = BUnionType.create(null);
    if (!recordType.sealed) {
        unionType.add(recordType.restFieldType);
    }
    for (BField field : fields.values()) {
        // Skip field types already covered by the accumulated union.
        if (isAssignable(field.type, unionType)) {
            continue;
        }
        // A field type broader than everything collected so far replaces the union.
        if (isAssignable(unionType, field.type)) {
            unionType = BUnionType.create(null);
        }
        unionType.add(field.type);
    }
    if (unionType.getMemberTypes().size() > 1) {
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
                Names.EMPTY, recordType.tsymbol.pkgID, null,
                recordType.tsymbol.owner, symTable.builtinPos, VIRTUAL);
        return unionType;
    }
    // A single-member union collapses to that member.
    return unionType.getMemberTypes().iterator().next();
}
/**
 * Enum to represent type test result.
 *
 * @since 1.2.0
 */
enum TypeTestResult {
    NOT_FOUND, // not statically decidable; a runtime check is required
    TRUE,      // the type test is statically known to succeed
    FALSE      // the type test is statically known to fail
}
/**
 * Determines statically whether a type test from {@code actualType} to
 * {@code targetType} involving compiler-known (builtin) types can be answered at
 * compile time: TRUE/FALSE when decidable, NOT_FOUND when a runtime check is needed.
 */
TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {
    int targetTag = targetType.tag;
    int actualTag = actualType.tag;
    if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {
        // Both sides are basic types: rule out impossible cross-basic-type tests.
        switch (actualTag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
                if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {
                    return TypeTestResult.FALSE;
                }
                break;
            case TypeTags.BOOLEAN:
                if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
                        || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {
                    return TypeTestResult.FALSE;
                }
                break;
            case TypeTags.STRING:
                if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
                        || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {
                    return TypeTestResult.FALSE;
                }
                break;
        }
    }
    switch (actualTag) {
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.BOOLEAN:
        case TypeTags.STRING:
        case TypeTags.SIGNED32_INT:
        case TypeTags.SIGNED16_INT:
        case TypeTags.SIGNED8_INT:
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.CHAR_STRING:
            // Any basic value type widens into the top-like types.
            if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY ||
                    targetTag == TypeTags.READONLY) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.ANYDATA:
        case TypeTags.TYPEDESC:
            if (targetTag == TypeTags.ANY) {
                return TypeTestResult.TRUE;
            }
            break;
        default:
    }
    // NOTE(review): identical integer subtypes on both sides report FALSE here —
    // confirm this (no widening between same subtypes) is the intended semantics.
    if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {
        return TypeTestResult.FALSE;
    }
    // Integer subtypes (including byte) widen into one another per the subtype lattice.
    if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)
            && (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {
        return checkBuiltInIntSubtypeWidenPossible(actualType, targetType);
    }
    if (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {
        return TypeTestResult.TRUE;
    }
    return TypeTestResult.NOT_FOUND;
}
/**
 * Checks widening between builtin integer subtypes (and byte) along the integer
 * subtype lattice: TRUE when the actual type's value set is contained in the
 * target's, NOT_FOUND when a runtime check is needed.
 */
private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {
    int actualTag = actualType.tag;
    switch (targetType.tag) {
        case TypeTags.INT:
            // Every integer subtype and byte fits in int.
            if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||
                    actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
                    actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||
                    actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
                    actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.BYTE:
            // byte and unsigned8 have the same value space.
            if (actualTag == TypeTags.UNSIGNED8_INT) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
    }
    return TypeTestResult.NOT_FOUND;
}
/**
 * Checks whether {@code actualType} can be implicitly cast (widened) to
 * {@code targetType}. (The method name's spelling is retained as part of the
 * existing API.)
 */
public boolean isImplicityCastable(BType actualType, BType targetType) {
    /* The word Builtin refers for Compiler known types. */
    BType newTargetType = targetType;
    if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {
        // Value types going into a union/finite target are boxed via any.
        newTargetType = symTable.anyType;
    } else if (targetType.tag == TypeTags.INTERSECTION) {
        newTargetType = ((BIntersectionType) targetType).effectiveType;
    }
    TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);
    if (result != TypeTestResult.NOT_FOUND) {
        return result == TypeTestResult.TRUE;
    }
    if (isValueType(targetType) &&
            (actualType.tag == TypeTags.FINITE ||
                    (actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()
                            .anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {
        // A finite source (or a union containing an assignable finite member) can be
        // implicitly narrowed to the corresponding basic value type.
        return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT
                || targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;
    } else if (targetType.tag == TypeTags.ERROR
            && (actualType.tag == TypeTags.UNION
            && isAllErrorMembers((BUnionType) actualType))) {
        // A union made up entirely of errors casts implicitly to error.
        return true;
    } else if (targetType.tag == TypeTags.STRING && actualType.tag == TypeTags.XML_TEXT) {
        return true;
    }
    return false;
}
/**
 * Checks whether an explicit cast from {@code sourceType} to {@code targetType} is
 * permitted for the given expression. Value-typed sources that pass get an implicit
 * boxing conversion attached.
 */
public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType) {
    // A source that may be an error cannot be cast to a target that can never be one.
    if (getTypeIntersection(sourceType, symTable.errorType) != symTable.semanticError
            && getTypeIntersection(targetType, symTable.errorType) == symTable.semanticError) {
        return false;
    }
    if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||
            sourceType == targetType) {
        return true;
    }
    // Casts are allowed in both the widening and the narrowing direction.
    if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {
        return true;
    }
    if (isNumericConversionPossible(expr, sourceType, targetType)) {
        return true;
    }
    boolean validTypeCast = false;
    // For unions/finite types on either side, a cast is valid if any member/value
    // overlaps with the other side.
    if (sourceType.tag == TypeTags.UNION) {
        if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    if (targetType.tag == TypeTags.UNION) {
        if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    if (sourceType.tag == TypeTags.FINITE) {
        if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    if (targetType.tag == TypeTags.FINITE) {
        if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    if (validTypeCast) {
        if (isValueType(sourceType)) {
            // Box the value-typed source so the runtime cast operates on a reference.
            setImplicitCastExpr(expr, sourceType, symTable.anyType);
        }
        return true;
    }
    return false;
}
/**
 * Checks whether a numeric conversion can make the cast from {@code sourceType} to
 * {@code targetType} valid. A target union containing more than one numeric basic
 * type is ambiguous and rejected.
 */
boolean isNumericConversionPossible(BLangExpression expr, BType sourceType,
                                    BType targetType) {
    final boolean isSourceNumericType = isBasicNumericType(sourceType);
    final boolean isTargetNumericType = isBasicNumericType(targetType);
    if (isSourceNumericType && isTargetNumericType) {
        // Numeric-to-numeric conversions are always possible.
        return true;
    }
    if (targetType.tag == TypeTags.UNION) {
        // Reject when the union offers more than one distinct numeric basic type.
        HashSet<Integer> typeTags = new HashSet<>();
        for (BType bType : ((BUnionType) targetType).getMemberTypes()) {
            if (isBasicNumericType(bType)) {
                typeTags.add(bType.tag);
                if (typeTags.size() > 1) {
                    return false;
                }
            }
        }
    }
    if (!isTargetNumericType && targetType.tag != TypeTags.UNION) {
        return false;
    }
    if (isSourceNumericType) {
        // Box the numeric source for a runtime conversion into the union target.
        setImplicitCastExpr(expr, sourceType, symTable.anyType);
        return true;
    }
    switch (sourceType.tag) {
        case TypeTags.ANY:
        case TypeTags.ANYDATA:
        case TypeTags.JSON:
            // Broad sources may hold a numeric value; decided at runtime.
            return true;
        case TypeTags.UNION:
            for (BType memType : ((BUnionType) sourceType).getMemberTypes()) {
                if (isBasicNumericType(memType) ||
                        (memType.tag == TypeTags.FINITE &&
                                finiteTypeContainsNumericTypeValues((BFiniteType) memType))) {
                    return true;
                }
            }
            break;
        case TypeTags.FINITE:
            if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {
                return true;
            }
            break;
    }
    return false;
}
/**
 * Checks whether every member type of the given union is assignable to the error type.
 *
 * @param actualType the union type to inspect
 * @return true if all members are error-assignable
 */
private boolean isAllErrorMembers(BUnionType actualType) {
    for (BType memberType : actualType.getMemberTypes()) {
        if (!isAssignable(memberType, symTable.errorType)) {
            return false;
        }
    }
    return true;
}
/**
 * Wraps {@code expr} in an implicit type-conversion node targeting {@code expType}, provided the
 * actual type is implicitly castable to it. When the expression already carries an implicit
 * conversion, the new conversion is chained on top of the existing one.
 *
 * @param expr       the expression to wrap
 * @param actualType the expression's current type
 * @param expType    the expected (target) type of the conversion
 */
public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {
    if (!isImplicityCastable(actualType, expType)) {
        return;
    }
    BLangTypeConversionExpr conversionExpr =
            (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    // Chain onto an existing implicit conversion rather than replacing it.
    conversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;
    conversionExpr.pos = expr.pos;
    conversionExpr.targetType = expType;
    conversionExpr.type = expType;
    conversionExpr.internal = true;
    expr.impConversionExpr = conversionExpr;
}
/**
 * Returns the innermost element type of a (possibly nested) array type. A non-array type is
 * returned unchanged.
 *
 * @param type the type to unwrap
 * @return the element type after stripping all array dimensions
 */
public BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Checks whether the given type can act as a listener: it must be an object type whose attached
 * methods satisfy the listener validation model.
 *
 * @param type the candidate type
 * @return true if the type is listener-compatible
 */
public boolean checkListenerCompatibility(BType type) {
    if (type.tag != TypeTags.OBJECT) {
        return false;
    }
    BObjectType objectType = (BObjectType) type;
    List<BAttachedFunction> attachedFunctions =
            ((BStructureTypeSymbol) objectType.tsymbol).attachedFuncs;
    return new ListenerValidationModel(this, symTable).checkMethods(attachedFunctions);
}
/**
 * Checks whether the given type is usable as an error detail type: a map, or a non-sealed record,
 * that is assignable to the error detail constraint type.
 *
 * @param detailType the candidate detail type
 * @return true if the type is a valid error detail type
 */
public boolean isValidErrorDetailType(BType detailType) {
    if (detailType.tag == TypeTags.MAP) {
        return isAssignable(detailType, symTable.detailType);
    }
    if (detailType.tag == TypeTags.RECORD) {
        // Sealed (closed) records cannot serve as error detail types.
        return !isSealed((BRecordType) detailType) && isAssignable(detailType, symTable.detailType);
    }
    return false;
}
// Returns true if the record type is sealed (closed), i.e. does not allow rest fields.
private boolean isSealed(BRecordType recordType) {
    return recordType.sealed;
}
// Returns true if the field type accepts nil; delegates to the type's own nullability check.
private boolean isNullable(BType fieldType) {
    return fieldType.isNullable();
}
/**
 * Visitor that decides whether two types are the same type. In every {@code visit(t, s)} call,
 * {@code t} is the target type the visitor was created for and {@code s} is the type being
 * compared against it. Recursive comparisons pass {@code unresolvedTypes} down to break cycles
 * in mutually-referencing types.
 */
private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {

    // Type pairs currently under comparison; shared through recursive isSameType calls.
    Set<TypePair> unresolvedTypes;

    BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
        this.unresolvedTypes = unresolvedTypes;
    }

    @Override
    public Boolean visit(BType t, BType s) {
        if (t == s) {
            return true;
        }
        switch (t.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
            case TypeTags.STRING:
            case TypeTags.BOOLEAN:
                // Distinct instances of basic types are only considered the same when one side
                // is a type parameter; otherwise reference identity (checked above) is required.
                return t.tag == s.tag
                        && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
            case TypeTags.ANY:
            case TypeTags.ANYDATA:
                // any/anydata additionally require matching readonly-ness.
                return t.tag == s.tag && hasSameReadonlyFlag(s, t)
                        && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
            default:
                break;
        }
        return false;
    }

    @Override
    public Boolean visit(BBuiltInRefType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BAnyType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BAnydataType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BMapType t, BType s) {
        if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
            return false;
        }
        // Maps are the same iff their constraints are the same.
        BMapType sType = ((BMapType) s);
        return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BFutureType t, BType s) {
        // Futures compare constraints by tag only, not structurally.
        return s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag;
    }

    @Override
    public Boolean visit(BXMLType t, BType s) {
        return visit((BBuiltInRefType) t, s);
    }

    @Override
    public Boolean visit(BJSONType t, BType s) {
        return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
    }

    @Override
    public Boolean visit(BArrayType t, BType s) {
        return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BObjectType t, BType s) {
        if (t == s) {
            return true;
        }
        if (s.tag != TypeTags.OBJECT) {
            return false;
        }
        // Objects are compared nominally: same package and same type name.
        return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);
    }

    @Override
    public Boolean visit(BRecordType t, BType s) {
        if (t == s) {
            return true;
        }
        if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
            return false;
        }
        BRecordType source = (BRecordType) s;
        if (source.fields.size() != t.fields.size()) {
            return false;
        }
        // Every source field must exist in the target with the same type, the same optionality,
        // and a readonly flag at least as permissive on the source side.
        for (BField sourceField : source.fields.values()) {
            if (t.fields.containsKey(sourceField.name.value)) {
                BField targetField = t.fields.get(sourceField.name.value);
                if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) &&
                        hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
                        (!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
                                Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
                    continue;
                }
            }
            return false;
        }
        return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes);
    }

    // True when the two field symbols agree on the OPTIONAL flag (XOR of the flag bits is zero).
    private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
        return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
    }

    // True when both types agree on the READONLY flag.
    private boolean hasSameReadonlyFlag(BType source, BType target) {
        return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
    }

    // NOTE(review): missing @Override annotation (harmless, but inconsistent with siblings).
    public Boolean visit(BTupleType t, BType s) {
        if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
            return false;
        }
        BTupleType source = (BTupleType) s;
        if (source.tupleTypes.size() != t.tupleTypes.size()) {
            return false;
        }
        for (int i = 0; i < source.tupleTypes.size(); i++) {
            // noType members on the target act as wildcards and match anything.
            if (t.getTupleTypes().get(i) == symTable.noType) {
                continue;
            }
            if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    @Override
    public Boolean visit(BStreamType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BTableType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BInvokableType t, BType s) {
        return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BUnionType tUnionType, BType s) {
        if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
            return false;
        }
        BUnionType sUnionType = (BUnionType) s;
        if (sUnionType.getMemberTypes().size()
                != tUnionType.getMemberTypes().size()) {
            return false;
        }
        Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes());
        Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes());
        // Same iff every source member has a same-typed counterpart among the target members
        // (sizes already match, so this is effectively a bijection check).
        boolean notSameType = sourceTypes
                .stream()
                .map(sT -> targetTypes
                        .stream()
                        .anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))
                .anyMatch(foundSameType -> !foundSameType);
        return !notSameType;
    }

    @Override
    public Boolean visit(BIntersectionType tIntersectionType, BType s) {
        if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
            return false;
        }
        BIntersectionType sIntersectionType = (BIntersectionType) s;
        if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
            return false;
        }
        Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
        Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
        // Every source constituent must have a same-typed counterpart among the target's.
        for (BType sourceType : sourceTypes) {
            boolean foundSameType = false;
            for (BType targetType : targetTypes) {
                if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
                    foundSameType = true;
                    break;
                }
            }
            if (!foundSameType) {
                return false;
            }
        }
        return true;
    }

    @Override
    public Boolean visit(BErrorType t, BType s) {
        if (s.tag != TypeTags.ERROR) {
            return false;
        }
        BErrorType source = (BErrorType) s;
        // Errors must carry identical type-id sets and same detail types.
        if (!source.typeIdSet.equals(t.typeIdSet)) {
            return false;
        }
        if (source.detailType == t.detailType) {
            return true;
        }
        return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BTypedescType t, BType s) {
        if (s.tag != TypeTags.TYPEDESC) {
            return false;
        }
        BTypedescType sType = ((BTypedescType) s);
        return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BFiniteType t, BType s) {
        // Finite types are only the same by reference identity.
        return s == t;
    }

    @Override
    public Boolean visit(BParameterizedType t, BType s) {
        if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
            return false;
        }
        BParameterizedType sType = (BParameterizedType) s;
        return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
    }
};
/**
 * Checks that every field of the LHS record is backed by a compatible RHS field (present,
 * readonly-compatible, optionality-compatible, and assignable) and that every extra RHS field is
 * assignable to the LHS rest field type.
 *
 * @param lhsType         the target record type
 * @param rhsType         the source record type
 * @param unresolvedTypes pairs already being checked, to break recursive cycles
 * @return true if the RHS record's fields are equivalent to the LHS record's fields
 */
private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
    Map<String, BField> remainingRhsFields = new LinkedHashMap<>(rhsType.fields);
    for (BField lhsField : lhsType.fields.values()) {
        BField rhsField = remainingRhsFields.remove(lhsField.name.value);
        if (rhsField == null
                || hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, rhsField.symbol.flags)
                || (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol))
                || !isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
            return false;
        }
    }
    // RHS fields without an LHS counterpart must be accepted by the LHS rest field type.
    for (BField rhsOnlyField : remainingRhsFields.values()) {
        if (!isAssignable(rhsOnlyField.type, lhsType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Finds the first function in {@code rhsFuncList} that has the same name as {@code lhsFunc} and
 * whose function type is assignable to the LHS function's type.
 *
 * @param rhsFuncList     the candidate attached functions
 * @param lhsFunc         the function being matched
 * @param unresolvedTypes pairs already being checked, to break recursive cycles
 * @return the matching function, or {@code null} if none matches
 */
private BAttachedFunction getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc,
                                                   Set<TypePair> unresolvedTypes) {
    for (BAttachedFunction candidate : rhsFuncList) {
        if (lhsFunc.funcName.equals(candidate.funcName)
                && isFunctionTypeAssignable(candidate.type, lhsFunc.type, unresolvedTypes)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Checks whether two symbols share a visibility region: private symbols must share both package
 * and owner name, public symbols match any public symbol, and module-level (neither private nor
 * public) symbols must share the package.
 *
 * @param lhsSym the first symbol
 * @param rhsSym the second symbol
 * @return true if both symbols are in the same visibility region
 */
private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {
    if (Symbols.isPrivate(lhsSym)) {
        if (!Symbols.isPrivate(rhsSym)) {
            return false;
        }
        return lhsSym.pkgID.equals(rhsSym.pkgID) && lhsSym.owner.name.equals(rhsSym.owner.name);
    }
    if (Symbols.isPublic(lhsSym)) {
        return Symbols.isPublic(rhsSym);
    }
    // LHS is module-level: RHS must be module-level too, and in the same package.
    return !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID);
}
/**
 * Checks whether {@code source} is assignable to {@code target}, treating both as (possibly
 * single-member) unions: every effective source member type must be assignable to at least one
 * target member type. {@code never} members are skipped; finite and XML members get an
 * additional whole-target fallback check.
 *
 * @param source          the source type
 * @param target          the target type
 * @param unresolvedTypes pairs already being checked, to break recursive cycles
 * @return true if every source member type is assignable to the target
 */
private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    Set<BType> sourceTypes = new LinkedHashSet<>();
    Set<BType> targetTypes = new LinkedHashSet<>();
    if (source.tag == TypeTags.UNION) {
        sourceTypes.addAll(getEffectiveMemberTypes((BUnionType) source));
    } else {
        sourceTypes.add(source);
    }
    if (target.tag == TypeTags.UNION) {
        targetTypes.addAll(getEffectiveMemberTypes((BUnionType) target));
    } else {
        targetTypes.add(target);
    }
    for (BType sourceMember : sourceTypes) {
        if (sourceMember.tag == TypeTags.NEVER) {
            // `never` members impose no requirement on the target.
            continue;
        }
        boolean assignableToSomeTarget = false;
        for (BType targetMember : targetTypes) {
            if (isAssignable(sourceMember, targetMember, unresolvedTypes)) {
                assignableToSomeTarget = true;
                break;
            }
        }
        if (assignableToSomeTarget) {
            continue;
        }
        // Fallbacks: a finite member may be assignable to the target taken as a whole, and an
        // XML member may be covered via the expanded XML built-in subtypes.
        if (sourceMember.tag == TypeTags.FINITE && isAssignable(sourceMember, target, unresolvedTypes)) {
            continue;
        }
        if (sourceMember.tag == TypeTags.XML
                && isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)) {
            continue;
        }
        return false;
    }
    return true;
}
/**
 * Returns the member types of the union with each intersection member replaced by its effective
 * type; effective types that are themselves unions are flattened recursively.
 *
 * @param unionType the union type to expand
 * @return the flattened, effective member types in encounter order
 */
private Set<BType> getEffectiveMemberTypes(BUnionType unionType) {
    Set<BType> effectiveTypes = new LinkedHashSet<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag != TypeTags.INTERSECTION) {
            effectiveTypes.add(memberType);
            continue;
        }
        BType effectiveType = ((BIntersectionType) memberType).effectiveType;
        if (effectiveType.tag == TypeTags.UNION) {
            // Flatten nested unions produced by intersections.
            effectiveTypes.addAll(getEffectiveMemberTypes((BUnionType) effectiveType));
        } else {
            effectiveTypes.add(effectiveType);
        }
    }
    return effectiveTypes;
}
/**
 * Checks whether the given finite type is assignable to {@code targetType}: every value in the
 * finite type's value space must be assignable to the target.
 *
 * @param finiteType      the source finite type
 * @param targetType      the target type
 * @param unresolvedTypes pairs already being checked, to break recursive cycles
 * @return true if every value in the value space is assignable to the target
 */
private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) {
    if (targetType.tag == TypeTags.FINITE) {
        // Finite target: every value must appear in the target's value space.
        return finiteType.getValueSpace().stream()
                .allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression));
    }
    if (targetType.tag == TypeTags.UNION) {
        List<BType> unionMemberTypes = getAllTypes(targetType);
        return finiteType.getValueSpace().stream()
                .allMatch(valueExpr -> unionMemberTypes.stream()
                        .anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ?
                                isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) :
                                // NOTE(review): for non-finite members this checks against the
                                // whole union (targetType), not targetMemType — the result is the
                                // same for every member, so anyMatch degenerates to a single
                                // check; confirm this is intentional.
                                isAssignable(valueExpr.type, targetType, unresolvedTypes)));
    }
    return finiteType.getValueSpace().stream()
            .allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes));
}
/**
 * Checks whether the given literal matches at least one value in the value space of the specified
 * type, which must be a finite type. A {@code null} member value only matches a {@code null}
 * literal value.
 *
 * @param type        the candidate type (only finite types can match)
 * @param literalExpr the literal to look for
 * @return true if {@code type} is a finite type containing the literal's value
 */
boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {
    if (type.tag != TypeTags.FINITE) {
        return false;
    }
    BFiniteType finiteType = (BFiniteType) type;
    for (BLangExpression memberExpr : finiteType.getValueSpace()) {
        BLangLiteral memberLiteral = (BLangLiteral) memberExpr;
        if (memberLiteral.value == null) {
            if (literalExpr.value == null) {
                return true;
            }
            continue;
        }
        if (checkLiteralAssignabilityBasedOnType(memberLiteral, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Method to check the literal assignability based on the types of the literals. For numeric
 * literals the assignability depends on the equivalency of the literals. The candidate literal
 * could either be a simple literal or a constant. In the case of a constant, it is assignable to
 * the base literal if and only if both literals have the same type and equivalent values.
 *
 * @param baseLiteral      Literal based on which we check the assignability.
 * @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.
 * @return true if assignable; false otherwise.
 */
boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {
    // Different literal kinds (e.g. string vs numeric) never match.
    if (baseLiteral.getKind() != candidateLiteral.getKind()) {
        return false;
    }
    Object baseValue = baseLiteral.value;
    Object candidateValue = candidateLiteral.value;
    int candidateTypeTag = candidateLiteral.type.tag;
    switch (baseLiteral.type.tag) {
        case TypeTags.BYTE:
            // A non-constant int literal in byte range is also accepted for a byte base.
            if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&
                    !candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.INT:
            if (candidateTypeTag == TypeTags.INT) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        // For the int subtypes, an int candidate is accepted only when its value fits the
        // subtype's range; the values are then compared for equality.
        case TypeTags.SIGNED32_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED8_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.FLOAT:
            String baseValueStr = String.valueOf(baseValue);
            String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr;
            // A decimal-discriminated base literal (e.g. a `d` suffix) cannot match a float.
            if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {
                return false;
            }
            double baseDoubleVal = Double.parseDouble(baseValueStr);
            double candidateDoubleVal;
            if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                // A non-constant int literal is compared after widening to double.
                candidateDoubleVal = ((Long) candidateValue).doubleValue();
                return baseDoubleVal == candidateDoubleVal;
            } else if (candidateTypeTag == TypeTags.FLOAT) {
                candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));
                return baseDoubleVal == candidateDoubleVal;
            }
            break;
        case TypeTags.DECIMAL:
            BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);
            BigDecimal candidateDecimalVal;
            if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);
                return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
            } else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||
                    candidateTypeTag == TypeTags.DECIMAL) {
                // A float-discriminated candidate (e.g. an `f` suffix) cannot match a decimal.
                if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {
                    return false;
                }
                candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);
                return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
            }
            break;
        default:
            // Non-numeric literals (e.g. string, boolean) compare by plain value equality.
            return baseValue.equals(candidateValue);
    }
    return false;
}
/**
 * Checks whether the given long value is within the byte value range.
 * Compares the full long value: the previous {@code intValue()} comparison silently truncated
 * values outside the int range, e.g. wrongly accepting 2^32 (which truncates to 0).
 *
 * @param longObject the value to check
 * @return true if the value lies in [BBYTE_MIN_VALUE, BBYTE_MAX_VALUE]
 */
boolean isByteLiteralValue(Long longObject) {
    return (longObject >= BBYTE_MIN_VALUE && longObject <= BBYTE_MAX_VALUE);
}
/**
 * Checks whether the given long value is within the signed 32-bit integer range.
 *
 * @param longObject the value to check
 * @return true if the value lies in [SIGNED32_MIN_VALUE, SIGNED32_MAX_VALUE]
 */
boolean isSigned32LiteralValue(Long longObject) {
    long value = longObject;
    return SIGNED32_MIN_VALUE <= value && value <= SIGNED32_MAX_VALUE;
}
/**
 * Checks whether the given long value is within the signed 16-bit integer range.
 * Compares the full long value: the previous {@code intValue()} comparison silently truncated
 * values outside the int range, e.g. wrongly accepting 2^32 (which truncates to 0).
 *
 * @param longObject the value to check
 * @return true if the value lies in [SIGNED16_MIN_VALUE, SIGNED16_MAX_VALUE]
 */
boolean isSigned16LiteralValue(Long longObject) {
    return (longObject >= SIGNED16_MIN_VALUE && longObject <= SIGNED16_MAX_VALUE);
}
/**
 * Checks whether the given long value is within the signed 8-bit integer range.
 * Compares the full long value: the previous {@code intValue()} comparison silently truncated
 * values outside the int range, e.g. wrongly accepting 2^32 (which truncates to 0).
 *
 * @param longObject the value to check
 * @return true if the value lies in [SIGNED8_MIN_VALUE, SIGNED8_MAX_VALUE]
 */
boolean isSigned8LiteralValue(Long longObject) {
    return (longObject >= SIGNED8_MIN_VALUE && longObject <= SIGNED8_MAX_VALUE);
}
/**
 * Checks whether the given long value is within the unsigned 32-bit integer range.
 *
 * @param longObject the value to check
 * @return true if the value lies in [0, UNSIGNED32_MAX_VALUE]
 */
boolean isUnsigned32LiteralValue(Long longObject) {
    long value = longObject;
    return 0 <= value && value <= UNSIGNED32_MAX_VALUE;
}
/**
 * Checks whether the given long value is within the unsigned 16-bit integer range.
 * Compares the full long value: the previous {@code intValue()} comparison silently truncated
 * values outside the int range, e.g. wrongly accepting 2^32 (which truncates to 0).
 *
 * @param longObject the value to check
 * @return true if the value lies in [0, UNSIGNED16_MAX_VALUE]
 */
boolean isUnsigned16LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED16_MAX_VALUE);
}
/**
 * Checks whether the given long value is within the unsigned 8-bit integer range.
 * Compares the full long value: the previous {@code intValue()} comparison silently truncated
 * values outside the int range, e.g. wrongly accepting 2^32 (which truncates to 0).
 *
 * @param longObject the value to check
 * @return true if the value lies in [0, UNSIGNED8_MAX_VALUE]
 */
boolean isUnsigned8LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED8_MAX_VALUE);
}
/**
 * Checks whether the given string literal consists of exactly one Unicode code point.
 *
 * @param literal the string literal value
 * @return true if the literal contains exactly one code point
 */
boolean isCharLiteralValue(String literal) {
    // Exactly one code point iff the string is non-empty and its whole length is taken up by
    // the first code point (1 char for BMP, 2 chars for a surrogate pair).
    return !literal.isEmpty() && literal.length() == Character.charCount(literal.codePointAt(0));
}
/**
 * Method to retrieve a type representing all the values in the value space of a finite type that are assignable to
 * the target type.
 *
 * @param finiteType the finite type
 * @param targetType the target type
 * @return a new finite type if at least one value in the value space of the specified finiteType is
 *         assignable to targetType (the same if all are assignable), else semanticError
 */
BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {
    // If the finite type as a whole is assignable, reuse it unchanged.
    if (isAssignable(finiteType, targetType)) {
        return finiteType;
    }
    // Otherwise keep the values assignable to the target directly, assignable to it as a finite
    // type, or assignable to some finite member of a target union.
    Set<BLangExpression> matchingValues = finiteType.getValueSpace().stream()
            .filter(
                    expr -> isAssignable(expr.type, targetType) ||
                            isAssignableToFiniteType(targetType, (BLangLiteral) expr) ||
                            (targetType.tag == TypeTags.UNION &&
                                    ((BUnionType) targetType).getMemberTypes().stream()
                                            .filter(memType -> memType.tag == TypeTags.FINITE)
                                            .anyMatch(filteredType -> isAssignableToFiniteType(filteredType,
                                                    (BLangLiteral) expr))))
            .collect(Collectors.toSet());
    if (matchingValues.isEmpty()) {
        return symTable.semanticError;
    }
    // Wrap the surviving values in a fresh anonymous finite type with its own symbol.
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,
            names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
            finiteType.tsymbol.pkgID, null,
            finiteType.tsymbol.owner, finiteType.tsymbol.pos,
            VIRTUAL);
    BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);
    finiteTypeSymbol.type = intersectingFiniteType;
    return intersectingFiniteType;
}
/**
 * Method to retrieve a type representing all the member types of a union type that are assignable
 * to the target type.
 *
 * @param unionType  the union type
 * @param targetType the target type
 * @return a single type or a new union type if at least one member type of the union type is
 *         assignable to targetType, else semanticError
 */
BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType) {
    List<BType> matchingMembers = new LinkedList<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.FINITE) {
            // For a finite member, keep only the subset of its values assignable to the target.
            BType narrowedFiniteType =
                    getTypeForFiniteTypeValuesAssignableToType((BFiniteType) memberType, targetType);
            if (narrowedFiniteType != symTable.semanticError) {
                matchingMembers.add(narrowedFiniteType);
            }
        } else if (isAssignable(memberType, targetType)) {
            matchingMembers.add(memberType);
        }
    }
    if (matchingMembers.isEmpty()) {
        return symTable.semanticError;
    }
    if (matchingMembers.size() == 1) {
        return matchingMembers.get(0);
    }
    return BUnionType.create(null, new LinkedHashSet<>(matchingMembers));
}
/**
 * Checks whether an equality comparison between values of the two types could ever succeed: both
 * types must be pure, and their expanded member type sets must intersect.
 *
 * @param lhsType the left-hand operand type
 * @param rhsType the right-hand operand type
 * @return true if the two types could share a value
 */
boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {
    if (!lhsType.isPureType() || !rhsType.isPureType()) {
        return false;
    }
    // Assignability in either direction guarantees a shared value space.
    if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {
        return true;
    }
    return equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsType),
                                      expandAndGetMemberTypesRecursive(rhsType));
}
/**
 * Checks whether two expanded member type sets could share a value: {@code anydata} on one side
 * intersects with any non-error type on the other, identical member types intersect directly, and
 * otherwise the structured-type specific checks are consulted.
 *
 * @param lhsTypes expanded member types of the left-hand side
 * @param rhsTypes expanded member types of the right-hand side
 * @return true if an equality intersection exists
 */
private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    if ((lhsTypes.contains(symTable.anydataType) && hasNonErrorMember(rhsTypes))
            || (rhsTypes.contains(symTable.anydataType) && hasNonErrorMember(lhsTypes))) {
        return true;
    }
    for (BType lhsMemberType : lhsTypes) {
        for (BType rhsMemberType : rhsTypes) {
            if (isSameType(lhsMemberType, rhsMemberType)) {
                return true;
            }
        }
    }
    return equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);
}

// True if any member of the set is not an error type.
private boolean hasNonErrorMember(Set<BType> types) {
    for (BType type : types) {
        if (type.tag != TypeTags.ERROR) {
            return true;
        }
    }
    return false;
}
/**
 * Retrieves member types of the specified type, expanding maps/arrays of/constrained by unions types to individual
 * maps/arrays.
 *
 * e.g., (string|int)[] would cause three entries as string[], int[], (string|int)[]
 *
 * @param bType the type for which member types needs to be identified
 * @return a set containing all the retrieved member types
 */
public Set<BType> expandAndGetMemberTypesRecursive(BType bType) {
    Set<BType> memberTypes = new LinkedHashSet<>();
    switch (bType.tag) {
        case TypeTags.BYTE:
        case TypeTags.INT:
            // byte and int share values, so both are added for either tag.
            memberTypes.add(symTable.intType);
            memberTypes.add(symTable.byteType);
            break;
        case TypeTags.FINITE:
            // A finite type contributes the static types of its individual values.
            BFiniteType expType = (BFiniteType) bType;
            expType.getValueSpace().forEach(value -> {
                memberTypes.add(value.type);
            });
            break;
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) bType;
            unionType.getMemberTypes().forEach(member -> {
                memberTypes.addAll(expandAndGetMemberTypesRecursive(member));
            });
            break;
        case TypeTags.ARRAY:
            BType arrayElementType = ((BArrayType) bType).getElementType();

            // add an unsealed array to allow comparison between closed and open arrays
            if (((BArrayType) bType).getSize() != -1) {
                memberTypes.add(new BArrayType(arrayElementType));
            }

            if (arrayElementType.tag == TypeTags.UNION) {
                // Expand (T1|T2)[] into T1[], T2[], ... in addition to the original array type.
                Set<BType> elementUnionTypes = expandAndGetMemberTypesRecursive(arrayElementType);
                elementUnionTypes.forEach(elementUnionType -> {
                    memberTypes.add(new BArrayType(elementUnionType));
                });
            }
            memberTypes.add(bType);
            break;
        case TypeTags.MAP:
            BType mapConstraintType = ((BMapType) bType).getConstraint();
            if (mapConstraintType.tag == TypeTags.UNION) {
                // Expand map<T1|T2> into map<T1>, map<T2>, ... in addition to the original map.
                Set<BType> constraintUnionTypes = expandAndGetMemberTypesRecursive(mapConstraintType);
                constraintUnionTypes.forEach(constraintUnionType -> {
                    memberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol));
                });
            }
            memberTypes.add(bType);
            break;
        default:
            memberTypes.add(bType);
    }
    return memberTypes;
}
/**
 * Checks whether two tuple types could share a value: they must have the same arity, and every
 * pair of member types at the same index must have an equality intersection.
 *
 * @param lhsType the left-hand tuple type
 * @param rhsType the right-hand tuple type
 * @return true if an intersection exists at every member position
 */
private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {
    List<BType> lhsMemberTypes = lhsType.getTupleTypes();
    List<BType> rhsMemberTypes = rhsType.getTupleTypes();
    if (lhsMemberTypes.size() != rhsMemberTypes.size()) {
        return false;
    }
    for (int i = 0; i < lhsMemberTypes.size(); i++) {
        if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)),
                                        expandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether an equality intersection exists between the structured/complex member types of
 * the two expanded type sets. Invoked after the direct same-type check has failed.
 *
 * @param lhsTypes expanded member types of the left-hand side
 * @param rhsTypes expanded member types of the right-hand side
 * @return true if some pairing of complex member types could share a value
 */
private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    for (BType lhsMemberType : lhsTypes) {
        switch (lhsMemberType.tag) {
            case TypeTags.INT:
            case TypeTags.STRING:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
            case TypeTags.BOOLEAN:
            case TypeTags.NIL:
                // A simple value intersects with a json member on the RHS.
                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }
                break;
            case TypeTags.JSON:
                if (jsonEqualityIntersectionExists(rhsTypes)) {
                    return true;
                }
                break;
            case TypeTags.TUPLE:
                // Tuples intersect with same-arity tuples or with arrays whose element type
                // intersects every tuple member.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {
                    return true;
                }

                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,
                                        (BTupleType) lhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.ARRAY:
                // Arrays intersect with arrays whose element types intersect, or with tuples
                // (checked element-wise against the array element type).
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),
                                        expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {
                    return true;
                }

                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,
                                        (BTupleType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.MAP:
                // Maps intersect with maps (constraint-wise), with json (unless the constraint is
                // necessarily an error), or with records (field-wise against the constraint).
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),
                                        expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {
                    return true;
                }

                if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&
                        rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }

                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                mapRecordEqualityIntersectionExists((BMapType) lhsMemberType,
                                        (BRecordType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.OBJECT:
            case TypeTags.RECORD:
                // Objects/records intersect with struct-equivalent types, with other records
                // (field-wise), with json, or with maps (constraint-wise).
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) ||
                                checkStructEquivalency(lhsMemberType, rhsMemberType))) {
                    return true;
                }

                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                recordEqualityIntersectionExists((BRecordType) lhsMemberType,
                                        (BRecordType) rhsMemberType))) {
                    return true;
                }

                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&
                        jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {
                    return true;
                }

                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                mapRecordEqualityIntersectionExists((BMapType) rhsMemberType,
                                        (BRecordType) lhsMemberType))) {
                    return true;
                }
                break;
        }
    }
    return false;
}
/**
 * Checks whether an array type and a tuple type could share a value: every tuple member type must
 * have an equality intersection with the array's element type.
 *
 * @param arrayType the array type
 * @param tupleType the tuple type
 * @return true if all tuple member types intersect with the array element type
 */
private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {
    Set<BType> elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);
    for (BType tupleMemberType : tupleType.tupleTypes) {
        if (!equalityIntersectionExists(elementTypes, expandAndGetMemberTypesRecursive(tupleMemberType))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether two record types could share a value: every field present in both records must
 * have an equality intersection, and each unmatched required field must intersect with the other
 * record's rest field type (which requires that record to be open).
 *
 * @param lhsType the left-hand record type
 * @param rhsType the right-hand record type
 * @return true if an equality intersection exists between the two record types
 */
private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {
    Map<String, BField> lhsFields = lhsType.fields;
    Map<String, BField> rhsFields = rhsType.fields;
    List<Name> matchedFieldNames = new ArrayList<>();
    for (BField lhsField : lhsFields.values()) {
        if (rhsFields.containsKey(lhsField.name.value)) {
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(
                                                    rhsFields.get(lhsField.name.value).type))) {
                return false;
            }
            matchedFieldNames.add(lhsField.getName());
        } else {
            if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {
                // An optional LHS field absent on the RHS imposes no constraint; move on to the
                // next field. (Previously this was a `break`, which also skipped the checks for
                // every remaining LHS field.)
                continue;
            }
            // A required LHS field must fit the RHS rest field type, so RHS must be open.
            if (rhsType.sealed) {
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(rhsType.restFieldType))) {
                return false;
            }
        }
    }
    // Symmetric check for RHS-only required fields against the LHS rest field type.
    for (BField rhsField : rhsFields.values()) {
        if (matchedFieldNames.contains(rhsField.getName())) {
            continue;
        }
        if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {
            if (lhsType.sealed) {
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),
                                            expandAndGetMemberTypesRecursive(lhsType.restFieldType))) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether a map type and a record type could share a value: every required record field
 * type must have an equality intersection with the map's constraint type.
 *
 * @param mapType    the map type
 * @param recordType the record type
 * @return true if an intersection exists for every required field
 */
private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {
    Set<BType> constraintTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());
    for (BField field : recordType.fields.values()) {
        if (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {
            // Optional fields need not be representable in the map.
            continue;
        }
        if (!equalityIntersectionExists(constraintTypes, expandAndGetMemberTypesRecursive(field.type))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether any type in the set could share a value with {@code json}: maps qualify unless
 * their constraint is necessarily an error, records qualify unless some required field is
 * necessarily an error, and any other json-assignable type qualifies directly.
 *
 * @param typeSet the expanded member types to check
 * @return true if some member type intersects with json
 */
private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) {
    for (BType type : typeSet) {
        if (type.tag == TypeTags.MAP) {
            if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {
                return true;
            }
        } else if (type.tag == TypeTags.RECORD) {
            boolean intersects = true;
            for (BField field : ((BRecordType) type).fields.values()) {
                // A required field whose type is necessarily an error rules out json.
                if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)
                        && isAssignable(field.type, symTable.errorType)) {
                    intersects = false;
                    break;
                }
            }
            if (intersects) {
                return true;
            }
        } else if (isAssignable(type, symTable.jsonType)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns the type that remains after removing {@code typeToRemove} from {@code originalType}.
 * Narrowing is only performed for union and finite types; any other type is returned unchanged.
 *
 * @param originalType the type to narrow
 * @param typeToRemove the type being removed
 * @return the remaining type
 */
public BType getRemainingType(BType originalType, BType typeToRemove) {
    if (originalType.tag == TypeTags.UNION) {
        return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.FINITE) {
        return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
    }
    return originalType;
}
/**
 * Computes the intersection of {@code lhsType} and {@code rhsType} by intersecting the LHS with
 * each member type of the RHS. Returns {@code symTable.semanticError} when the intersection is
 * empty (unless the LHS is the null set, which is returned as-is).
 *
 * @param lhsType the type being narrowed
 * @param rhsType the narrowing type
 * @return the intersection type, or {@code symTable.semanticError} if no intersection exists
 */
BType getTypeIntersection(BType lhsType, BType rhsType) {
    List<BType> narrowingTypes = getAllTypes(rhsType);
    // For each RHS member, pick the narrower of the two sides, or narrow finite/union types
    // member-by-member; null marks "no intersection" and is filtered out below.
    LinkedHashSet<BType> intersection = narrowingTypes.stream().map(type -> {
        if (isAssignable(type, lhsType)) {
            return type;
        } else if (isAssignable(lhsType, type)) {
            return lhsType;
        } else if (lhsType.tag == TypeTags.FINITE) {
            BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) lhsType, type);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.FINITE) {
            BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) type, lhsType);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (lhsType.tag == TypeTags.UNION) {
            BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) lhsType, type);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.UNION) {
            BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) type, lhsType);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.NULL_SET) {
            return type;
        }
        return null;
    }).filter(type -> type != null).collect(Collectors.toCollection(LinkedHashSet::new));
    if (intersection.isEmpty()) {
        if (lhsType.tag == TypeTags.NULL_SET) {
            return lhsType;
        }
        return symTable.semanticError;
    }
    if (intersection.contains(symTable.semanticError)) {
        return symTable.semanticError;
    } else if (intersection.size() == 1) {
        return intersection.toArray(new BType[0])[0];
    } else {
        return BUnionType.create(null, intersection);
    }
}
/**
 * Computes the type that remains after removing the given types from the union's member types.
 * Members assignable to a removed type are dropped outright; finite members are instead narrowed
 * value-by-value.
 *
 * @param originalType the union being narrowed
 * @param removeTypes  the types to remove
 * @return the remaining type; {@code symTable.nullSet} when nothing remains
 */
private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) {
    List<BType> remainingTypes = getAllTypes(originalType);
    removeTypes.forEach(removeType -> remainingTypes.removeIf(type -> isAssignable(type, removeType)));

    // Re-process surviving finite members: strip just the values covered by the removed types,
    // replacing the finite member with the narrowed variant (or dropping it if empty).
    List<BType> finiteTypesToRemove = new ArrayList<>();
    List<BType> finiteTypesToAdd = new ArrayList<>();
    for (BType remainingType : remainingTypes) {
        if (remainingType.tag == TypeTags.FINITE) {
            BFiniteType finiteType = (BFiniteType) remainingType;
            finiteTypesToRemove.add(finiteType);
            BType remainingTypeWithMatchesRemoved = getRemainingType(finiteType, removeTypes);
            if (remainingTypeWithMatchesRemoved != symTable.semanticError) {
                finiteTypesToAdd.add(remainingTypeWithMatchesRemoved);
            }
        }
    }
    remainingTypes.removeAll(finiteTypesToRemove);
    remainingTypes.addAll(finiteTypesToAdd);

    if (remainingTypes.size() == 1) {
        return remainingTypes.get(0);
    }

    if (remainingTypes.isEmpty()) {
        return symTable.nullSet;
    }

    return BUnionType.create(null, new LinkedHashSet<>(remainingTypes));
}
/**
 * Filters the value space of a finite type, removing every value that matches
 * one of {@code removeTypes}.
 *
 * @return a fresh anonymous finite type holding the surviving values, or
 *         {@code symTable.semanticError} when no values remain
 */
private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) {
Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>();
for (BLangExpression valueExpr : originalType.getValueSpace()) {
boolean matchExists = false;
for (BType remType : removeTypes) {
// NOTE(review): the cast assumes every value-space member is a BLangLiteral —
// TODO confirm that invariant holds for all finite types reaching here.
if (isAssignable(valueExpr.type, remType) ||
isAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) {
matchExists = true;
break;
}
}
if (!matchExists) {
remainingValueSpace.add(valueExpr);
}
}
if (remainingValueSpace.isEmpty()) {
return symTable.semanticError;
}
// Wrap the surviving values in a new anonymous finite type with its own symbol.
BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,
names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
originalType.tsymbol.pkgID, null,
originalType.tsymbol.owner, originalType.tsymbol.pos,
VIRTUAL);
BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);
finiteTypeSymbol.type = intersectingFiniteType;
return intersectingFiniteType;
}
/**
 * Returns {@code type} with its nil and/or error components lifted (removed).
 *
 * @param type type to narrow
 * @param liftNil whether to remove the nil component
 * @param liftError whether to remove the error component
 * @return the narrowed type; non-union, non-nilable inputs are returned as-is
 */
public BType getSafeType(BType type, boolean liftNil, boolean liftError) {
// json/any/anydata/readonly implicitly include nil; return a non-nullable copy.
switch (type.tag) {
case TypeTags.JSON:
BJSONType jsonType = (BJSONType) type;
return new BJSONType(jsonType.tag, jsonType.tsymbol, false);
case TypeTags.ANY:
return new BAnyType(type.tag, type.tsymbol, false);
case TypeTags.ANYDATA:
return new BAnydataType(type.tag, type.tsymbol, false);
case TypeTags.READONLY:
return new BReadonlyType(type.tag, type.tsymbol, false);
}
if (type.tag != TypeTags.UNION) {
return type;
}
BUnionType unionType = (BUnionType) type;
// Copy the member set so the original union is left untouched.
LinkedHashSet<BType> memTypes = new LinkedHashSet<>(unionType.getMemberTypes());
BUnionType errorLiftedType = BUnionType.create(null, memTypes);
if (liftNil) {
errorLiftedType.remove(symTable.nilType);
}
if (liftError) {
errorLiftedType.remove(symTable.errorType);
}
// A single surviving member collapses to that member itself.
if (errorLiftedType.getMemberTypes().size() == 1) {
return errorLiftedType.getMemberTypes().toArray(new BType[0])[0];
}
return errorLiftedType;
}
/**
 * Flattens a (possibly nested) union into the ordered list of its member
 * types; a non-union type yields a single-element list.
 */
public List<BType> getAllTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    List<BType> flattened = new ArrayList<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        flattened.addAll(getAllTypes(memberType));
    }
    return flattened;
}
/**
 * Whether {@code type} may be the declared type of a constant: the simple
 * basic types, maps of allowed types, and finite types over allowed values.
 */
public boolean isAllowedConstantType(BType type) {
    switch (type.tag) {
        case TypeTags.MAP:
            // Map constants are allowed when their constraint is allowed.
            return isAllowedConstantType(((BMapType) type).constraint);
        case TypeTags.FINITE:
            // Inspect the first value of the value space; its type decides.
            BLangExpression finiteValue =
                    ((BFiniteType) type).getValueSpace().toArray(new BLangExpression[0])[0];
            return isAllowedConstantType(finiteValue.type);
        case TypeTags.BOOLEAN:
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.NIL:
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether {@code literal} is a valid value for {@code targetType},
 * allowing the implicit int/float/decimal and string/char narrowings below.
 */
public boolean isValidLiteral(BLangLiteral literal, BType targetType) {
BType literalType = literal.type;
// Exact tag match is always valid.
if (literalType.tag == targetType.tag) {
return true;
}
switch (targetType.tag) {
case TypeTags.BYTE:
return literalType.tag == TypeTags.INT && isByteLiteralValue((Long) literal.value);
case TypeTags.DECIMAL:
// Float and int literals may serve as decimal values.
return literalType.tag == TypeTags.FLOAT || literalType.tag == TypeTags.INT;
case TypeTags.FLOAT:
return literalType.tag == TypeTags.INT;
// Fixed-width int subtypes: the int literal must also fit the range.
case TypeTags.SIGNED32_INT:
return literalType.tag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value);
case TypeTags.SIGNED16_INT:
return literalType.tag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value);
case TypeTags.SIGNED8_INT:
return literalType.tag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED32_INT:
return literalType.tag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED16_INT:
return literalType.tag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED8_INT:
return literalType.tag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value);
case TypeTags.CHAR_STRING:
return literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value);
default:
return false;
}
}
/**
 * Validate if the return type of the given function is a subtype of `error?`,
 * containing `()`; logs {@code diagnosticCode} otherwise.
 *
 * @param function The function of which the return type should be validated
 * @param diagnosticCode The code to log if the return type is invalid
 */
public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {
    BType returnType = function.returnTypeNode.type;
    // Plain `()` is trivially valid.
    if (returnType.tag == TypeTags.NIL) {
        return;
    }
    // A nilable union is valid when every member is nil or error.
    if (returnType.tag == TypeTags.UNION && returnType.isNullable()) {
        boolean onlyNilAndError = ((BUnionType) returnType).getMemberTypes().stream()
                .allMatch(member -> member.tag == TypeTags.NIL || member.tag == TypeTags.ERROR);
        if (onlyNilAndError) {
            return;
        }
    }
    dlog.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());
}
/**
 * An ordered (source, target) pair of types, used as a memoization key while
 * resolving recursive type relationships.
 *
 * @since 0.982.0
 */
private static class TypePair {
    BType sourceType;
    BType targetType;

    public TypePair(BType sourceType, BType targetType) {
        this.sourceType = sourceType;
        this.targetType = targetType;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof TypePair)) {
            return false;
        }
        TypePair that = (TypePair) obj;
        // Pairs match only when both components match, in order.
        return this.sourceType.equals(that.sourceType)
                && this.targetType.equals(that.targetType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(sourceType, targetType);
    }
}
/**
 * A functional interface for parameterizing the type of type checking that needs to be done on the source and
 * target types.
 *
 * @since 0.995.0
 */
@FunctionalInterface // enforce the single-abstract-method contract at compile time
private interface TypeEqualityPredicate {

    /**
     * Tests the relationship between {@code source} and {@code target};
     * {@code unresolvedTypes} carries in-flight pairs to break recursion cycles.
     */
    boolean test(BType source, BType target, Set<TypePair> unresolvedTypes);
}
/**
 * Whether {@code type} has an implicit filler value (the value used to fill in
 * unspecified list members).
 */
public boolean hasFillerValue(BType type) {
switch (type.tag) {
// Simple and structurally-open types always have a filler.
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.NIL:
case TypeTags.TABLE:
case TypeTags.ANYDATA:
case TypeTags.MAP:
case TypeTags.ANY:
return true;
// Structured types delegate to per-kind checks.
case TypeTags.ARRAY:
return checkFillerValue((BArrayType) type);
case TypeTags.FINITE:
return checkFillerValue((BFiniteType) type);
case TypeTags.UNION:
return checkFillerValue((BUnionType) type);
case TypeTags.OBJECT:
return checkFillerValue((BObjectType) type);
case TypeTags.RECORD:
return checkFillerValue((BRecordType) type);
case TypeTags.TUPLE:
// A tuple is fillable only when every member type is.
BTupleType tupleType = (BTupleType) type;
return tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));
default:
// All fixed-width integer subtypes fill with 0.
if (TypeTags.isIntegerTypeTag(type.tag)) {
return true;
}
return false;
}
}
/**
 * Filler-value check for object types: only classes whose {@code init} (if
 * any) returns nil and takes only defaultable parameters can be implicitly
 * constructed via {@code new()}.
 */
private boolean checkFillerValue(BObjectType type) {
    // Only `class`-flavoured object types qualify at all.
    if ((type.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
        return false;
    }
    BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;
    // No init method: `new()` always succeeds.
    if (initFunction == null) {
        return true;
    }
    // init must not be able to return an error.
    if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {
        return false;
    }
    // Every init parameter must be defaultable so `new()` can be called bare.
    return initFunction.symbol.getParameters().stream()
            .allMatch(param -> param.defaultableParam);
}
/**
 * Filler-value check for finite types.
 * Singleton: the single value serves as the filler.
 * Multi-valued: every value must share one basic type and the value space must
 * contain that type's implicit default.
 * Precondition: the value space has at least one element.
 *
 * @param type BFiniteType union or finite
 * @return boolean whether type has a valid filler value or not
 */
private boolean checkFillerValue(BFiniteType type) {
// Nil membership means nil itself is the filler.
if (type.isNullable()) {
return true;
}
if (type.getValueSpace().size() == 1) {
// NOTE(review): a singleton is accepted unconditionally here, while the doc
// above says the single value "should itself be a valid fill value" — confirm.
return true;
}
Iterator iterator = type.getValueSpace().iterator();
BLangExpression firstElement = (BLangExpression) iterator.next();
boolean defaultFillValuePresent = isImplicitDefaultValue(firstElement);
while (iterator.hasNext()) {
BLangExpression value = (BLangExpression) iterator.next();
// All values must share the first element's basic type.
if (!isSameBasicType(value.type, firstElement.type)) {
return false;
}
if (!defaultFillValuePresent && isImplicitDefaultValue(value)) {
defaultFillValuePresent = true;
}
}
return defaultFillValuePresent;
}
/** True when any member of the value space is the implicit default of its type. */
private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) {
    return valueSpace.stream().anyMatch(this::isImplicitDefaultValue);
}
/**
 * Filler-value check for union types: either nil is a member, or all members
 * (expanding finite members into their value types) share one basic type, at
 * least one member has a filler, and — when finite members are present — the
 * implicit default value appears in some value space.
 */
private boolean checkFillerValue(BUnionType type) {
// Nil membership means nil itself is the filler.
if (type.isNullable()) {
return true;
}
Set<BType> memberTypes = new HashSet<>();
boolean hasFillerValue = false;
boolean defaultValuePresent = false;
boolean finiteTypePresent = false;
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.FINITE) {
// Expand finite members into the distinct types of their values.
Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace());
memberTypes.addAll(uniqueValues);
if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) {
defaultValuePresent = true;
}
finiteTypePresent = true;
} else {
memberTypes.add(member);
}
if (!hasFillerValue && hasFillerValue(member)) {
hasFillerValue = true;
}
}
if (!hasFillerValue) {
return false;
}
// All collected member types must share the same basic type.
Iterator<BType> iterator = memberTypes.iterator();
BType firstMember = iterator.next();
while (iterator.hasNext()) {
if (!isSameBasicType(firstMember, iterator.next())) {
return false;
}
}
// With finite members involved, the default value must actually be present.
if (finiteTypePresent) {
return defaultValuePresent;
}
return true;
}
/**
 * Same-basic-type check: identical types match, and any two integer subtypes
 * are additionally treated as the same basic type.
 */
private boolean isSameBasicType(BType source, BType target) {
    if (isSameType(source, target)) {
        return true;
    }
    return TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag);
}
/** Collects the distinct static types of the expressions in a value space. */
private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) {
    return valueSpace.stream()
            .map(expression -> expression.type)
            .collect(Collectors.toCollection(HashSet::new));
}
/**
 * Whether the expression is the implicit default ("zero") value of its type:
 * 0, "", 0.0, false, or nil. Non-literal expressions are never defaults.
 */
private boolean isImplicitDefaultValue(BLangExpression expression) {
if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {
BLangLiteral literalExpression = (BLangLiteral) expression;
BType literalExprType = literalExpression.type;
Object value = literalExpression.getValue();
switch (literalExprType.getKind()) {
case INT:
case BYTE:
return value.equals(Long.valueOf(0));
case STRING:
return value == null || value.equals("");
case DECIMAL:
case FLOAT:
// NOTE(review): compares against the string "0.0" — assumes float/decimal
// literal values are stored as Strings here. TODO confirm.
return value.equals(String.valueOf(0.0));
case BOOLEAN:
return value.equals(Boolean.valueOf(false));
case NIL:
return true;
default:
return false;
}
}
return false;
}
/**
 * Filler-value check for record types: the empty record `{}` is a valid
 * filler only when no field is explicitly required (optional fields are
 * always acceptable).
 */
private boolean checkFillerValue(BRecordType type) {
    for (BField field : type.fields.values()) {
        boolean isOptional = Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL);
        if (!isOptional && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            return false;
        }
    }
    return true;
}
/**
 * Filler-value check for array types: open arrays (size == -1) fill with the
 * empty array; fixed-length arrays need a filler for their element type.
 */
private boolean checkFillerValue(BArrayType type) {
    return type.size == -1 || hasFillerValue(type.eType);
}
/**
 * Get result type of the query output.
 *
 * @param type type of query expression.
 * @return result type.
 */
public BType resolveExprType(BType type) {
switch (type.tag) {
case TypeTags.STREAM:
return ((BStreamType) type).constraint;
case TypeTags.TABLE:
return ((BTableType) type).constraint;
case TypeTags.ARRAY:
return ((BArrayType) type).eType;
case TypeTags.UNION:
// Use the first member that is a stream/table/array/string/xml.
List<BType> exprTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
for (BType returnType : exprTypes) {
switch (returnType.tag) {
case TypeTags.STREAM:
return ((BStreamType) returnType).constraint;
case TypeTags.TABLE:
return ((BTableType) returnType).constraint;
case TypeTags.ARRAY:
return ((BArrayType) returnType).eType;
case TypeTags.STRING:
case TypeTags.XML:
return returnType;
}
}
// Intentional fall-through: no matching member, return the type unchanged.
default:
return type;
}
}
/**
 * Whether {@code tag} denotes a simple basic type: byte, float, decimal,
 * boolean, nil, or any integer/string subtype.
 */
private boolean isSimpleBasicType(int tag) {
    if (tag == TypeTags.BYTE
            || tag == TypeTags.FLOAT
            || tag == TypeTags.DECIMAL
            || tag == TypeTags.BOOLEAN
            || tag == TypeTags.NIL) {
        return true;
    }
    return TypeTags.isIntegerTypeTag(tag) || TypeTags.isStringTypeTag(tag);
}
/**
 * Check whether a type is an ordered type.
 * Unions qualify only in the `T?` shape: at most two ordered members, one of
 * which is nil.
 *
 * @param type type.
 * @return boolean whether the type is an ordered type or not.
 */
public boolean isOrderedType(BType type) {
switch (type.tag) {
case TypeTags.UNION:
Set<BType> memberTypes = ((BUnionType) type).getMemberTypes();
for (BType memType : memberTypes) {
if (!isOrderedType(memType)) {
return false;
}
}
// Only `T?`-shaped unions (size <= 2, containing nil) are ordered.
return memberTypes.size() <= 2 && memberTypes.contains(symTable.nilType);
case TypeTags.ARRAY:
// Arrays are ordered when their element type is.
BType elementType = ((BArrayType) type).eType;
return isOrderedType(elementType);
default:
return isSimpleBasicType(type.tag);
}
}
/**
 * Whether {@code type} is a simple basic type, or a union whose members are
 * all simple basic types.
 */
public boolean isUnionOfSimpleBasicTypes(BType type) {
    // Non-union types are checked directly.
    if (type.tag != TypeTags.UNION) {
        return isSimpleBasicType(type.tag);
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isSimpleBasicType(memberType.tag)) {
            return false;
        }
    }
    return true;
}
/**
 * Whether {@code type} is a subtype of `readonly|isolated object {}`:
 * inherently immutable / readonly-flagged types, isolated objects, and unions
 * whose members all qualify.
 */
public boolean isSubTypeOfReadOnlyOrIsolatedObjectUnion(BType type) {
    if (isInherentlyImmutableType(type) || Symbols.isFlagOn(type.flags, Flags.READONLY)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.OBJECT:
            return isIsolated(type);
        case TypeTags.UNION:
            // Every member of the union must itself qualify.
            return ((BUnionType) type).getMemberTypes().stream()
                    .allMatch(this::isSubTypeOfReadOnlyOrIsolatedObjectUnion);
        default:
            return false;
    }
}
// True when the type carries the `isolated` qualifier flag.
private boolean isIsolated(BType type) {
return Symbols.isFlagOn(type.flags, Flags.ISOLATED);
}
/**
 * Structural validator for listener objects. A valid listener must expose
 * public attach/detach/start/gracefulStop/immediateStop methods with the
 * expected signatures; a boolean flag is recorded per method found.
 */
private static class ListenerValidationModel {

    private final Types types;
    private final SymbolTable symtable;
    // Allowed type of the second `attach` parameter: string|string[]|().
    private final BType serviceNameType;
    boolean attachFound;
    boolean detachFound;
    boolean startFound;
    boolean gracefulStopFound;
    boolean immediateStopFound;

    public ListenerValidationModel(Types types, SymbolTable symTable) {
        this.types = types;
        this.symtable = symTable;
        this.serviceNameType =
                BUnionType.create(null, symtable.stringType, symtable.arrayStringType, symtable.nilType);
    }

    /** True only when every required listener method was found and valid. */
    boolean isValidListener() {
        return attachFound && detachFound && startFound && gracefulStopFound && immediateStopFound;
    }

    /**
     * Checks the attached functions against the listener contract, failing
     * fast when any required method has an invalid signature.
     */
    private boolean checkMethods(List<BAttachedFunction> rhsFuncs) {
        for (BAttachedFunction func : rhsFuncs) {
            switch (func.funcName.value) {
                case "attach":
                    if (!checkAttachMethod(func)) {
                        return false;
                    }
                    break;
                case "detach":
                    if (!checkDetachMethod(func)) {
                        return false;
                    }
                    break;
                case "start":
                    // Bug fix: this previously did `return true`, declaring the
                    // listener valid the moment an INVALID `start` method was
                    // seen. Fail validation instead, as in every other case.
                    if (!checkStartMethod(func)) {
                        return false;
                    }
                    break;
                case "gracefulStop":
                    if (!checkGracefulStop(func)) {
                        return false;
                    }
                    break;
                case "immediateStop":
                    if (!checkImmediateStop(func)) {
                        return false;
                    }
                    break;
            }
        }
        return isValidListener();
    }

    private boolean emptyParamList(BAttachedFunction func) {
        // NOTE(review): `restType != symtable.noType` reads inverted for an
        // "empty parameter list" check; it only makes sense if an absent rest
        // param is represented by something other than noType (e.g. null).
        // Verify that representation before changing this condition.
        return func.type.paramTypes.isEmpty() && func.type.restType != symtable.noType;
    }

    // Method must be public and return a subtype of `error?`.
    private boolean publicAndReturnsErrorOrNil(BAttachedFunction func) {
        if (!Symbols.isPublic(func.symbol)) {
            return false;
        }
        if (!types.isAssignable(func.type.retType, symtable.errorOrNilType)) {
            return false;
        }
        return true;
    }

    private boolean isPublicNoParamReturnsErrorOrNil(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (!emptyParamList(func)) {
            return false;
        }
        return true;
    }

    private boolean checkImmediateStop(BAttachedFunction func) {
        return immediateStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkGracefulStop(BAttachedFunction func) {
        return gracefulStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkStartMethod(BAttachedFunction func) {
        return startFound = publicAndReturnsErrorOrNil(func);
    }

    // detach(service) — exactly one parameter, which must be a service object.
    private boolean checkDetachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 1) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        boolean isMatchingSignature = firstParamType.tag == TypeTags.OBJECT
                && Symbols.isService(firstParamType.tsymbol);
        return detachFound = isMatchingSignature;
    }

    // attach(service, name) — service object plus string|string[]|() name.
    private boolean checkAttachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 2) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        if (firstParamType.tag != TypeTags.OBJECT) {
            return false;
        }
        if (!Symbols.isService(firstParamType.tsymbol)) {
            return false;
        }
        BType secondParamType = func.type.paramTypes.get(1);
        boolean sameType = types.isAssignable(secondParamType, this.serviceNameType);
        return attachFound = sameType;
    }

    private boolean isServiceObject(BType type) {
        if (type.tag != TypeTags.OBJECT) {
            return false;
        }
        return Symbols.isService(type.tsymbol);
    }
}
}
Add a method, e.g. getJobID to reuse these codes. | public void testStopJob() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.isSync = false;
String sessionId = mockExecutor.openSession("test-session");
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
sessionId,
mockExecutor,
historyTempFile(),
null)) {
client.executeInNonInteractiveMode(INSERT_INTO_STATEMENT);
String dmlResult = outputStream.toString();
Pattern pattern = Pattern.compile("[\\s\\S]*Job ID: (.*)[\\s\\S]*");
Matcher matcher = pattern.matcher(dmlResult);
assertThat(matcher.matches()).isTrue();
String jobId = matcher.group(1);
client.executeInNonInteractiveMode("STOP JOB '" + jobId + "'");
String stopResult = outputStream.toString();
assertThat(stopResult).contains(CliStrings.MESSAGE_STOP_JOB_STATEMENT);
}
} | String jobId = matcher.group(1); | public void testStopJob() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.isSync = false;
String sessionId = mockExecutor.openSession("test-session");
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
sessionId,
mockExecutor,
historyTempFile(),
null)) {
client.executeInNonInteractiveMode(INSERT_INTO_STATEMENT);
String dmlResult = outputStream.toString();
String jobId = extractJobId(dmlResult);
client.executeInNonInteractiveMode("STOP JOB '" + jobId + "'");
String stopResult = outputStream.toString();
assertThat(stopResult).contains(CliStrings.MESSAGE_STOP_JOB_STATEMENT);
}
} | class CliClientTest extends TestLogger {
private static final String INSERT_INTO_STATEMENT =
"INSERT INTO MyTable SELECT * FROM MyOtherTable";
private static final String INSERT_OVERWRITE_STATEMENT =
"INSERT OVERWRITE MyTable SELECT * FROM MyOtherTable";
@Test
public void testUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, false, false);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, false, false);
}
@Test
public void testFailedUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, true, true);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, true, true);
}
@Test
public void testExecuteSqlFile() throws Exception {
MockExecutor executor = new MockExecutor();
executeSqlFromContent(
executor,
String.join(
";\n",
Arrays.asList(
INSERT_INTO_STATEMENT, "", INSERT_OVERWRITE_STATEMENT, "\n")));
assertThat(executor.receivedStatement).contains(INSERT_OVERWRITE_STATEMENT);
}
@Test
public void testSqlCompletion() throws IOException {
verifySqlCompletion("", 0, Arrays.asList("CLEAR", "HELP", "EXIT", "QUIT", "RESET", "SET"));
verifySqlCompletion("SELE", 4, Collections.emptyList());
verifySqlCompletion("QU", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion(" qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("set ", 3, Collections.emptyList());
verifySqlCompletion("show t ", 6, Collections.emptyList());
verifySqlCompletion("show ", 4, Collections.emptyList());
verifySqlCompletion("show modules", 12, Collections.emptyList());
}
@Test
public void testHistoryFile() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
String sessionId = mockExecutor.openSession("test-session");
InputStream inputStream = new ByteArrayInputStream("help;\nuse catalog cat;\n".getBytes());
Path historyFilePath = historyTempFile();
try (Terminal terminal =
new DumbTerminal(inputStream, new TerminalUtils.MockOutputStream());
CliClient client =
new CliClient(
() -> terminal, sessionId, mockExecutor, historyFilePath, null)) {
client.executeInInteractiveMode();
List<String> content = Files.readAllLines(historyFilePath);
assertThat(content.size()).isEqualTo(2);
assertThat(content.get(0)).contains("help");
assertThat(content.get(1)).contains("use catalog cat");
}
}
@Test
public void testGetEOFinNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList("DESC MyOtherTable;", "SHOW TABLES");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(mockExecutor.receivedStatement).contains(statements.get(1));
}
@Test
public void testUnknownStatementInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"ERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
public void testFailedExecutionInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.failExecution = true;
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
public void testIllegalResultModeInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"SELECT * FROM MyOtherTable;",
"HELP;",
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
String output = executeSqlFromContent(mockExecutor, content);
assertThat(output)
.contains(
"In non-interactive mode, it only supports to use TABLEAU as value of "
+ "sql-client.execution.result-mode when execute query. Please add "
+ "'SET sql-client.execution.result-mode=TABLEAU;' in the sql file.");
}
@Test
public void testIllegalStatementInInitFile() throws Exception {
final List<String> statements =
Arrays.asList(
"CREATE TABLE source (a int, b string) with ( 'connector' = 'values');",
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
String sessionId = mockExecutor.openSession("test-session");
// NOTE(review): uses a 4-arg CliClient constructor with DEFAULT_TERMINAL_FACTORY,
// unlike the 5-arg (terminalFactory, session, executor, history, null) form used
// by the other tests here — confirm both overloads exist.
CliClient cliClient =
new CliClient(DEFAULT_TERMINAL_FACTORY, sessionId, mockExecutor, historyTempFile());
// Initialization must be rejected because INSERT is illegal in an init file.
assertThat(cliClient.executeInitialization(content)).isFalse();
}
@Test(timeout = 10000)
public void testCancelExecutionInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"HELP;",
"CREATE TABLE tbl( -- comment\n"
+ "-- comment with ;\n"
+ "id INT,\n"
+ "name STRING\n"
+ ") WITH (\n"
+ " 'connector' = 'values'\n"
+ ");\n",
"INSERT INTO \n" + "MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;",
"QUIT;\n");
final int hookIndex = 2;
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.isSync = true;
String sessionId = mockExecutor.openSession("test-session");
Path historyFilePath = historyTempFile();
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
sessionId,
mockExecutor,
historyFilePath,
null)) {
Thread thread = new Thread(() -> client.executeInNonInteractiveMode(content));
thread.start();
while (!mockExecutor.isAwait) {
Thread.sleep(10);
}
thread.interrupt();
while (thread.isAlive()) {
Thread.sleep(10);
}
assertThat(outputStream.toString())
.contains("java.lang.InterruptedException: sleep interrupted");
}
assertThat(statements.get(hookIndex)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
public void testCancelExecutionInteractiveMode() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.isSync = true;
String sessionId = mockExecutor.openSession("test-session");
Path historyFilePath = historyTempFile();
InputStream inputStream =
new ByteArrayInputStream("SET 'key'='value';\nSELECT 1;\nSET;\n ".getBytes());
OutputStream outputStream = new ByteArrayOutputStream(248);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(inputStream, outputStream),
sessionId,
mockExecutor,
historyFilePath,
null)) {
Thread thread =
new Thread(
() -> {
try {
client.executeInInteractiveMode();
} catch (Exception ignore) {
}
});
thread.start();
while (!mockExecutor.isAwait) {
Thread.sleep(10);
}
client.getTerminal().raise(Terminal.Signal.INT);
CommonTestUtils.waitUntilCondition(
() -> outputStream.toString().contains("'key' = 'value'"));
}
}
// Bug fix: the @Test(timeout = 10000) annotation was duplicated on this
// method; @Test is not a repeatable annotation, so the duplicate is a
// compile error. Keep a single annotation.
@Test(timeout = 10000)
public void testStopJobWithSavepoint() throws Exception {
    final MockExecutor mockExecutor = new MockExecutor();
    // Async submission so the INSERT returns a job id immediately.
    mockExecutor.isSync = false;
    final String mockSavepoint = "/my/savepoint/path";
    mockExecutor.savepoint = mockSavepoint;
    String sessionId = mockExecutor.openSession("test-session");
    OutputStream outputStream = new ByteArrayOutputStream(256);
    try (CliClient client =
            new CliClient(
                    () -> TerminalUtils.createDumbTerminal(outputStream),
                    sessionId,
                    mockExecutor,
                    historyTempFile(),
                    null)) {
        client.executeInNonInteractiveMode(INSERT_INTO_STATEMENT);
        String dmlResult = outputStream.toString();
        // Pull the job id out of the CLI output.
        Pattern pattern = Pattern.compile("[\\s\\S]*Job ID: (.*)[\\s\\S]*");
        Matcher matcher = pattern.matcher(dmlResult);
        assertThat(matcher.matches()).isTrue();
        String jobId = matcher.group(1);
        client.executeInNonInteractiveMode("STOP JOB '" + jobId + "' WITH SAVEPOINT");
        String stopResult = outputStream.toString();
        // The acknowledgement must report the savepoint path returned by the mock.
        assertThat(stopResult)
                .contains(
                        String.format(
                                CliStrings.MESSAGE_STOP_JOB_WITH_SAVEPOINT_STATEMENT,
                                mockSavepoint));
    }
}
private void verifyUpdateSubmission(
String statement, boolean failExecution, boolean testFailure) throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.failExecution = failExecution;
String result = executeSqlFromContent(mockExecutor, statement);
if (testFailure) {
assertThat(result).contains(MESSAGE_SQL_EXECUTION_ERROR);
} else {
assertThat(result).doesNotContain(MESSAGE_SQL_EXECUTION_ERROR);
assertThat(SqlMultiLineParser.formatSqlFile(statement))
.isEqualTo(SqlMultiLineParser.formatSqlFile(mockExecutor.receivedStatement));
}
}
private void verifySqlCompletion(String statement, int position, List<String> expectedHints)
throws IOException {
final MockExecutor mockExecutor = new MockExecutor();
String sessionId = mockExecutor.openSession("test-session");
final SqlCompleter completer = new SqlCompleter(sessionId, mockExecutor);
final SqlMultiLineParser parser =
new SqlMultiLineParser(new SqlCommandParserImpl(mockExecutor, sessionId));
try (Terminal terminal = TerminalUtils.createDumbTerminal()) {
final LineReader reader = LineReaderBuilder.builder().terminal(terminal).build();
final ParsedLine parsedLine =
parser.parse(statement, position, Parser.ParseContext.COMPLETE);
final List<Candidate> candidates = new ArrayList<>();
final List<String> results = new ArrayList<>();
completer.complete(reader, parsedLine, candidates);
candidates.forEach(item -> results.add(item.value()));
assertThat(results.containsAll(expectedHints)).isTrue();
assertThat(statement).isEqualTo(mockExecutor.receivedStatement);
assertThat(position).isEqualTo(mockExecutor.receivedPosition);
}
}
private Path historyTempFile() throws IOException {
return File.createTempFile("history", "tmp").toPath();
}
private String executeSqlFromContent(MockExecutor executor, String content) throws IOException {
String sessionId = executor.openSession("test-session");
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
sessionId,
executor,
historyTempFile(),
null)) {
client.executeInNonInteractiveMode(content);
}
return outputStream.toString();
}
private static class MockExecutor implements Executor {
public boolean failExecution;
public String savepoint;
public volatile boolean isSync = false;
public volatile boolean isAwait = false;
public String receivedStatement;
public int receivedPosition;
private final Map<String, SessionContext> sessionMap = new HashMap<>();
private final SqlParserHelper helper = new SqlParserHelper();
@Override
public void start() throws SqlExecutionException {}
@Override
public String openSession(@Nullable String sessionId) throws SqlExecutionException {
Configuration configuration = new Configuration();
configuration.set(TABLE_DML_SYNC, isSync);
DefaultContext defaultContext =
new DefaultContext(
Collections.emptyList(),
configuration,
Collections.singletonList(new DefaultCLI()));
SessionContext context = SessionContext.create(defaultContext, sessionId);
sessionMap.put(sessionId, context);
helper.registerTables();
return sessionId;
}
@Override
public void closeSession(String sessionId) throws SqlExecutionException {}
@Override
public Map<String, String> getSessionConfigMap(String sessionId)
throws SqlExecutionException {
return this.sessionMap.get(sessionId).getConfigMap();
}
@Override
public ReadableConfig getSessionConfig(String sessionId) throws SqlExecutionException {
SessionContext context = this.sessionMap.get(sessionId);
return context.getReadableConfig();
}
@Override
public void resetSessionProperties(String sessionId) throws SqlExecutionException {}
@Override
public void resetSessionProperty(String sessionId, String key)
throws SqlExecutionException {}
@Override
public void setSessionProperty(String sessionId, String key, String value)
throws SqlExecutionException {
SessionContext context = this.sessionMap.get(sessionId);
context.set(key, value);
}
@Override
public TableResultInternal executeOperation(String sessionId, Operation operation)
throws SqlExecutionException {
if (failExecution) {
throw new SqlExecutionException("Fail execution.");
}
if (operation instanceof ModifyOperation) {
if (isSync) {
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
return new TestTableResult(
new TestingJobClient(),
ResultKind.SUCCESS_WITH_CONTENT,
ResolvedSchema.of(Column.physical("result", DataTypes.BIGINT())),
CloseableIterator.adapterForIterator(
Collections.singletonList(Row.of(-1L)).iterator()));
}
return TestTableResult.TABLE_RESULT_OK;
}
@Override
public TableResultInternal executeModifyOperations(
String sessionId, List<ModifyOperation> operations) throws SqlExecutionException {
if (failExecution) {
throw new SqlExecutionException("Fail execution.");
}
if (isSync) {
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
return new TestTableResult(
new TestingJobClient(),
ResultKind.SUCCESS_WITH_CONTENT,
ResolvedSchema.of(Column.physical("result", DataTypes.BIGINT())),
CloseableIterator.adapterForIterator(
Collections.singletonList(Row.of(-1L)).iterator()));
}
@Override
public Operation parseStatement(String sessionId, String statement)
throws SqlExecutionException {
receivedStatement = statement;
try {
return helper.getSqlParser().parse(statement).get(0);
} catch (Exception ex) {
throw new SqlExecutionException("Parse error: " + statement, ex);
}
}
@Override
public List<String> completeStatement(String sessionId, String statement, int position) {
receivedStatement = statement;
receivedPosition = position;
return Arrays.asList(helper.getSqlParser().getCompletionHints(statement, position));
}
@Override
public ResultDescriptor executeQuery(String sessionId, QueryOperation query)
throws SqlExecutionException {
if (isSync) {
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
return null;
}
@Override
public TypedResult<List<RowData>> retrieveResultChanges(String sessionId, String resultId)
throws SqlExecutionException {
return null;
}
@Override
public TypedResult<Integer> snapshotResult(String sessionId, String resultId, int pageSize)
throws SqlExecutionException {
return null;
}
@Override
public List<RowData> retrieveResultPage(String resultId, int page)
throws SqlExecutionException {
return null;
}
@Override
public void cancelQuery(String sessionId, String resultId) throws SqlExecutionException {
}
@Override
public void addJar(String sessionId, String jarUrl) {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public void removeJar(String sessionId, String jarUrl) {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public List<String> listJars(String sessionId) {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public Optional<String> stopJob(
String sessionId, String jobId, boolean isWithSavepoint, boolean isWithDrain)
throws SqlExecutionException {
if (isWithSavepoint) {
return Optional.of(savepoint);
} else {
return Optional.empty();
}
}
}
} | class CliClientTest extends TestLogger {
private static final String INSERT_INTO_STATEMENT =
"INSERT INTO MyTable SELECT * FROM MyOtherTable";
private static final String INSERT_OVERWRITE_STATEMENT =
"INSERT OVERWRITE MyTable SELECT * FROM MyOtherTable";
private static final String ORIGIN_HIVE_SQL = "SELECT pos\t FROM source_table;\n";
private static final String HIVE_SQL_WITHOUT_COMPLETER = "SELECT pos FROM source_table;";
private static final String HIVE_SQL_WITH_COMPLETER = "SELECT POSITION FROM source_table;";
@Test
public void testUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, false, false);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, false, false);
}
@Test
public void testFailedUpdateSubmission() throws Exception {
verifyUpdateSubmission(INSERT_INTO_STATEMENT, true, true);
verifyUpdateSubmission(INSERT_OVERWRITE_STATEMENT, true, true);
}
@Test
public void testExecuteSqlFile() throws Exception {
MockExecutor executor = new MockExecutor();
executeSqlFromContent(
executor,
String.join(
";\n",
Arrays.asList(
INSERT_INTO_STATEMENT, "", INSERT_OVERWRITE_STATEMENT, "\n")));
assertThat(executor.receivedStatement).contains(INSERT_OVERWRITE_STATEMENT);
}
@Test
public void testExecuteSqlFileWithoutSqlCompleter() throws Exception {
MockExecutor executor = new MockExecutor(new SqlParserHelper(SqlDialect.HIVE));
executeSqlFromContent(executor, ORIGIN_HIVE_SQL);
assertThat(executor.receivedStatement).contains(HIVE_SQL_WITHOUT_COMPLETER);
}
@Test
public void testExecuteSqlInteractiveWithSqlCompleter() throws Exception {
final MockExecutor mockExecutor = new MockExecutor(new SqlParserHelper(SqlDialect.HIVE));
String sessionId = mockExecutor.openSession("test-session");
InputStream inputStream = new ByteArrayInputStream(ORIGIN_HIVE_SQL.getBytes());
OutputStream outputStream = new ByteArrayOutputStream(256);
try (Terminal terminal = new DumbTerminal(inputStream, outputStream);
CliClient client =
new CliClient(
() -> terminal, sessionId, mockExecutor, historyTempFile(), null)) {
client.executeInInteractiveMode();
assertThat(mockExecutor.receivedStatement).contains(HIVE_SQL_WITH_COMPLETER);
}
}
@Test
public void testSqlCompletion() throws IOException {
verifySqlCompletion("", 0, Arrays.asList("CLEAR", "HELP", "EXIT", "QUIT", "RESET", "SET"));
verifySqlCompletion("SELE", 4, Collections.emptyList());
verifySqlCompletion("QU", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion(" qu", 2, Collections.singletonList("QUIT"));
verifySqlCompletion("set ", 3, Collections.emptyList());
verifySqlCompletion("show t ", 6, Collections.emptyList());
verifySqlCompletion("show ", 4, Collections.emptyList());
verifySqlCompletion("show modules", 12, Collections.emptyList());
}
@Test
public void testHistoryFile() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
String sessionId = mockExecutor.openSession("test-session");
InputStream inputStream = new ByteArrayInputStream("help;\nuse catalog cat;\n".getBytes());
Path historyFilePath = historyTempFile();
try (Terminal terminal =
new DumbTerminal(inputStream, new TerminalUtils.MockOutputStream());
CliClient client =
new CliClient(
() -> terminal, sessionId, mockExecutor, historyFilePath, null)) {
client.executeInInteractiveMode();
List<String> content = Files.readAllLines(historyFilePath);
assertThat(content.size()).isEqualTo(2);
assertThat(content.get(0)).contains("help");
assertThat(content.get(1)).contains("use catalog cat");
}
}
@Test
public void testGetEOFinNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList("DESC MyOtherTable;", "SHOW TABLES");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(mockExecutor.receivedStatement).contains(statements.get(1));
}
@Test
public void testUnknownStatementInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"ERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
public void testFailedExecutionInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.failExecution = true;
executeSqlFromContent(mockExecutor, content);
assertThat(statements.get(0)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
public void testIllegalResultModeInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"SELECT * FROM MyOtherTable;",
"HELP;",
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
String output = executeSqlFromContent(mockExecutor, content);
assertThat(output)
.contains(
"In non-interactive mode, it only supports to use TABLEAU as value of "
+ "sql-client.execution.result-mode when execute query. Please add "
+ "'SET sql-client.execution.result-mode=TABLEAU;' in the sql file.");
}
@Test
public void testIllegalStatementInInitFile() throws Exception {
final List<String> statements =
Arrays.asList(
"CREATE TABLE source (a int, b string) with ( 'connector' = 'values');",
"INSERT INTO MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;");
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
String sessionId = mockExecutor.openSession("test-session");
CliClient cliClient =
new CliClient(DEFAULT_TERMINAL_FACTORY, sessionId, mockExecutor, historyTempFile());
assertThat(cliClient.executeInitialization(content)).isFalse();
}
@Test(timeout = 10000)
public void testCancelExecutionInNonInteractiveMode() throws Exception {
final List<String> statements =
Arrays.asList(
"HELP;",
"CREATE TABLE tbl( -- comment\n"
+ "-- comment with ;\n"
+ "id INT,\n"
+ "name STRING\n"
+ ") WITH (\n"
+ " 'connector' = 'values'\n"
+ ");\n",
"INSERT INTO \n" + "MyOtherTable VALUES (1, 101), (2, 102);",
"DESC MyOtherTable;",
"SHOW TABLES;",
"QUIT;\n");
final int hookIndex = 2;
String content = String.join("\n", statements);
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.isSync = true;
String sessionId = mockExecutor.openSession("test-session");
Path historyFilePath = historyTempFile();
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
sessionId,
mockExecutor,
historyFilePath,
null)) {
Thread thread = new Thread(() -> client.executeInNonInteractiveMode(content));
thread.start();
while (!mockExecutor.isAwait) {
Thread.sleep(10);
}
thread.interrupt();
while (thread.isAlive()) {
Thread.sleep(10);
}
assertThat(outputStream.toString())
.contains("java.lang.InterruptedException: sleep interrupted");
}
assertThat(statements.get(hookIndex)).isEqualTo(mockExecutor.receivedStatement);
}
@Test
public void testCancelExecutionInteractiveMode() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.isSync = true;
String sessionId = mockExecutor.openSession("test-session");
Path historyFilePath = historyTempFile();
InputStream inputStream =
new ByteArrayInputStream("SET 'key'='value';\nSELECT 1;\nSET;\n ".getBytes());
OutputStream outputStream = new ByteArrayOutputStream(248);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(inputStream, outputStream),
sessionId,
mockExecutor,
historyFilePath,
null)) {
Thread thread =
new Thread(
() -> {
try {
client.executeInInteractiveMode();
} catch (Exception ignore) {
}
});
thread.start();
while (!mockExecutor.isAwait) {
Thread.sleep(10);
}
client.getTerminal().raise(Terminal.Signal.INT);
CommonTestUtils.waitUntilCondition(
() -> outputStream.toString().contains("'key' = 'value'"));
}
}
@Test(timeout = 10000)
@Test(timeout = 10000)
public void testStopJobWithSavepoint() throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.isSync = false;
final String mockSavepoint = "/my/savepoint/path";
mockExecutor.savepoint = mockSavepoint;
String sessionId = mockExecutor.openSession("test-session");
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
sessionId,
mockExecutor,
historyTempFile(),
null)) {
client.executeInNonInteractiveMode(INSERT_INTO_STATEMENT);
String dmlResult = outputStream.toString();
String jobId = extractJobId(dmlResult);
client.executeInNonInteractiveMode("STOP JOB '" + jobId + "' WITH SAVEPOINT");
String stopResult = outputStream.toString();
assertThat(stopResult)
.contains(
String.format(
CliStrings.MESSAGE_STOP_JOB_WITH_SAVEPOINT_STATEMENT,
mockSavepoint));
}
}
private void verifyUpdateSubmission(
String statement, boolean failExecution, boolean testFailure) throws Exception {
final MockExecutor mockExecutor = new MockExecutor();
mockExecutor.failExecution = failExecution;
String result = executeSqlFromContent(mockExecutor, statement);
if (testFailure) {
assertThat(result).contains(MESSAGE_SQL_EXECUTION_ERROR);
} else {
assertThat(result).doesNotContain(MESSAGE_SQL_EXECUTION_ERROR);
assertThat(SqlMultiLineParser.formatSqlFile(statement))
.isEqualTo(SqlMultiLineParser.formatSqlFile(mockExecutor.receivedStatement));
}
}
private void verifySqlCompletion(String statement, int position, List<String> expectedHints)
throws IOException {
final MockExecutor mockExecutor = new MockExecutor();
String sessionId = mockExecutor.openSession("test-session");
final SqlCompleter completer = new SqlCompleter(sessionId, mockExecutor);
final SqlMultiLineParser parser =
new SqlMultiLineParser(new SqlCommandParserImpl(mockExecutor, sessionId));
try (Terminal terminal = TerminalUtils.createDumbTerminal()) {
final LineReader reader = LineReaderBuilder.builder().terminal(terminal).build();
final ParsedLine parsedLine =
parser.parse(statement, position, Parser.ParseContext.COMPLETE);
final List<Candidate> candidates = new ArrayList<>();
final List<String> results = new ArrayList<>();
completer.complete(reader, parsedLine, candidates);
candidates.forEach(item -> results.add(item.value()));
assertThat(results.containsAll(expectedHints)).isTrue();
assertThat(statement).isEqualTo(mockExecutor.receivedStatement);
assertThat(position).isEqualTo(mockExecutor.receivedPosition);
}
}
private Path historyTempFile() throws IOException {
return File.createTempFile("history", "tmp").toPath();
}
private String executeSqlFromContent(MockExecutor executor, String content) throws IOException {
String sessionId = executor.openSession("test-session");
OutputStream outputStream = new ByteArrayOutputStream(256);
try (CliClient client =
new CliClient(
() -> TerminalUtils.createDumbTerminal(outputStream),
sessionId,
executor,
historyTempFile(),
null)) {
client.executeInNonInteractiveMode(content);
}
return outputStream.toString();
}
private String extractJobId(String result) {
Pattern pattern = Pattern.compile("[\\s\\S]*Job ID: (.*)[\\s\\S]*");
Matcher matcher = pattern.matcher(result);
if (!matcher.matches()) {
throw new IllegalStateException("No job ID found in string: " + result);
}
return matcher.group(1);
}
private static class MockExecutor implements Executor {
public boolean failExecution;
public String savepoint;
public volatile boolean isSync = false;
public volatile boolean isAwait = false;
public String receivedStatement;
public int receivedPosition;
private final Map<String, SessionContext> sessionMap = new HashMap<>();
private final SqlParserHelper helper;
public MockExecutor() {
this.helper = new SqlParserHelper();
}
public MockExecutor(SqlParserHelper helper) {
this.helper = helper;
}
@Override
public void start() throws SqlExecutionException {}
@Override
public String openSession(@Nullable String sessionId) throws SqlExecutionException {
Configuration configuration = new Configuration();
configuration.set(TABLE_DML_SYNC, isSync);
DefaultContext defaultContext =
new DefaultContext(
Collections.emptyList(),
configuration,
Collections.singletonList(new DefaultCLI()));
SessionContext context = SessionContext.create(defaultContext, sessionId);
sessionMap.put(sessionId, context);
helper.registerTables();
return sessionId;
}
@Override
public void closeSession(String sessionId) throws SqlExecutionException {}
@Override
public Map<String, String> getSessionConfigMap(String sessionId)
throws SqlExecutionException {
return this.sessionMap.get(sessionId).getConfigMap();
}
@Override
public ReadableConfig getSessionConfig(String sessionId) throws SqlExecutionException {
SessionContext context = this.sessionMap.get(sessionId);
return context.getReadableConfig();
}
@Override
public void resetSessionProperties(String sessionId) throws SqlExecutionException {}
@Override
public void resetSessionProperty(String sessionId, String key)
throws SqlExecutionException {}
@Override
public void setSessionProperty(String sessionId, String key, String value)
throws SqlExecutionException {
SessionContext context = this.sessionMap.get(sessionId);
context.set(key, value);
}
@Override
public TableResultInternal executeOperation(String sessionId, Operation operation)
throws SqlExecutionException {
if (failExecution) {
throw new SqlExecutionException("Fail execution.");
}
if (operation instanceof ModifyOperation) {
if (isSync) {
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
return new TestTableResult(
new TestingJobClient(),
ResultKind.SUCCESS_WITH_CONTENT,
ResolvedSchema.of(Column.physical("result", DataTypes.BIGINT())),
CloseableIterator.adapterForIterator(
Collections.singletonList(Row.of(-1L)).iterator()));
}
return TestTableResult.TABLE_RESULT_OK;
}
@Override
public TableResultInternal executeModifyOperations(
String sessionId, List<ModifyOperation> operations) throws SqlExecutionException {
if (failExecution) {
throw new SqlExecutionException("Fail execution.");
}
if (isSync) {
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
return new TestTableResult(
new TestingJobClient(),
ResultKind.SUCCESS_WITH_CONTENT,
ResolvedSchema.of(Column.physical("result", DataTypes.BIGINT())),
CloseableIterator.adapterForIterator(
Collections.singletonList(Row.of(-1L)).iterator()));
}
@Override
public Operation parseStatement(String sessionId, String statement)
throws SqlExecutionException {
receivedStatement = statement;
try {
return helper.getSqlParser().parse(statement).get(0);
} catch (Exception ex) {
throw new SqlExecutionException("Parse error: " + statement, ex);
}
}
@Override
public List<String> completeStatement(String sessionId, String statement, int position) {
receivedStatement = statement;
receivedPosition = position;
return Arrays.asList(helper.getSqlParser().getCompletionHints(statement, position));
}
@Override
public ResultDescriptor executeQuery(String sessionId, QueryOperation query)
throws SqlExecutionException {
if (isSync) {
isAwait = true;
try {
Thread.sleep(60_000L);
} catch (InterruptedException e) {
throw new SqlExecutionException("Fail to execute", e);
}
}
return null;
}
@Override
public TypedResult<List<RowData>> retrieveResultChanges(String sessionId, String resultId)
throws SqlExecutionException {
return null;
}
@Override
public TypedResult<Integer> snapshotResult(String sessionId, String resultId, int pageSize)
throws SqlExecutionException {
return null;
}
@Override
public List<RowData> retrieveResultPage(String resultId, int page)
throws SqlExecutionException {
return null;
}
@Override
public void cancelQuery(String sessionId, String resultId) throws SqlExecutionException {
}
@Override
public void removeJar(String sessionId, String jarUrl) {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public Optional<String> stopJob(
String sessionId, String jobId, boolean isWithSavepoint, boolean isWithDrain)
throws SqlExecutionException {
if (isWithSavepoint) {
return Optional.of(savepoint);
} else {
return Optional.empty();
}
}
}
} |
Ah, I missed it! Will fix now | public Charset convert(String value) {
try {
return Charset.forName(value);
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
} | throw new IllegalArgumentException(e); | public Charset convert(String value) {
try {
return Charset.forName(value);
} catch (Exception e) {
throw new IllegalArgumentException("Unable to create Charset from: '" + value + "'", e);
}
} | class CharsetConverter implements Converter<Charset>, Serializable {
private static final long serialVersionUID = 2320905063828247874L;
@Override
} | class CharsetConverter implements Converter<Charset>, Serializable {
private static final long serialVersionUID = 2320905063828247874L;
@Override
} |
Perhaps it's this file that should be deleted? | public void require_that_valid_tar_application_can_be_unpacked() throws IOException {
File outFile = createTarFile();
try (CompressedApplicationInputStream unpacked = streamFromTarGz(outFile)) {
File outApp = unpacked.decompress();
assertTestApp(outApp);
}
} | File outFile = createTarFile(); | public void require_that_valid_tar_application_can_be_unpacked() throws IOException {
File outFile = createTarFile();
try (CompressedApplicationInputStream unpacked = streamFromTarGz(outFile)) {
File outApp = unpacked.decompress();
assertTestApp(outApp);
}
} | class CompressedApplicationInputStreamTest {
private static void writeFileToTar(ArchiveOutputStream taos, File file) throws IOException {
taos.putArchiveEntry(taos.createArchiveEntry(file, file.getName()));
ByteStreams.copy(new FileInputStream(file), taos);
taos.closeArchiveEntry();
}
private static File createArchiveFile(ArchiveOutputStream taos, File outFile) throws IOException {
File app = new File("src/test/resources/deploy/validapp");
writeFileToTar(taos, new File(app, "services.xml"));
writeFileToTar(taos, new File(app, "hosts.xml"));
writeFileToTar(taos, new File(app, "deployment.xml"));
taos.close();
return outFile;
}
public static File createTarFile() throws IOException {
File outFile = File.createTempFile("testapp", ".tar.gz");
ArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(new GZIPOutputStream(new FileOutputStream(outFile)));
return createArchiveFile(archiveOutputStream, outFile);
}
private static File createZipFile() throws IOException {
File outFile = File.createTempFile("testapp", ".tar.gz");
ArchiveOutputStream archiveOutputStream = new ZipArchiveOutputStream(new FileOutputStream(outFile));
return createArchiveFile(archiveOutputStream, outFile);
}
private void assertTestApp(File outApp) {
String [] files = outApp.list();
assertNotNull(files);
assertEquals(3, files.length);
assertTrue(List.of(files).containsAll(List.of("hosts.xml", "services.xml", "deployment.xml")));
}
@Test
@Test
public void require_that_valid_tar_application_in_subdir_can_be_unpacked() throws IOException {
File outFile = File.createTempFile("testapp", ".tar.gz");
ArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(new GZIPOutputStream(new FileOutputStream(outFile)));
File app = new File("src/test/resources/deploy/validapp");
File file = new File(app, "services.xml");
archiveOutputStream.putArchiveEntry(archiveOutputStream.createArchiveEntry(file, "application/" + file.getName()));
ByteStreams.copy(new FileInputStream(file), archiveOutputStream);
archiveOutputStream.closeArchiveEntry();
file = new File(app, "hosts.xml");
archiveOutputStream.putArchiveEntry(archiveOutputStream.createArchiveEntry(file, "application/" + file.getName()));
ByteStreams.copy(new FileInputStream(file), archiveOutputStream);
archiveOutputStream.closeArchiveEntry();
file = new File(app, "deployment.xml");
archiveOutputStream.putArchiveEntry(archiveOutputStream.createArchiveEntry(file, "application/" + file.getName()));
ByteStreams.copy(new FileInputStream(file), archiveOutputStream);
archiveOutputStream.closeArchiveEntry();
archiveOutputStream.close();
try (CompressedApplicationInputStream unpacked = streamFromTarGz(outFile)) {
File outApp = unpacked.decompress();
assertEquals("application", outApp.getName());
assertTestApp(outApp);
}
}
@Test
public void require_that_valid_zip_application_can_be_unpacked() throws IOException {
File outFile = createZipFile();
try (CompressedApplicationInputStream unpacked = streamFromZip(outFile)) {
File outApp = unpacked.decompress();
assertTestApp(outApp);
}
}
@Test
public void require_that_gnu_tared_file_can_be_unpacked() throws IOException, InterruptedException {
File gzFile = createTarGz("src/test/resources/deploy/validapp");
assertTrue(gzFile.exists());
CompressedApplicationInputStream unpacked = CompressedApplicationInputStream.createFromCompressedStream(new FileInputStream(gzFile), "application/x-gzip", Long.MAX_VALUE);
File outApp = unpacked.decompress();
assertTestApp(outApp);
}
@Test
public void require_that_nested_app_can_be_unpacked() throws IOException, InterruptedException {
File gzFile = createTarGz("src/test/resources/deploy/advancedapp");
assertTrue(gzFile.exists());
File outApp;
try (CompressedApplicationInputStream unpacked = streamFromTarGz(gzFile)) {
outApp = unpacked.decompress();
}
List<File> files = Arrays.asList(outApp.listFiles());
assertEquals(5, files.size());
assertTrue(files.contains(new File(outApp, "services.xml")));
assertTrue(files.contains(new File(outApp, "hosts.xml")));
assertTrue(files.contains(new File(outApp, "deployment.xml")));
assertTrue(files.contains(new File(outApp, "schemas")));
assertTrue(files.contains(new File(outApp, "external")));
File sd = files.get(files.indexOf(new File(outApp, "schemas")));
assertTrue(sd.isDirectory());
assertEquals(1, sd.listFiles().length);
assertEquals(new File(sd, "keyvalue.sd").getAbsolutePath(), sd.listFiles()[0].getAbsolutePath());
File ext = files.get(files.indexOf(new File(outApp, "external")));
assertTrue(ext.isDirectory());
assertEquals(1, ext.listFiles().length);
assertEquals(new File(ext, "foo").getAbsolutePath(), ext.listFiles()[0].getAbsolutePath());
files = Arrays.asList(ext.listFiles());
File foo = files.get(files.indexOf(new File(ext, "foo")));
assertTrue(foo.isDirectory());
assertEquals(1, foo.listFiles().length);
assertEquals(new File(foo, "bar").getAbsolutePath(), foo.listFiles()[0].getAbsolutePath());
files = Arrays.asList(foo.listFiles());
File bar = files.get(files.indexOf(new File(foo, "bar")));
assertTrue(bar.isDirectory());
assertEquals(1, bar.listFiles().length);
assertTrue(bar.listFiles()[0].isFile());
assertEquals(new File(bar, "lol").getAbsolutePath(), bar.listFiles()[0].getAbsolutePath());
}
@Test(expected = InternalServerException.class)
public void require_that_invalid_application_returns_error_when_unpacked() throws Exception {
File app = new File("src/test/resources/deploy/validapp/services.xml");
streamFromTarGz(app).close();
}
private static File createTarGz(String appDir) throws IOException, InterruptedException {
File tmpTar = File.createTempFile("myapp", ".tar");
Process p = new ProcessBuilder("tar", "-C", appDir, "-cvf", tmpTar.getAbsolutePath(), ".").start();
p.waitFor();
p = new ProcessBuilder("gzip", tmpTar.getAbsolutePath()).start();
p.waitFor();
File gzFile = new File(tmpTar.getAbsolutePath() + ".gz");
assertTrue(gzFile.exists());
return gzFile;
}
private static CompressedApplicationInputStream streamFromZip(File zipFile) {
return Exceptions.uncheck(() -> CompressedApplicationInputStream.createFromCompressedStream(new FileInputStream(zipFile), "application/zip", Long.MAX_VALUE));
}
private static CompressedApplicationInputStream streamFromTarGz(File tarFile) {
return Exceptions.uncheck(() -> CompressedApplicationInputStream.createFromCompressedStream(new FileInputStream(tarFile), "application/x-gzip", Long.MAX_VALUE));
}
} | class CompressedApplicationInputStreamTest {
private static void writeFileToTar(ArchiveOutputStream taos, File file) throws IOException {
taos.putArchiveEntry(taos.createArchiveEntry(file, file.getName()));
ByteStreams.copy(new FileInputStream(file), taos);
taos.closeArchiveEntry();
}
private static File createArchiveFile(ArchiveOutputStream taos, File outFile) throws IOException {
File app = new File("src/test/resources/deploy/validapp");
writeFileToTar(taos, new File(app, "services.xml"));
writeFileToTar(taos, new File(app, "hosts.xml"));
writeFileToTar(taos, new File(app, "deployment.xml"));
taos.close();
return outFile;
}
public static File createTarFile() throws IOException {
File outFile = File.createTempFile("testapp", ".tar.gz");
ArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(new GZIPOutputStream(new FileOutputStream(outFile)));
return createArchiveFile(archiveOutputStream, outFile);
}
private static File createZipFile() throws IOException {
File outFile = File.createTempFile("testapp", ".tar.gz");
ArchiveOutputStream archiveOutputStream = new ZipArchiveOutputStream(new FileOutputStream(outFile));
return createArchiveFile(archiveOutputStream, outFile);
}
private void assertTestApp(File outApp) {
String [] files = outApp.list();
assertNotNull(files);
assertEquals(3, files.length);
assertTrue(List.of(files).containsAll(List.of("hosts.xml", "services.xml", "deployment.xml")));
}
@Test
@Test
public void require_that_valid_tar_application_in_subdir_can_be_unpacked() throws IOException {
File outFile = File.createTempFile("testapp", ".tar.gz");
ArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(new GZIPOutputStream(new FileOutputStream(outFile)));
File app = new File("src/test/resources/deploy/validapp");
File file = new File(app, "services.xml");
archiveOutputStream.putArchiveEntry(archiveOutputStream.createArchiveEntry(file, "application/" + file.getName()));
ByteStreams.copy(new FileInputStream(file), archiveOutputStream);
archiveOutputStream.closeArchiveEntry();
file = new File(app, "hosts.xml");
archiveOutputStream.putArchiveEntry(archiveOutputStream.createArchiveEntry(file, "application/" + file.getName()));
ByteStreams.copy(new FileInputStream(file), archiveOutputStream);
archiveOutputStream.closeArchiveEntry();
file = new File(app, "deployment.xml");
archiveOutputStream.putArchiveEntry(archiveOutputStream.createArchiveEntry(file, "application/" + file.getName()));
ByteStreams.copy(new FileInputStream(file), archiveOutputStream);
archiveOutputStream.closeArchiveEntry();
archiveOutputStream.close();
try (CompressedApplicationInputStream unpacked = streamFromTarGz(outFile)) {
File outApp = unpacked.decompress();
assertEquals("application", outApp.getName());
assertTestApp(outApp);
}
}
@Test
public void require_that_valid_zip_application_can_be_unpacked() throws IOException {
File outFile = createZipFile();
try (CompressedApplicationInputStream unpacked = streamFromZip(outFile)) {
File outApp = unpacked.decompress();
assertTestApp(outApp);
}
}
@Test
public void require_that_gnu_tared_file_can_be_unpacked() throws IOException, InterruptedException {
File gzFile = createTarGz("src/test/resources/deploy/validapp");
assertTrue(gzFile.exists());
CompressedApplicationInputStream unpacked = CompressedApplicationInputStream.createFromCompressedStream(new FileInputStream(gzFile), "application/x-gzip", Long.MAX_VALUE);
File outApp = unpacked.decompress();
assertTestApp(outApp);
}
@Test
public void require_that_nested_app_can_be_unpacked() throws IOException, InterruptedException {
File gzFile = createTarGz("src/test/resources/deploy/advancedapp");
assertTrue(gzFile.exists());
File outApp;
try (CompressedApplicationInputStream unpacked = streamFromTarGz(gzFile)) {
outApp = unpacked.decompress();
}
List<File> files = Arrays.asList(outApp.listFiles());
assertEquals(5, files.size());
assertTrue(files.contains(new File(outApp, "services.xml")));
assertTrue(files.contains(new File(outApp, "hosts.xml")));
assertTrue(files.contains(new File(outApp, "deployment.xml")));
assertTrue(files.contains(new File(outApp, "schemas")));
assertTrue(files.contains(new File(outApp, "external")));
File sd = files.get(files.indexOf(new File(outApp, "schemas")));
assertTrue(sd.isDirectory());
assertEquals(1, sd.listFiles().length);
assertEquals(new File(sd, "keyvalue.sd").getAbsolutePath(), sd.listFiles()[0].getAbsolutePath());
File ext = files.get(files.indexOf(new File(outApp, "external")));
assertTrue(ext.isDirectory());
assertEquals(1, ext.listFiles().length);
assertEquals(new File(ext, "foo").getAbsolutePath(), ext.listFiles()[0].getAbsolutePath());
files = Arrays.asList(ext.listFiles());
File foo = files.get(files.indexOf(new File(ext, "foo")));
assertTrue(foo.isDirectory());
assertEquals(1, foo.listFiles().length);
assertEquals(new File(foo, "bar").getAbsolutePath(), foo.listFiles()[0].getAbsolutePath());
files = Arrays.asList(foo.listFiles());
File bar = files.get(files.indexOf(new File(foo, "bar")));
assertTrue(bar.isDirectory());
assertEquals(1, bar.listFiles().length);
assertTrue(bar.listFiles()[0].isFile());
assertEquals(new File(bar, "lol").getAbsolutePath(), bar.listFiles()[0].getAbsolutePath());
}
@Test(expected = InternalServerException.class)
public void require_that_invalid_application_returns_error_when_unpacked() throws Exception {
File app = new File("src/test/resources/deploy/validapp/services.xml");
streamFromTarGz(app).close();
}
private static File createTarGz(String appDir) throws IOException, InterruptedException {
File tmpTar = File.createTempFile("myapp", ".tar");
Process p = new ProcessBuilder("tar", "-C", appDir, "-cvf", tmpTar.getAbsolutePath(), ".").start();
p.waitFor();
p = new ProcessBuilder("gzip", tmpTar.getAbsolutePath()).start();
p.waitFor();
File gzFile = new File(tmpTar.getAbsolutePath() + ".gz");
assertTrue(gzFile.exists());
return gzFile;
}
private static CompressedApplicationInputStream streamFromZip(File zipFile) {
return Exceptions.uncheck(() -> CompressedApplicationInputStream.createFromCompressedStream(new FileInputStream(zipFile), "application/zip", Long.MAX_VALUE));
}
/** Opens tarFile as a gzipped-tar application stream, unchecking any IOException. */
private static CompressedApplicationInputStream streamFromTarGz(File tarFile) {
    return Exceptions.uncheck(() -> {
        FileInputStream in = new FileInputStream(tarFile);
        return CompressedApplicationInputStream.createFromCompressedStream(in, "application/x-gzip", Long.MAX_VALUE);
    });
}
} |
It seems that it is not necessary to be that complicated; I used an easier implementation. | public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
// Validate the SET statement: default the scope, check privileges, and fold the
// value expression into a literal stored in 'result'.
if (type == null) {
type = SetType.DEFAULT;
}
if (Strings.isNullOrEmpty(variable)) {
throw new AnalysisException("No variable name in set statement.");
}
// SET GLOBAL requires ADMIN privilege.
if (type == SetType.GLOBAL) {
if (!Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR,
"ADMIN");
}
}
// A null value (e.g. SET var = DEFAULT handled elsewhere) needs no further analysis.
if (value == null) {
return;
}
// A bare identifier on the right-hand side is treated as a string value.
if (value instanceof SlotRef) {
value = new StringLiteral(((SlotRef) value).getColumnName());
}
value.analyze(analyzer);
if (!value.isConstant()) {
throw new AnalysisException("Set statement does't support non-constant expr.");
}
// Fold the constant expression down to a single literal.
final Expr literalExpr = value.getResultValue();
if (!(literalExpr instanceof LiteralExpr)) {
throw new AnalysisException("Set statement does't support computing expr:" + literalExpr.toSql());
}
result = (LiteralExpr)literalExpr;
// sql_mode accepts either a name string or a numeric bitmask; normalize it.
if (variable.equalsIgnoreCase(SessionVariable.SQL_MODE)) {
if (result instanceof StringLiteral) {
String sqlMode = result.getStringValue();
result = new StringLiteral(SqlModeHelper.encode(sqlMode).toString());
}
else if (result instanceof IntLiteral) {
String sqlMode = SqlModeHelper.decode(result.getLongValue());
result = new IntLiteral(SqlModeHelper.encode(sqlMode).toString(), Type.BIGINT);
}
}
if (variable.equalsIgnoreCase(GlobalVariable.HEARTBEAT_FLAGS)) {
if (result instanceof StringLiteral) {
String flags = result.getStringValue();
result = new StringLiteral(HeartbeatFlagHelper.analyze(flags).toString());
} else if (result instanceof IntLiteral) {
// NOTE(review): 'sqlMode' below is never used and 'result' is rebuilt with the
// same value — this branch only validates via analyze(); confirm that is intended.
String sqlMode = HeartbeatFlagHelper.analyze(result.getLongValue());
result = new IntLiteral(result.getLongValue(), Type.BIGINT);
}
}
if (variable.equalsIgnoreCase(SessionVariable.RESOURCE_VARIABLE)) {
if (result != null && !UserResource.isValidGroup(result.getStringValue())) {
throw new AnalysisException("Invalid resource group, now we support {low, normal, high}.");
}
}
} | if (variable.equalsIgnoreCase(GlobalVariable.HEARTBEAT_FLAGS)) { | public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
// Validate the SET statement: default the scope, check privileges, and fold the
// value expression into a literal stored in 'result'.
if (type == null) {
type = SetType.DEFAULT;
}
if (Strings.isNullOrEmpty(variable)) {
throw new AnalysisException("No variable name in set statement.");
}
// SET GLOBAL requires ADMIN privilege.
if (type == SetType.GLOBAL) {
if (!Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR,
"ADMIN");
}
}
// A null value needs no further analysis.
if (value == null) {
return;
}
// A bare identifier on the right-hand side is treated as a string value.
if (value instanceof SlotRef) {
value = new StringLiteral(((SlotRef) value).getColumnName());
}
value.analyze(analyzer);
if (!value.isConstant()) {
throw new AnalysisException("Set statement does't support non-constant expr.");
}
// Fold the constant expression down to a single literal.
final Expr literalExpr = value.getResultValue();
if (!(literalExpr instanceof LiteralExpr)) {
throw new AnalysisException("Set statement does't support computing expr:" + literalExpr.toSql());
}
result = (LiteralExpr)literalExpr;
// sql_mode accepts either a name string or a numeric bitmask; normalize it.
if (variable.equalsIgnoreCase(SessionVariable.SQL_MODE)) {
if (result instanceof StringLiteral) {
String sqlMode = result.getStringValue();
result = new StringLiteral(SqlModeHelper.encode(sqlMode).toString());
}
else if (result instanceof IntLiteral) {
String sqlMode = SqlModeHelper.decode(result.getLongValue());
result = new IntLiteral(SqlModeHelper.encode(sqlMode).toString(), Type.BIGINT);
}
}
if (variable.equalsIgnoreCase(SessionVariable.RESOURCE_VARIABLE)) {
if (result != null && !UserResource.isValidGroup(result.getStringValue())) {
throw new AnalysisException("Invalid resource group, now we support {low, normal, high}.");
}
}
// default_rowset_type is a global-only variable with a fixed set of valid values.
if (variable.equalsIgnoreCase(SessionVariable.DEFAULT_ROWSET_TYPE)) {
if (type != SetType.GLOBAL) {
throw new AnalysisException("default_rowset_type must be global. use set global");
}
if (result != null && !HeartbeatFlags.isValidRowsetType(result.getStringValue())) {
throw new AnalysisException("Invalid rowset type, now we support {alpha, beta}.");
}
}
} | class SetVar {
private String variable; // name of the variable being set
private Expr value;      // raw right-hand-side expression supplied by the user
private SetType type;    // scope of the assignment (DEFAULT/SESSION/GLOBAL)
private LiteralExpr result; // 'value' folded to a single literal (set eagerly for literals)
// No-arg constructor; all fields are left null until populated later.
public SetVar() {
}
public SetVar(SetType type, String variable, Expr value) {
this.type = type;
this.variable = variable;
this.value = value;
// A literal needs no analysis; record it as the result immediately.
if (value instanceof LiteralExpr) {
this.result = (LiteralExpr)value;
}
}
// Convenience constructor defaulting the scope to SetType.DEFAULT.
// Delegates to the main constructor so the literal-folding logic lives in one place.
public SetVar(String variable, Expr value) {
    this(SetType.DEFAULT, variable, value);
}
public String getVariable() {
return variable;
}
// Returns the analyzed literal result, not the raw expression passed in.
public LiteralExpr getValue() {
return result;
}
public SetType getType() {
return type;
}
public void setType(SetType type) {
this.type = type;
}
public String toSql() {
    // Concatenates "<type> <variable> = <value>".
    return type.toSql() + " " + variable + " = " + value.toSql();
}
@Override
public String toString() {
// Debug representation mirrors the SQL form.
return toSql();
}
} | class SetVar {
// NOTE: this is a second, near-identical copy of the SetVar members above.
private String variable; // name of the variable being set
private Expr value;      // raw right-hand-side expression supplied by the user
private SetType type;    // scope of the assignment (DEFAULT/SESSION/GLOBAL)
private LiteralExpr result; // 'value' folded to a single literal (set eagerly for literals)
// No-arg constructor; all fields are left null until populated later.
public SetVar() {
}
public SetVar(SetType type, String variable, Expr value) {
this.type = type;
this.variable = variable;
this.value = value;
// A literal needs no analysis; record it as the result immediately.
if (value instanceof LiteralExpr) {
this.result = (LiteralExpr)value;
}
}
// Convenience constructor defaulting the scope to SetType.DEFAULT.
public SetVar(String variable, Expr value) {
this.type = SetType.DEFAULT;
this.variable = variable;
this.value = value;
if (value instanceof LiteralExpr) {
this.result = (LiteralExpr)value;
}
}
public String getVariable() {
return variable;
}
// Returns the analyzed literal result, not the raw expression passed in.
public LiteralExpr getValue() {
return result;
}
public SetType getType() {
return type;
}
public void setType(SetType type) {
this.type = type;
}
public String toSql() {
StringBuilder sb = new StringBuilder();
sb.append(type.toSql());
sb.append(" ").append(variable).append(" = ").append(value.toSql());
return sb.toString();
}
@Override
public String toString() {
return toSql();
}
} |
I'll probably replace the job error with a job status some time soon, and then this won't be an issue :) | public boolean isOutOfCapacity() {
return jobError.filter(error -> error == DeploymentJobs.JobError.outOfCapacity).isPresent();
} | return jobError.filter(error -> error == DeploymentJobs.JobError.outOfCapacity).isPresent(); | public boolean isOutOfCapacity() {
return jobError.filter(error -> error == DeploymentJobs.JobError.outOfCapacity).isPresent();
} | class JobStatus {
private final DeploymentJobs.JobType type;
private final Optional<JobRun> lastTriggered;
private final Optional<JobRun> lastCompleted;
private final Optional<JobRun> firstFailing;
private final Optional<JobRun> lastSuccess;
private final Optional<DeploymentJobs.JobError> jobError;
/**
* Used by the persistence layer (only) to create a complete JobStatus instance.
* Other creation should be by using initial- and with- methods.
*/
public JobStatus(DeploymentJobs.JobType type, Optional<DeploymentJobs.JobError> jobError,
Optional<JobRun> lastTriggered, Optional<JobRun> lastCompleted,
Optional<JobRun> firstFailing, Optional<JobRun> lastSuccess) {
requireNonNull(type, "jobType cannot be null");
requireNonNull(jobError, "jobError cannot be null");
requireNonNull(lastTriggered, "lastTriggered cannot be null");
requireNonNull(lastCompleted, "lastCompleted cannot be null");
requireNonNull(firstFailing, "firstFailing cannot be null");
requireNonNull(lastSuccess, "lastSuccess cannot be null");
this.type = type;
this.jobError = jobError;
// lastTriggered is always cleared for the component job type.
this.lastTriggered = type == DeploymentJobs.JobType.component ? Optional.empty() : lastTriggered;
this.lastCompleted = lastCompleted;
this.firstFailing = firstFailing;
this.lastSuccess = lastSuccess;
}
/** Returns an empty job status */
public static JobStatus initial(DeploymentJobs.JobType type) {
return new JobStatus(type, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
}
public JobStatus withTriggering(Version platform, ApplicationVersion application, Optional<Deployment> deployment, String reason, Instant triggeredAt) {
return withTriggering(JobRun.triggering(platform, application, deployment.map(Deployment::version), deployment.map(Deployment::applicationVersion), reason, triggeredAt));
}
public JobStatus withTriggering(JobRun jobRun) {
return new JobStatus(type, jobError, Optional.of(jobRun), lastCompleted, firstFailing, lastSuccess);
}
// Convenience overload: completes the last triggered run with the given run id.
// NOTE(review): calls lastTriggered.get() without a presence check — throws if a
// completion is recorded before any triggering; confirm callers guarantee ordering.
public JobStatus withCompletion(long runId, Optional<DeploymentJobs.JobError> jobError, Instant completion) {
return withCompletion(lastTriggered.get().completion(runId, completion), jobError);
}
public JobStatus withCompletion(JobRun completion, Optional<DeploymentJobs.JobError> jobError) {
// Record the first failing run only when transitioning into a failing state.
Optional<JobRun> firstFailing = this.firstFailing;
if (jobError.isPresent() && ! this.firstFailing.isPresent())
firstFailing = Optional.of(completion);
// A success updates lastSuccess and clears the failing streak.
Optional<JobRun> lastSuccess = this.lastSuccess;
if ( ! jobError.isPresent()) {
lastSuccess = Optional.of(completion);
firstFailing = Optional.empty();
}
return new JobStatus(type, jobError, lastTriggered, Optional.of(completion), firstFailing, lastSuccess);
}
public DeploymentJobs.JobType type() { return type; }
/** Returns true unless this job last completed with a failure */
public boolean isSuccess() {
return lastCompleted().isPresent() && ! jobError.isPresent();
}
/** The error of the last completion, or empty if the last run succeeded */
public Optional<DeploymentJobs.JobError> jobError() { return jobError; }
/** Returns whether this last failed on out of capacity */
/**
* Returns the last triggering of this job, or empty if the controller has never triggered it
* and not seen a deployment for it
*/
public Optional<JobRun> lastTriggered() { return lastTriggered; }
/** Returns the last completion of this job (whether failing or succeeding), or empty if it never completed */
public Optional<JobRun> lastCompleted() { return lastCompleted; }
/** Returns the run when this started failing, or empty if it is not currently failing */
public Optional<JobRun> firstFailing() { return firstFailing; }
/** Returns the run when this last succeeded, or empty if it has never succeeded */
public Optional<JobRun> lastSuccess() { return lastSuccess; }
@Override
public String toString() {
return "job status of " + type + "[ " +
"last triggered: " + lastTriggered.map(JobRun::toString).orElse("(never)") +
", last completed: " + lastCompleted.map(JobRun::toString).orElse("(never)") +
", first failing: " + firstFailing.map(JobRun::toString).orElse("(not failing)") +
", lastSuccess: " + lastSuccess.map(JobRun::toString).orElse("(never)") + "]";
}
@Override
public int hashCode() { return Objects.hash(type, jobError, lastTriggered, lastCompleted, firstFailing, lastSuccess); }
@Override
public boolean equals(Object o) {
if (o == this) return true;
if ( ! ( o instanceof JobStatus)) return false;
JobStatus other = (JobStatus)o;
return Objects.equals(type, other.type) &&
Objects.equals(jobError, other.jobError) &&
Objects.equals(lastTriggered, other.lastTriggered) &&
Objects.equals(lastCompleted, other.lastCompleted) &&
Objects.equals(firstFailing, other.firstFailing) &&
Objects.equals(lastSuccess, other.lastSuccess);
}
/** Information about a particular triggering or completion of a run of a job. This is immutable. */
public static class JobRun {
private final long id;
private final Version platform;
private final ApplicationVersion application;
private final Optional<Version> sourcePlatform;
private final Optional<ApplicationVersion> sourceApplication;
private final String reason;
private final Instant at;
public JobRun(long id, Version platform, ApplicationVersion application, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication, String reason, Instant at) {
this.id = id;
this.platform = requireNonNull(platform);
this.application = requireNonNull(application);
this.sourcePlatform = sourcePlatform;
this.sourceApplication = sourceApplication;
this.reason = requireNonNull(reason);
this.at = requireNonNull(at);
}
public static JobRun triggering(Version platform, ApplicationVersion application, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication, String reason, Instant at) {
return new JobRun(-1, platform, application, sourcePlatform, sourceApplication, reason, at);
}
public JobRun completion(long id, Instant at) {
return new JobRun(id, platform, application, sourcePlatform, sourceApplication, reason, at);
}
/** Returns the id of this run of this job, or -1 if not known */
public long id() { return id; }
/** Returns the Vespa version used on this run */
public Version platform() { return platform; }
/** Returns the Vespa version this run upgraded from, if already deployed */
public Optional<Version> sourcePlatform() { return sourcePlatform; }
/** Returns the application version used in this run */
public ApplicationVersion application() { return application; }
/** Returns the application version this run upgraded from, if already deployed */
public Optional<ApplicationVersion> sourceApplication() { return sourceApplication; }
/** Returns a human-readable reason for this particular job run */
public String reason() { return reason; }
/** Returns the time if this triggering or completion */
public Instant at() { return at; }
@Override
public String toString() {
return "job run " + id + " of version " + platform +
(sourcePlatform.map(version -> " (" + version + ")").orElse("")) +
" " + application.id() +
(sourceApplication.map(version -> " (" + version.id() + ")").orElse("")) +
" at " + at;
}
@Override
public boolean equals(Object o) {
// NOTE(review): 'reason' is deliberately not compared (hashCode skips it too) —
// presumably because it is informational only; confirm before relying on this.
if (this == o) return true;
if (!(o instanceof JobRun)) return false;
JobRun run = (JobRun) o;
if (id != run.id) return false;
if (!platform.equals(run.platform)) return false;
if (!application.equals(run.application)) return false;
if (!sourcePlatform.equals(run.sourcePlatform)) return false;
if (!sourceApplication.equals(run.sourceApplication)) return false;
return at.equals(run.at);
}
@Override
public int hashCode() {
    // Hash over the same fields equals() compares ('reason' is excluded there as
    // well), using Objects.hash for consistency with JobStatus.hashCode() above.
    return Objects.hash(id, platform, application, sourcePlatform, sourceApplication, at);
}
}
} | class JobStatus {
private final DeploymentJobs.JobType type;
private final Optional<JobRun> lastTriggered;
private final Optional<JobRun> lastCompleted;
private final Optional<JobRun> firstFailing;
private final Optional<JobRun> lastSuccess;
private final Optional<DeploymentJobs.JobError> jobError;
/**
* Used by the persistence layer (only) to create a complete JobStatus instance.
* Other creation should be by using initial- and with- methods.
*/
public JobStatus(DeploymentJobs.JobType type, Optional<DeploymentJobs.JobError> jobError,
Optional<JobRun> lastTriggered, Optional<JobRun> lastCompleted,
Optional<JobRun> firstFailing, Optional<JobRun> lastSuccess) {
requireNonNull(type, "jobType cannot be null");
requireNonNull(jobError, "jobError cannot be null");
requireNonNull(lastTriggered, "lastTriggered cannot be null");
requireNonNull(lastCompleted, "lastCompleted cannot be null");
requireNonNull(firstFailing, "firstFailing cannot be null");
requireNonNull(lastSuccess, "lastSuccess cannot be null");
this.type = type;
this.jobError = jobError;
this.lastTriggered = type == DeploymentJobs.JobType.component ? Optional.empty() : lastTriggered;
this.lastCompleted = lastCompleted;
this.firstFailing = firstFailing;
this.lastSuccess = lastSuccess;
}
/** Returns an empty job status */
public static JobStatus initial(DeploymentJobs.JobType type) {
return new JobStatus(type, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
}
public JobStatus withTriggering(Version platform, ApplicationVersion application, Optional<Deployment> deployment, String reason, Instant triggeredAt) {
return withTriggering(JobRun.triggering(platform, application, deployment.map(Deployment::version), deployment.map(Deployment::applicationVersion), reason, triggeredAt));
}
public JobStatus withTriggering(JobRun jobRun) {
return new JobStatus(type, jobError, Optional.of(jobRun), lastCompleted, firstFailing, lastSuccess);
}
public JobStatus withCompletion(long runId, Optional<DeploymentJobs.JobError> jobError, Instant completion) {
return withCompletion(lastTriggered.get().completion(runId, completion), jobError);
}
public JobStatus withCompletion(JobRun completion, Optional<DeploymentJobs.JobError> jobError) {
Optional<JobRun> firstFailing = this.firstFailing;
if (jobError.isPresent() && ! this.firstFailing.isPresent())
firstFailing = Optional.of(completion);
Optional<JobRun> lastSuccess = this.lastSuccess;
if ( ! jobError.isPresent()) {
lastSuccess = Optional.of(completion);
firstFailing = Optional.empty();
}
return new JobStatus(type, jobError, lastTriggered, Optional.of(completion), firstFailing, lastSuccess);
}
public DeploymentJobs.JobType type() { return type; }
/** Returns true unless this job last completed with a failure */
public boolean isSuccess() {
return lastCompleted().isPresent() && ! jobError.isPresent();
}
/** The error of the last completion, or empty if the last run succeeded */
public Optional<DeploymentJobs.JobError> jobError() { return jobError; }
/** Returns whether this last failed on out of capacity */
/**
* Returns the last triggering of this job, or empty if the controller has never triggered it
* and not seen a deployment for it
*/
public Optional<JobRun> lastTriggered() { return lastTriggered; }
/** Returns the last completion of this job (whether failing or succeeding), or empty if it never completed */
public Optional<JobRun> lastCompleted() { return lastCompleted; }
/** Returns the run when this started failing, or empty if it is not currently failing */
public Optional<JobRun> firstFailing() { return firstFailing; }
/** Returns the run when this last succeeded, or empty if it has never succeeded */
public Optional<JobRun> lastSuccess() { return lastSuccess; }
@Override
public String toString() {
return "job status of " + type + "[ " +
"last triggered: " + lastTriggered.map(JobRun::toString).orElse("(never)") +
", last completed: " + lastCompleted.map(JobRun::toString).orElse("(never)") +
", first failing: " + firstFailing.map(JobRun::toString).orElse("(not failing)") +
", lastSuccess: " + lastSuccess.map(JobRun::toString).orElse("(never)") + "]";
}
@Override
public int hashCode() { return Objects.hash(type, jobError, lastTriggered, lastCompleted, firstFailing, lastSuccess); }
@Override
public boolean equals(Object o) {
if (o == this) return true;
if ( ! ( o instanceof JobStatus)) return false;
JobStatus other = (JobStatus)o;
return Objects.equals(type, other.type) &&
Objects.equals(jobError, other.jobError) &&
Objects.equals(lastTriggered, other.lastTriggered) &&
Objects.equals(lastCompleted, other.lastCompleted) &&
Objects.equals(firstFailing, other.firstFailing) &&
Objects.equals(lastSuccess, other.lastSuccess);
}
/** Information about a particular triggering or completion of a run of a job. This is immutable. */
public static class JobRun {
private final long id;
private final Version platform;
private final ApplicationVersion application;
private final Optional<Version> sourcePlatform;
private final Optional<ApplicationVersion> sourceApplication;
private final String reason;
private final Instant at;
public JobRun(long id, Version platform, ApplicationVersion application, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication, String reason, Instant at) {
this.id = id;
this.platform = requireNonNull(platform);
this.application = requireNonNull(application);
this.sourcePlatform = sourcePlatform;
this.sourceApplication = sourceApplication;
this.reason = requireNonNull(reason);
this.at = requireNonNull(at);
}
public static JobRun triggering(Version platform, ApplicationVersion application, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication, String reason, Instant at) {
return new JobRun(-1, platform, application, sourcePlatform, sourceApplication, reason, at);
}
public JobRun completion(long id, Instant at) {
return new JobRun(id, platform, application, sourcePlatform, sourceApplication, reason, at);
}
/** Returns the id of this run of this job, or -1 if not known */
public long id() { return id; }
/** Returns the Vespa version used on this run */
public Version platform() { return platform; }
/** Returns the Vespa version this run upgraded from, if already deployed */
public Optional<Version> sourcePlatform() { return sourcePlatform; }
/** Returns the application version used in this run */
public ApplicationVersion application() { return application; }
/** Returns the application version this run upgraded from, if already deployed */
public Optional<ApplicationVersion> sourceApplication() { return sourceApplication; }
/** Returns a human-readable reason for this particular job run */
public String reason() { return reason; }
/** Returns the time if this triggering or completion */
public Instant at() { return at; }
@Override
public String toString() {
return "job run " + id + " of version " + platform +
(sourcePlatform.map(version -> " (" + version + ")").orElse("")) +
" " + application.id() +
(sourceApplication.map(version -> " (" + version.id() + ")").orElse("")) +
" at " + at;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof JobRun)) return false;
JobRun run = (JobRun) o;
if (id != run.id) return false;
if (!platform.equals(run.platform)) return false;
if (!application.equals(run.application)) return false;
if (!sourcePlatform.equals(run.sourcePlatform)) return false;
if (!sourceApplication.equals(run.sourceApplication)) return false;
return at.equals(run.at);
}
@Override
public int hashCode() {
int result = (int) (id ^ (id >>> 32));
result = 31 * result + platform.hashCode();
result = 31 * result + application.hashCode();
result = 31 * result + sourcePlatform.hashCode();
result = 31 * result + sourceApplication.hashCode();
result = 31 * result + at.hashCode();
return result;
}
}
} |
Currently, the method of getting the mapping is compatible. Alias and index can be queried. Therefore, the mapping relationship is not maintained. | public List<String> listTableNames(SessionContext ctx, String dbName) {
List<String> indexes = esRestClient.getIndexes().stream().distinct().collect(Collectors.toList());
esRestClient.getAliases().entrySet().stream().filter(e -> indexes.contains(e.getKey()))
.flatMap(e -> e.getValue().stream()).distinct().forEach(indexes::add);
return indexes;
} | esRestClient.getAliases().entrySet().stream().filter(e -> indexes.contains(e.getKey())) | public List<String> listTableNames(SessionContext ctx, String dbName) {
return esRestClient.listTable();
} | class EsExternalDataSource extends ExternalDataSource {
// Name of the single pseudo-database this ES catalog exposes.
public static final String DEFAULT_DB = "default_es_db";
private static final Logger LOG = LogManager.getLogger(EsExternalDataSource.class);
// Property keys accepted in the catalog's CREATE statement.
private static final String PROP_HOSTS = "elasticsearch.hosts";
private static final String PROP_USERNAME = "elasticsearch.username";
private static final String PROP_PASSWORD = "elasticsearch.password";
private static final String PROP_DOC_VALUE_SCAN = "elasticsearch.doc_value_scan";
private static final String PROP_KEYWORD_SNIFF = "elasticsearch.keyword_sniff";
private static final String PROP_NODES_DISCOVERY = "elasticsearch.nodes_discovery";
private static final String PROP_SSL = "elasticsearch.ssl";
// Lazily initialized in init(); see makeSureInitialized().
private Map<String, Long> dbNameToId;
private Map<Long, EsExternalDatabase> idToDb;
private EsRestClient esRestClient;
private boolean initialized = false;
// Parsed/cached connection settings, filled in by validate().
private String[] nodes;
private String username = null;
private String password = null;
private boolean enableDocValueScan = true;
private boolean enableKeywordSniff = true;
private boolean enableSsl = false;
private boolean enableNodesDiscovery = true;
/**
* Default constructor for EsExternalDataSource.
*/
public EsExternalDataSource(long catalogId, String name, Map<String, String> props) throws DdlException {
this.id = catalogId;
this.name = name;
this.type = "es";
validate(props);
this.dsProperty = new DataSourceProperty();
this.dsProperty.setProperties(props);
}
private void validate(Map<String, String> properties) throws DdlException {
if (properties == null) {
throw new DdlException(
"Please set properties of elasticsearch table, " + "they are: hosts, user, password, index");
}
if (StringUtils.isBlank(properties.get(PROP_HOSTS))) {
throw new DdlException("Hosts of ES table is null.");
}
nodes = properties.get(PROP_HOSTS).trim().split(",");
if (StringUtils.isNotBlank(properties.get(PROP_USERNAME))) {
username = properties.get(PROP_USERNAME).trim();
}
if (StringUtils.isNotBlank(properties.get(PROP_PASSWORD))) {
password = properties.get(PROP_PASSWORD).trim();
}
if (properties.containsKey(PROP_DOC_VALUE_SCAN)) {
enableDocValueScan = EsUtil.getBoolean(properties, PROP_DOC_VALUE_SCAN);
}
if (properties.containsKey(PROP_KEYWORD_SNIFF)) {
enableKeywordSniff = EsUtil.getBoolean(properties, PROP_KEYWORD_SNIFF);
}
if (properties.containsKey(PROP_NODES_DISCOVERY)) {
enableNodesDiscovery = EsUtil.getBoolean(properties, PROP_NODES_DISCOVERY);
}
if (properties.containsKey(PROP_SSL)) {
enableSsl = EsUtil.getBoolean(properties, PROP_SSL);
for (String seed : nodes) {
if (enableSsl && seed.startsWith("http:
throw new DdlException("if ssl_enabled is true, the https protocol must be used");
}
if (!enableSsl && seed.startsWith("https:
throw new DdlException("if ssl_enabled is false, the http protocol must be used");
}
}
}
}
/**
* The datasource can't be initialized at creation time because it may depend on a
* third-party system; ensure the client is initialized before any method uses it.
*/
private synchronized void makeSureInitialized() {
// synchronized guards the check-then-init so init() runs at most once.
if (!initialized) {
init();
initialized = true;
}
}
// One-time setup: re-validate properties, build the REST client, and register the
// single pseudo-database this catalog exposes.
private void init() {
try {
validate(this.dsProperty.getProperties());
} catch (DdlException e) {
// Best-effort: invalid properties are logged, not fatal, at this point.
LOG.warn("validate error", e);
}
dbNameToId = Maps.newConcurrentMap();
idToDb = Maps.newConcurrentMap();
this.esRestClient = new EsRestClient(this.nodes, this.username, this.password, this.enableSsl);
long defaultDbId = Env.getCurrentEnv().getNextId();
dbNameToId.put(DEFAULT_DB, defaultDbId);
// NOTE(review): registered under key DEFAULT_DB but the database object is named
// "default" — looks inconsistent with getDbNullable()'s name lookup; confirm.
idToDb.put(defaultDbId, new EsExternalDatabase(this, defaultDbId, "default"));
}
@Override
public List<String> listDatabaseNames(SessionContext ctx) {
makeSureInitialized();
// Snapshot of the (currently single) database name registered in init().
return new ArrayList<>(dbNameToId.keySet());
}
@Override
@Nullable
@Override
public ExternalDatabase getDbNullable(String dbName) {
makeSureInitialized();
// Strip the cluster prefix before looking the name up.
String realDbName = ClusterNamespace.getNameFromFullName(dbName);
if (!dbNameToId.containsKey(realDbName)) {
return null;
}
// A fresh wrapper object is created per call; identity is the cached id.
return new EsExternalDatabase(this, dbNameToId.get(realDbName), realDbName);
}
@Override
public boolean tableExist(SessionContext ctx, String dbName, String tblName) {
// Tables map to ES indexes; dbName is ignored since there is a single pseudo-db.
return esRestClient.existIndex(this.esRestClient.getClient(), tblName);
}
@Override
public List<Long> getDbIds() {
    // Snapshot of the ids of every database registered in this catalog.
    return new ArrayList<>(dbNameToId.values());
}
} | class EsExternalDataSource extends ExternalDataSource {
public static final String DEFAULT_DB = "default_db";
private static final Logger LOG = LogManager.getLogger(EsExternalDataSource.class);
private static final String PROP_HOSTS = "elasticsearch.hosts";
private static final String PROP_USERNAME = "elasticsearch.username";
private static final String PROP_PASSWORD = "elasticsearch.password";
private static final String PROP_DOC_VALUE_SCAN = "elasticsearch.doc_value_scan";
private static final String PROP_KEYWORD_SNIFF = "elasticsearch.keyword_sniff";
private static final String PROP_NODES_DISCOVERY = "elasticsearch.nodes_discovery";
private static final String PROP_SSL = "elasticsearch.ssl";
private Map<String, Long> dbNameToId;
private Map<Long, EsExternalDatabase> idToDb;
private EsRestClient esRestClient;
private boolean initialized = false;
private String[] nodes;
private String username = null;
private String password = null;
private boolean enableDocValueScan = true;
private boolean enableKeywordSniff = true;
private boolean enableSsl = false;
private boolean enableNodesDiscovery = true;
/**
* Default constructor for EsExternalDataSource.
*/
public EsExternalDataSource(long catalogId, String name, Map<String, String> props) throws DdlException {
this.id = catalogId;
this.name = name;
this.type = "es";
validate(props);
this.dsProperty = new DataSourceProperty();
this.dsProperty.setProperties(props);
}
private void validate(Map<String, String> properties) throws DdlException {
if (properties == null) {
throw new DdlException(
"Please set properties of elasticsearch table, " + "they are: hosts, user, password, index");
}
if (StringUtils.isBlank(properties.get(PROP_HOSTS))) {
throw new DdlException("Hosts of ES table is null.");
}
nodes = properties.get(PROP_HOSTS).trim().split(",");
if (StringUtils.isNotBlank(properties.get(PROP_USERNAME))) {
username = properties.get(PROP_USERNAME).trim();
}
if (StringUtils.isNotBlank(properties.get(PROP_PASSWORD))) {
password = properties.get(PROP_PASSWORD).trim();
}
if (properties.containsKey(PROP_DOC_VALUE_SCAN)) {
enableDocValueScan = EsUtil.getBoolean(properties, PROP_DOC_VALUE_SCAN);
}
if (properties.containsKey(PROP_KEYWORD_SNIFF)) {
enableKeywordSniff = EsUtil.getBoolean(properties, PROP_KEYWORD_SNIFF);
}
if (properties.containsKey(PROP_NODES_DISCOVERY)) {
enableNodesDiscovery = EsUtil.getBoolean(properties, PROP_NODES_DISCOVERY);
}
if (properties.containsKey(PROP_SSL)) {
enableSsl = EsUtil.getBoolean(properties, PROP_SSL);
for (String seed : nodes) {
if (enableSsl && seed.startsWith("http:
throw new DdlException("if ssl_enabled is true, the https protocol must be used");
}
if (!enableSsl && seed.startsWith("https:
throw new DdlException("if ssl_enabled is false, the http protocol must be used");
}
}
}
}
/**
* Datasource can't be init when creating because the external datasource may depend on third system.
* So you have to make sure the client of third system is initialized before any method was called.
*/
private synchronized void makeSureInitialized() {
if (!initialized) {
init();
initialized = true;
}
}
private void init() {
try {
validate(this.dsProperty.getProperties());
} catch (DdlException e) {
LOG.warn("validate error", e);
}
dbNameToId = Maps.newConcurrentMap();
idToDb = Maps.newConcurrentMap();
this.esRestClient = new EsRestClient(this.nodes, this.username, this.password, this.enableSsl);
long defaultDbId = Env.getCurrentEnv().getNextId();
dbNameToId.put(DEFAULT_DB, defaultDbId);
idToDb.put(defaultDbId, new EsExternalDatabase(this, defaultDbId, "default"));
}
@Override
public List<String> listDatabaseNames(SessionContext ctx) {
makeSureInitialized();
return new ArrayList<>(dbNameToId.keySet());
}
@Override
@Nullable
@Override
public ExternalDatabase getDbNullable(String dbName) {
makeSureInitialized();
String realDbName = ClusterNamespace.getNameFromFullName(dbName);
if (!dbNameToId.containsKey(realDbName)) {
return null;
}
return new EsExternalDatabase(this, dbNameToId.get(realDbName), realDbName);
}
@Override
public boolean tableExist(SessionContext ctx, String dbName, String tblName) {
return esRestClient.existIndex(this.esRestClient.getClient(), tblName);
}
// Returns the ids of all registered databases as a mutable snapshot list.
@Override
public List<Long> getDbIds() {
    List<Long> ids = Lists.newArrayList();
    ids.addAll(dbNameToId.values());
    return ids;
}
} |
why here need to disable decimal256 explicitly? add some comment? | public static void initBuiltins(FunctionSet functionSet) {
for (int i = 0; i < Type.getNumericTypes().size(); i++) {
Type t1 = Type.getNumericTypes().get(i);
for (int j = 0; j < Type.getNumericTypes().size(); j++) {
Type t2 = Type.getNumericTypes().get(j);
Type retType = Type.getNextNumType(Type.getAssignmentCompatibleType(t1, t2, false, false));
NullableMode mode = retType.isDecimalV3() ? NullableMode.CUSTOM : NullableMode.DEPEND_ON_ARGUMENT;
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MULTIPLY.getName(), Lists.newArrayList(t1, t2), retType, mode));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.ADD.getName(), Lists.newArrayList(t1, t2), retType, mode));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.SUBTRACT.getName(), Lists.newArrayList(t1, t2), retType, mode));
}
}
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DOUBLE, Type.DOUBLE),
Type.DOUBLE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.MAX_DECIMALV2_TYPE, Type.MAX_DECIMALV2_TYPE),
Type.MAX_DECIMALV2_TYPE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL32),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL64),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL128),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL64),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL128),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL128),
Type.DECIMAL128, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL32),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL64),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL32),
Type.DECIMAL128, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.FLOAT, Type.FLOAT),
Type.FLOAT, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DOUBLE, Type.DOUBLE),
Type.DOUBLE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.MAX_DECIMALV2_TYPE, Type.MAX_DECIMALV2_TYPE),
Type.MAX_DECIMALV2_TYPE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL32),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL64),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL128),
Type.DECIMAL128, Function.NullableMode.ALWAYS_NULLABLE));
for (int i = 0; i < Type.getIntegerTypes().size(); i++) {
Type t1 = Type.getIntegerTypes().get(i);
for (int j = 0; j < Type.getIntegerTypes().size(); j++) {
Type t2 = Type.getIntegerTypes().get(j);
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.INT_DIVIDE.getName(), Lists.newArrayList(t1, t2),
Type.getAssignmentCompatibleType(t1, t2, false, false),
Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(), Lists.newArrayList(t1, t2),
Type.getAssignmentCompatibleType(t1, t2, false, false),
Function.NullableMode.ALWAYS_NULLABLE));
}
}
} | functionSet.addBuiltin(ScalarFunction.createBuiltinOperator( | public static void initBuiltins(FunctionSet functionSet) {
for (int i = 0; i < Type.getNumericTypes().size(); i++) {
Type t1 = Type.getNumericTypes().get(i);
for (int j = 0; j < Type.getNumericTypes().size(); j++) {
Type t2 = Type.getNumericTypes().get(j);
Type retType = Type.getNextNumType(Type.getAssignmentCompatibleType(t1, t2, false, false));
NullableMode mode = retType.isDecimalV3() ? NullableMode.CUSTOM : NullableMode.DEPEND_ON_ARGUMENT;
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MULTIPLY.getName(), Lists.newArrayList(t1, t2), retType, mode));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.ADD.getName(), Lists.newArrayList(t1, t2), retType, mode));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.SUBTRACT.getName(), Lists.newArrayList(t1, t2), retType, mode));
}
}
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DOUBLE, Type.DOUBLE),
Type.DOUBLE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.MAX_DECIMALV2_TYPE, Type.MAX_DECIMALV2_TYPE),
Type.MAX_DECIMALV2_TYPE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL32),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL64),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL128),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL64),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL128),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL128),
Type.DECIMAL128, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL32),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL64),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.DIVIDE.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL32),
Type.DECIMAL128, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.FLOAT, Type.FLOAT),
Type.FLOAT, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DOUBLE, Type.DOUBLE),
Type.DOUBLE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.MAX_DECIMALV2_TYPE, Type.MAX_DECIMALV2_TYPE),
Type.MAX_DECIMALV2_TYPE, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DECIMAL32, Type.DECIMAL32),
Type.DECIMAL32, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DECIMAL64, Type.DECIMAL64),
Type.DECIMAL64, Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(),
Lists.<Type>newArrayList(Type.DECIMAL128, Type.DECIMAL128),
Type.DECIMAL128, Function.NullableMode.ALWAYS_NULLABLE));
for (int i = 0; i < Type.getIntegerTypes().size(); i++) {
Type t1 = Type.getIntegerTypes().get(i);
for (int j = 0; j < Type.getIntegerTypes().size(); j++) {
Type t2 = Type.getIntegerTypes().get(j);
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.INT_DIVIDE.getName(), Lists.newArrayList(t1, t2),
Type.getAssignmentCompatibleType(t1, t2, false, false),
Function.NullableMode.ALWAYS_NULLABLE));
functionSet.addBuiltin(ScalarFunction.createBuiltinOperator(
Operator.MOD.getName(), Lists.newArrayList(t1, t2),
Type.getAssignmentCompatibleType(t1, t2, false, false),
Function.NullableMode.ALWAYS_NULLABLE));
}
}
} | class ArithmeticExpr extends Expr {
enum OperatorPosition {
BINARY_INFIX,
UNARY_PREFIX,
UNARY_POSTFIX,
}
public enum Operator {
MULTIPLY("*", "multiply", OperatorPosition.BINARY_INFIX, TExprOpcode.MULTIPLY),
DIVIDE("/", "divide", OperatorPosition.BINARY_INFIX, TExprOpcode.DIVIDE),
MOD("%", "mod", OperatorPosition.BINARY_INFIX, TExprOpcode.MOD),
INT_DIVIDE("DIV", "int_divide", OperatorPosition.BINARY_INFIX, TExprOpcode.INT_DIVIDE),
ADD("+", "add", OperatorPosition.BINARY_INFIX, TExprOpcode.ADD),
SUBTRACT("-", "subtract", OperatorPosition.BINARY_INFIX, TExprOpcode.SUBTRACT),
BITAND("&", "bitand", OperatorPosition.BINARY_INFIX, TExprOpcode.BITAND),
BITOR("|", "bitor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITOR),
BITXOR("^", "bitxor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITXOR),
BITNOT("~", "bitnot", OperatorPosition.UNARY_PREFIX, TExprOpcode.BITNOT),
FACTORIAL("!", "factorial", OperatorPosition.UNARY_POSTFIX, TExprOpcode.FACTORIAL);
private final String description;
private final String name;
private final OperatorPosition pos;
private final TExprOpcode opcode;
Operator(String description, String name, OperatorPosition pos, TExprOpcode opcode) {
this.description = description;
this.name = name;
this.pos = pos;
this.opcode = opcode;
}
@Override
public String toString() {
return description;
}
public String getName() {
return name;
}
public OperatorPosition getPos() {
return pos;
}
public TExprOpcode getOpcode() {
return opcode;
}
public boolean isUnary() {
return pos == OperatorPosition.UNARY_PREFIX
|| pos == OperatorPosition.UNARY_POSTFIX;
}
public boolean isBinary() {
return pos == OperatorPosition.BINARY_INFIX;
}
}
private final Operator op;
public ArithmeticExpr(Operator op, Expr e1, Expr e2) {
super();
this.op = op;
Preconditions.checkNotNull(e1);
children.add(e1);
Preconditions.checkArgument(
op == Operator.BITNOT && e2 == null || op != Operator.BITNOT && e2 != null);
if (e2 != null) {
children.add(e2);
}
}
/**
* constructor only used for Nereids.
*/
public ArithmeticExpr(Operator op, Expr e1, Expr e2, Type returnType, NullableMode nullableMode) {
this(op, e1, e2);
List<Type> argTypes;
if (e2 == null) {
argTypes = Lists.newArrayList(e1.getType());
} else {
argTypes = Lists.newArrayList(e1.getType(), e2.getType());
}
fn = new Function(new FunctionName(op.getName()), argTypes, returnType, false, true, nullableMode);
type = returnType;
}
/**
* Copy c'tor used in clone().
*/
protected ArithmeticExpr(ArithmeticExpr other) {
super(other);
this.op = other.op;
}
@Override
public String toString() {
return toSql();
}
@Override
public Expr clone() {
return new ArithmeticExpr(this);
}
@Override
public String toSqlImpl() {
if (children.size() == 1) {
return op.toString() + " " + getChild(0).toSql();
} else {
return "(" + getChild(0).toSql() + " " + op.toString() + " " + getChild(1).toSql() + ")";
}
}
@Override
public String toDigestImpl() {
if (children.size() == 1) {
return op.toString() + " " + getChild(0).toDigest();
} else {
return getChild(0).toDigest() + " " + op.toString() + " " + getChild(1).toDigest();
}
}
@Override
protected void toThrift(TExprNode msg) {
msg.node_type = TExprNodeType.ARITHMETIC_EXPR;
if (!(type.isDecimalV2() || type.isDecimalV3())) {
msg.setOpcode(op.getOpcode());
msg.setOutputColumn(outputColumn);
}
}
@Override
public boolean equals(Object obj) {
if (!super.equals(obj)) {
return false;
}
return ((ArithmeticExpr) obj).opcode == opcode;
}
@Override
public void computeOutputColumn(Analyzer analyzer) {
super.computeOutputColumn(analyzer);
List<TupleId> tupleIds = Lists.newArrayList();
getIds(tupleIds, null);
Preconditions.checkArgument(tupleIds.size() == 1);
}
private Type findCommonType(Type t1, Type t2) {
PrimitiveType pt1 = t1.getPrimitiveType();
PrimitiveType pt2 = t2.getPrimitiveType();
if (pt1 == PrimitiveType.DOUBLE || pt2 == PrimitiveType.DOUBLE) {
return Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMALV2 || pt2 == PrimitiveType.DECIMALV2) {
return pt1 == PrimitiveType.DECIMALV2 && pt2 == PrimitiveType.DECIMALV2
|| (ConnectContext.get() != null
&& ConnectContext.get().getSessionVariable().roundPreciseDecimalV2Value)
? Type.MAX_DECIMALV2_TYPE : Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMAL32 || pt2 == PrimitiveType.DECIMAL32) {
return pt1 == PrimitiveType.DECIMAL32 && pt2 == PrimitiveType.DECIMAL32 ? Type.DECIMAL32 : Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMAL64 || pt2 == PrimitiveType.DECIMAL64) {
return pt1 == PrimitiveType.DECIMAL64 && pt2 == PrimitiveType.DECIMAL64 ? Type.DECIMAL64 : Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMAL128 || pt2 == PrimitiveType.DECIMAL128) {
return pt1 == PrimitiveType.DECIMAL128 && pt2 == PrimitiveType.DECIMAL128 ? Type.DECIMAL128 : Type.DOUBLE;
} else if (pt1 == PrimitiveType.LARGEINT || pt2 == PrimitiveType.LARGEINT) {
return Type.LARGEINT;
} else {
if (pt1 != PrimitiveType.BIGINT && pt2 != PrimitiveType.BIGINT) {
return Type.INVALID;
}
return Type.BIGINT;
}
}
private boolean castIfHaveSameType(Type t1, Type t2, Type target) throws AnalysisException {
if (t1 == target || t2 == target) {
castChild(target, 0);
castChild(target, 1);
return true;
}
return false;
}
private void castUpperInteger(Type t1, Type t2) throws AnalysisException {
if (!t1.isIntegerType() || !t2.isIntegerType()) {
return;
}
if (castIfHaveSameType(t1, t2, Type.BIGINT)) {
return;
}
if (castIfHaveSameType(t1, t2, Type.INT)) {
return;
}
if (castIfHaveSameType(t1, t2, Type.SMALLINT)) {
return;
}
if (castIfHaveSameType(t1, t2, Type.TINYINT)) {
return;
}
}
private void analyzeNoneDecimalOp(Type t1, Type t2) throws AnalysisException {
Type commonType;
switch (op) {
case MULTIPLY:
case ADD:
case SUBTRACT:
if (t1.isDecimalV2() || t2.isDecimalV2()) {
castBinaryOp(findCommonType(t1, t2));
}
if (isConstant()) {
castUpperInteger(t1, t2);
}
break;
case MOD:
if (t1.isDecimalV2() || t2.isDecimalV2()) {
castBinaryOp(findCommonType(t1, t2));
} else if ((t1.isFloatingPointType() || t2.isFloatingPointType()) && !t1.equals(t2)) {
castBinaryOp(Type.DOUBLE);
}
break;
case INT_DIVIDE:
if (!t1.isFixedPointType() || !t2.isFixedPointType()) {
castBinaryOp(Type.BIGINT);
}
break;
case DIVIDE:
t1 = getChild(0).getType().getNumResultType();
t2 = getChild(1).getType().getNumResultType();
commonType = findCommonType(t1, t2);
if (commonType.getPrimitiveType() == PrimitiveType.BIGINT
|| commonType.getPrimitiveType() == PrimitiveType.LARGEINT) {
commonType = Type.DOUBLE;
}
castBinaryOp(commonType);
break;
case BITAND:
case BITOR:
case BITXOR:
if (t1 == Type.BOOLEAN && t2 == Type.BOOLEAN) {
t1 = Type.TINYINT;
t2 = Type.TINYINT;
}
commonType = Type.getAssignmentCompatibleType(t1, t2, false, false);
if (commonType.getPrimitiveType().ordinal() > PrimitiveType.LARGEINT.ordinal()) {
commonType = Type.BIGINT;
}
type = castBinaryOp(commonType);
break;
default:
Preconditions.checkState(false,
"Unknown arithmetic operation " + op.toString() + " in: " + this.toSql());
break;
}
}
/**
* Convert integer type to decimal type.
*/
public static Type convertIntToDecimalV3Type(Type type) throws AnalysisException {
if (type.isLargeIntType()) {
return ScalarType.createDecimalV3Type(ScalarType.MAX_DECIMAL128_PRECISION, 0);
} else if (type.isBigIntType()) {
return ScalarType.createDecimalV3Type(ScalarType.MAX_DECIMAL64_PRECISION, 0);
} else if (type.isInteger32Type()) {
return ScalarType.createDecimalV3Type(ScalarType.MAX_DECIMAL32_PRECISION, 0);
} else {
Preconditions.checkState(false,
"Implicit converting to decimal for arithmetic operations only support integer");
return Type.INVALID;
}
}
public static Type convertDecimalV2ToDecimalV3Type(ScalarType type) {
return ScalarType.createDecimalV3Type(type.decimalPrecision(), type.decimalScale());
}
private void analyzeDecimalV3Op(Type t1, Type t2) throws AnalysisException {
Type t1TargetType = t1;
Type t2TargetType = t2;
switch (op) {
case MULTIPLY:
case ADD:
case SUBTRACT:
case MOD:
case DIVIDE:
if (t1.isFloatingPointType() || t2.isFloatingPointType()) {
type = castBinaryOp(ScalarType.DOUBLE);
break;
}
if (t1.isFixedPointType()) {
t1TargetType = convertIntToDecimalV3Type(t1);
castChild(t1TargetType, 0);
}
if (t2.isFixedPointType()) {
t2TargetType = convertIntToDecimalV3Type(t2);
castChild(t2TargetType, 1);
}
if (t1.isDecimalV2()) {
t1TargetType = convertDecimalV2ToDecimalV3Type((ScalarType) t1);
castChild(t1TargetType, 0);
}
if (t2.isDecimalV2()) {
t2TargetType = convertDecimalV2ToDecimalV3Type((ScalarType) t2);
castChild(t2TargetType, 1);
}
final int t1Precision = ((ScalarType) t1TargetType).getScalarPrecision();
final int t2Precision = ((ScalarType) t2TargetType).getScalarPrecision();
final int t1Scale = ((ScalarType) t1TargetType).getScalarScale();
final int t2Scale = ((ScalarType) t2TargetType).getScalarScale();
int precision = Math.max(t1Precision, t2Precision);
int scale = Math.max(t1Scale, t2Scale);
int widthOfIntPart1 = t1Precision - t1Scale;
int widthOfIntPart2 = t2Precision - t2Scale;
if (op == Operator.MULTIPLY) {
scale = t1Scale + t2Scale;
precision = t1Precision + t2Precision;
} else if (op == Operator.DIVIDE) {
precision = t1TargetType.getPrecision() + t2Scale + Config.div_precision_increment;
scale = t1Scale + Config.div_precision_increment;
} else if (op == Operator.ADD || op == Operator.SUBTRACT) {
scale = Math.max(t1Scale, t2Scale);
precision = Math.max(widthOfIntPart1, widthOfIntPart2) + scale + 1;
} else {
scale = Math.max(t1Scale, t2Scale);
precision = widthOfIntPart2 + scale;
}
if (precision > ScalarType.MAX_DECIMAL128_PRECISION) {
precision = ScalarType.MAX_DECIMAL128_PRECISION;
}
if (precision < scale) {
type = castBinaryOp(Type.DOUBLE);
break;
}
type = ScalarType.createDecimalV3Type(precision, scale);
if (op == Operator.ADD || op == Operator.SUBTRACT) {
if (((ScalarType) type).getScalarScale() != ((ScalarType) children.get(0).type).getScalarScale()) {
castChild(type, 0);
}
if (((ScalarType) type).getScalarScale() != ((ScalarType) children.get(1).type).getScalarScale()) {
castChild(type, 1);
}
} else if (op == Operator.DIVIDE && (t1TargetType.isDecimalV3())) {
int leftPrecision = t1Precision + t2Scale + Config.div_precision_increment;
int leftScale = t1Scale + t2Scale + Config.div_precision_increment;
if (leftPrecision < leftScale || leftPrecision > ScalarType.MAX_DECIMAL128_PRECISION) {
type = castBinaryOp(Type.DOUBLE);
break;
}
Expr child = getChild(0);
if (child instanceof DecimalLiteral) {
DecimalLiteral literalChild = (DecimalLiteral) child;
Expr newChild = literalChild
.castToDecimalV3ByDivde(ScalarType.createDecimalV3Type(leftPrecision, leftScale));
setChild(0, newChild);
} else {
castChild(ScalarType.createDecimalV3Type(leftPrecision, leftScale), 0);
}
} else if (op == Operator.MOD) {
precision = Math.max(widthOfIntPart1, widthOfIntPart2) + scale;
if (precision > ScalarType.MAX_DECIMAL128_PRECISION) {
type = castBinaryOp(Type.DOUBLE);
} else {
type = ScalarType.createDecimalV3Type(precision, scale);
castChild(type, 0);
castChild(type, 1);
}
}
break;
case INT_DIVIDE:
case BITAND:
case BITOR:
case BITXOR:
type = castBinaryOp(Type.BIGINT);
break;
case BITNOT:
case FACTORIAL:
break;
default:
Preconditions.checkState(false,
"Unknown arithmetic operation " + op + " in: " + this.toSql());
break;
}
}
@Override
public void analyzeImpl(Analyzer analyzer) throws AnalysisException {
if (op == Operator.BITNOT) {
Type t = getChild(0).getType();
if (t.getPrimitiveType().ordinal() > PrimitiveType.LARGEINT.ordinal()) {
type = Type.BIGINT;
castChild(type, 0);
} else {
type = t;
}
fn = getBuiltinFunction(op.getName(), collectChildReturnTypes(), Function.CompareMode.IS_SUPERTYPE_OF);
if (fn == null) {
Preconditions.checkState(false, String.format("No match for op with operand types", toSql()));
}
return;
}
analyzeSubqueryInChildren();
if (contains(Subquery.class)) {
return;
}
Type t1 = getChild(0).getType();
Type t2 = getChild(1).getType();
if (t1.isNull() || t2.isNull()) {
castBinaryOp(t1.isNull() ? t2 : t1);
t1 = getChild(0).getType();
t2 = getChild(1).getType();
}
if (!t1.isNumericType()) {
castChild(t1.getNumResultType(), 0);
t1 = t1.getNumResultType();
}
if (!t2.isNumericType()) {
castChild(t2.getNumResultType(), 1);
t2 = t2.getNumResultType();
}
if (t1.isDecimalV3() || t2.isDecimalV3()) {
analyzeDecimalV3Op(t1, t2);
} else {
analyzeNoneDecimalOp(t1, t2);
}
fn = getBuiltinFunction(op.name, collectChildReturnTypes(), Function.CompareMode.IS_IDENTICAL);
if (fn == null) {
Preconditions.checkState(false,
String.format("No match for vec function '%s' with operand types %s and %s", toSql(), t1, t2));
}
if (!fn.getReturnType().isDecimalV3()) {
type = fn.getReturnType();
}
}
public void analyzeSubqueryInChildren() throws AnalysisException {
for (Expr child : children) {
if (child instanceof Subquery) {
Subquery subquery = (Subquery) child;
if (!subquery.returnsScalarColumn()) {
String msg = "Subquery of arithmetic expr must return a single column: " + child.toSql();
throw new AnalysisException(msg);
}
/**
* Situation: The expr is a binary predicate and the type of subquery is not scalar type.
* Add assert: The stmt of subquery is added an assert condition (return error if row count > 1).
* Input params:
* expr: 0.9*(select k1 from t2)
* subquery stmt: select k1 from t2
* Output params:
* new expr: 0.9 * (select k1 from t2 (assert row count: return error if row count > 1 ))
* subquery stmt: select k1 from t2 (assert row count: return error if row count > 1 )
*/
if (!subquery.getType().isScalarType()) {
subquery.getStatement().setAssertNumRowsElement(1, AssertNumRowsElement.Assertion.LE);
}
}
}
}
@Override
public int hashCode() {
return 31 * super.hashCode() + Objects.hashCode(op);
}
@Override
protected void compactForLiteral(Type type) throws AnalysisException {
super.compactForLiteral(type);
Type t1 = getChild(0).getType();
Type t2 = getChild(1).getType();
if (t1.isDecimalV3() || t2.isDecimalV3()) {
analyzeDecimalV3Op(t1, t2);
}
}
@Override
public void write(DataOutput out) throws IOException {
Text.writeString(out, op.name());
out.writeInt(children.size());
for (Expr expr : children) {
Expr.writeTo(expr, out);
}
}
public static ArithmeticExpr read(DataInput in) throws IOException {
Operator op = Operator.valueOf(Text.readString(in));
int childNum = in.readInt();
Preconditions.checkState(childNum <= 2, childNum);
Expr child1 = null;
Expr child2 = null;
if (childNum > 0) {
child1 = Expr.readIn(in);
}
if (childNum > 1) {
child2 = Expr.readIn(in);
}
return new ArithmeticExpr(op, child1, child2);
}
} | class ArithmeticExpr extends Expr {
enum OperatorPosition {
BINARY_INFIX,
UNARY_PREFIX,
UNARY_POSTFIX,
}
public enum Operator {
MULTIPLY("*", "multiply", OperatorPosition.BINARY_INFIX, TExprOpcode.MULTIPLY),
DIVIDE("/", "divide", OperatorPosition.BINARY_INFIX, TExprOpcode.DIVIDE),
MOD("%", "mod", OperatorPosition.BINARY_INFIX, TExprOpcode.MOD),
INT_DIVIDE("DIV", "int_divide", OperatorPosition.BINARY_INFIX, TExprOpcode.INT_DIVIDE),
ADD("+", "add", OperatorPosition.BINARY_INFIX, TExprOpcode.ADD),
SUBTRACT("-", "subtract", OperatorPosition.BINARY_INFIX, TExprOpcode.SUBTRACT),
BITAND("&", "bitand", OperatorPosition.BINARY_INFIX, TExprOpcode.BITAND),
BITOR("|", "bitor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITOR),
BITXOR("^", "bitxor", OperatorPosition.BINARY_INFIX, TExprOpcode.BITXOR),
BITNOT("~", "bitnot", OperatorPosition.UNARY_PREFIX, TExprOpcode.BITNOT),
FACTORIAL("!", "factorial", OperatorPosition.UNARY_POSTFIX, TExprOpcode.FACTORIAL);
private final String description;
private final String name;
private final OperatorPosition pos;
private final TExprOpcode opcode;
Operator(String description, String name, OperatorPosition pos, TExprOpcode opcode) {
this.description = description;
this.name = name;
this.pos = pos;
this.opcode = opcode;
}
@Override
public String toString() {
return description;
}
public String getName() {
return name;
}
public OperatorPosition getPos() {
return pos;
}
public TExprOpcode getOpcode() {
return opcode;
}
public boolean isUnary() {
return pos == OperatorPosition.UNARY_PREFIX
|| pos == OperatorPosition.UNARY_POSTFIX;
}
public boolean isBinary() {
return pos == OperatorPosition.BINARY_INFIX;
}
}
private final Operator op;
public ArithmeticExpr(Operator op, Expr e1, Expr e2) {
super();
this.op = op;
Preconditions.checkNotNull(e1);
children.add(e1);
Preconditions.checkArgument(
op == Operator.BITNOT && e2 == null || op != Operator.BITNOT && e2 != null);
if (e2 != null) {
children.add(e2);
}
}
/**
* constructor only used for Nereids.
*/
public ArithmeticExpr(Operator op, Expr e1, Expr e2, Type returnType, NullableMode nullableMode) {
this(op, e1, e2);
List<Type> argTypes;
if (e2 == null) {
argTypes = Lists.newArrayList(e1.getType());
} else {
argTypes = Lists.newArrayList(e1.getType(), e2.getType());
}
fn = new Function(new FunctionName(op.getName()), argTypes, returnType, false, true, nullableMode);
type = returnType;
}
/**
* Copy c'tor used in clone().
*/
protected ArithmeticExpr(ArithmeticExpr other) {
super(other);
this.op = other.op;
}
@Override
public String toString() {
return toSql();
}
@Override
public Expr clone() {
return new ArithmeticExpr(this);
}
@Override
public String toSqlImpl() {
if (children.size() == 1) {
return op.toString() + " " + getChild(0).toSql();
} else {
return "(" + getChild(0).toSql() + " " + op.toString() + " " + getChild(1).toSql() + ")";
}
}
@Override
public String toDigestImpl() {
if (children.size() == 1) {
return op.toString() + " " + getChild(0).toDigest();
} else {
return getChild(0).toDigest() + " " + op.toString() + " " + getChild(1).toDigest();
}
}
@Override
protected void toThrift(TExprNode msg) {
msg.node_type = TExprNodeType.ARITHMETIC_EXPR;
if (!(type.isDecimalV2() || type.isDecimalV3())) {
msg.setOpcode(op.getOpcode());
msg.setOutputColumn(outputColumn);
}
}
@Override
public boolean equals(Object obj) {
if (!super.equals(obj)) {
return false;
}
return ((ArithmeticExpr) obj).opcode == opcode;
}
@Override
public void computeOutputColumn(Analyzer analyzer) {
super.computeOutputColumn(analyzer);
List<TupleId> tupleIds = Lists.newArrayList();
getIds(tupleIds, null);
Preconditions.checkArgument(tupleIds.size() == 1);
}
private Type findCommonType(Type t1, Type t2) {
PrimitiveType pt1 = t1.getPrimitiveType();
PrimitiveType pt2 = t2.getPrimitiveType();
if (pt1 == PrimitiveType.DOUBLE || pt2 == PrimitiveType.DOUBLE) {
return Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMALV2 || pt2 == PrimitiveType.DECIMALV2) {
return pt1 == PrimitiveType.DECIMALV2 && pt2 == PrimitiveType.DECIMALV2
|| (ConnectContext.get() != null
&& ConnectContext.get().getSessionVariable().roundPreciseDecimalV2Value)
? Type.MAX_DECIMALV2_TYPE : Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMAL32 || pt2 == PrimitiveType.DECIMAL32) {
return pt1 == PrimitiveType.DECIMAL32 && pt2 == PrimitiveType.DECIMAL32 ? Type.DECIMAL32 : Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMAL64 || pt2 == PrimitiveType.DECIMAL64) {
return pt1 == PrimitiveType.DECIMAL64 && pt2 == PrimitiveType.DECIMAL64 ? Type.DECIMAL64 : Type.DOUBLE;
} else if (pt1 == PrimitiveType.DECIMAL128 || pt2 == PrimitiveType.DECIMAL128) {
return pt1 == PrimitiveType.DECIMAL128 && pt2 == PrimitiveType.DECIMAL128 ? Type.DECIMAL128 : Type.DOUBLE;
} else if (pt1 == PrimitiveType.LARGEINT || pt2 == PrimitiveType.LARGEINT) {
return Type.LARGEINT;
} else {
if (pt1 != PrimitiveType.BIGINT && pt2 != PrimitiveType.BIGINT) {
return Type.INVALID;
}
return Type.BIGINT;
}
}
private boolean castIfHaveSameType(Type t1, Type t2, Type target) throws AnalysisException {
if (t1 == target || t2 == target) {
castChild(target, 0);
castChild(target, 1);
return true;
}
return false;
}
private void castUpperInteger(Type t1, Type t2) throws AnalysisException {
if (!t1.isIntegerType() || !t2.isIntegerType()) {
return;
}
if (castIfHaveSameType(t1, t2, Type.BIGINT)) {
return;
}
if (castIfHaveSameType(t1, t2, Type.INT)) {
return;
}
if (castIfHaveSameType(t1, t2, Type.SMALLINT)) {
return;
}
if (castIfHaveSameType(t1, t2, Type.TINYINT)) {
return;
}
}
private void analyzeNoneDecimalOp(Type t1, Type t2) throws AnalysisException {
Type commonType;
switch (op) {
case MULTIPLY:
case ADD:
case SUBTRACT:
if (t1.isDecimalV2() || t2.isDecimalV2()) {
castBinaryOp(findCommonType(t1, t2));
}
if (isConstant()) {
castUpperInteger(t1, t2);
}
break;
case MOD:
if (t1.isDecimalV2() || t2.isDecimalV2()) {
castBinaryOp(findCommonType(t1, t2));
} else if ((t1.isFloatingPointType() || t2.isFloatingPointType()) && !t1.equals(t2)) {
castBinaryOp(Type.DOUBLE);
}
break;
case INT_DIVIDE:
if (!t1.isFixedPointType() || !t2.isFixedPointType()) {
castBinaryOp(Type.BIGINT);
}
break;
case DIVIDE:
t1 = getChild(0).getType().getNumResultType();
t2 = getChild(1).getType().getNumResultType();
commonType = findCommonType(t1, t2);
if (commonType.getPrimitiveType() == PrimitiveType.BIGINT
|| commonType.getPrimitiveType() == PrimitiveType.LARGEINT) {
commonType = Type.DOUBLE;
}
castBinaryOp(commonType);
break;
case BITAND:
case BITOR:
case BITXOR:
if (t1 == Type.BOOLEAN && t2 == Type.BOOLEAN) {
t1 = Type.TINYINT;
t2 = Type.TINYINT;
}
commonType = Type.getAssignmentCompatibleType(t1, t2, false, SessionVariable.getEnableDecimal256());
if (commonType.getPrimitiveType().ordinal() > PrimitiveType.LARGEINT.ordinal()) {
commonType = Type.BIGINT;
}
type = castBinaryOp(commonType);
break;
default:
Preconditions.checkState(false,
"Unknown arithmetic operation " + op.toString() + " in: " + this.toSql());
break;
}
}
/**
* Convert integer type to decimal type.
*/
public static Type convertIntToDecimalV3Type(Type type) throws AnalysisException {
if (type.isLargeIntType()) {
return ScalarType.createDecimalV3Type(ScalarType.MAX_DECIMAL128_PRECISION, 0);
} else if (type.isBigIntType()) {
return ScalarType.createDecimalV3Type(ScalarType.MAX_DECIMAL64_PRECISION, 0);
} else if (type.isInteger32Type()) {
return ScalarType.createDecimalV3Type(ScalarType.MAX_DECIMAL32_PRECISION, 0);
} else {
Preconditions.checkState(false,
"Implicit converting to decimal for arithmetic operations only support integer");
return Type.INVALID;
}
}
/**
 * Converts a DECIMALV2 type to DECIMALV3, preserving precision and scale.
 *
 * @param type the DECIMALV2 scalar type to convert
 * @return an equivalent DECIMALV3 type
 */
public static Type convertDecimalV2ToDecimalV3Type(ScalarType type) {
    final int precision = type.decimalPrecision();
    final int scale = type.decimalScale();
    return ScalarType.createDecimalV3Type(precision, scale);
}
// Computes the DECIMALV3 result precision/scale for an arithmetic op and
// inserts the casts needed on both children. t1/t2 are the child types;
// non-decimal operands are first normalized to DECIMALV3. Mutates this expr.
private void analyzeDecimalV3Op(Type t1, Type t2) throws AnalysisException {
Type t1TargetType = t1;
Type t2TargetType = t2;
switch (op) {
case MULTIPLY:
case ADD:
case SUBTRACT:
case MOD:
case DIVIDE:
// Any floating-point operand forces the whole computation into DOUBLE.
if (t1.isFloatingPointType() || t2.isFloatingPointType()) {
type = castBinaryOp(ScalarType.DOUBLE);
break;
}
// Normalize both operands to DECIMALV3 (integers and DECIMALV2 alike).
if (t1.isFixedPointType()) {
t1TargetType = convertIntToDecimalV3Type(t1);
castChild(t1TargetType, 0);
}
if (t2.isFixedPointType()) {
t2TargetType = convertIntToDecimalV3Type(t2);
castChild(t2TargetType, 1);
}
if (t1.isDecimalV2()) {
t1TargetType = convertDecimalV2ToDecimalV3Type((ScalarType) t1);
castChild(t1TargetType, 0);
}
if (t2.isDecimalV2()) {
t2TargetType = convertDecimalV2ToDecimalV3Type((ScalarType) t2);
castChild(t2TargetType, 1);
}
final int t1Precision = ((ScalarType) t1TargetType).getScalarPrecision();
final int t2Precision = ((ScalarType) t2TargetType).getScalarPrecision();
final int t1Scale = ((ScalarType) t1TargetType).getScalarScale();
final int t2Scale = ((ScalarType) t2TargetType).getScalarScale();
int precision = Math.max(t1Precision, t2Precision);
int scale = Math.max(t1Scale, t2Scale);
// Width of the integer (pre-decimal-point) part of each operand.
int widthOfIntPart1 = t1Precision - t1Scale;
int widthOfIntPart2 = t2Precision - t2Scale;
if (op == Operator.MULTIPLY) {
// multiply: scales add, precisions add.
scale = t1Scale + t2Scale;
precision = t1Precision + t2Precision;
} else if (op == Operator.DIVIDE) {
// divide: result scale grows by Config.div_precision_increment.
// NOTE(review): this uses t1TargetType.getPrecision() while the other
// branches use getScalarPrecision() (t1Precision) — confirm the
// asymmetry is intended.
precision = t1TargetType.getPrecision() + t2Scale + Config.div_precision_increment;
scale = t1Scale + Config.div_precision_increment;
} else if (op == Operator.ADD || op == Operator.SUBTRACT) {
// add/sub: keep the max scale; widen the int part by one carry digit.
scale = Math.max(t1Scale, t2Scale);
precision = Math.max(widthOfIntPart1, widthOfIntPart2) + scale + 1;
} else {
// MOD: provisional value; refined again in the MOD branch below.
scale = Math.max(t1Scale, t2Scale);
precision = widthOfIntPart2 + scale;
}
if (precision > ScalarType.MAX_DECIMAL128_PRECISION) {
// Clamp to the widest supported decimal.
precision = ScalarType.MAX_DECIMAL128_PRECISION;
}
if (precision < scale) {
// Not representable as a decimal: fall back to DOUBLE.
type = castBinaryOp(Type.DOUBLE);
break;
}
type = ScalarType.createDecimalV3Type(precision, scale);
if (op == Operator.ADD || op == Operator.SUBTRACT) {
// add/sub require both children to be at the result scale.
if (((ScalarType) type).getScalarScale() != ((ScalarType) children.get(0).type).getScalarScale()) {
castChild(type, 0);
}
if (((ScalarType) type).getScalarScale() != ((ScalarType) children.get(1).type).getScalarScale()) {
castChild(type, 1);
}
} else if (op == Operator.DIVIDE && (t1TargetType.isDecimalV3())) {
// divide: pre-scale the numerator so the division produces the
// desired result scale.
int leftPrecision = t1Precision + t2Scale + Config.div_precision_increment;
int leftScale = t1Scale + t2Scale + Config.div_precision_increment;
if (leftPrecision < leftScale || leftPrecision > ScalarType.MAX_DECIMAL128_PRECISION) {
type = castBinaryOp(Type.DOUBLE);
break;
}
Expr child = getChild(0);
if (child instanceof DecimalLiteral) {
// Decimal literals are rewritten directly instead of wrapped in a cast.
DecimalLiteral literalChild = (DecimalLiteral) child;
Expr newChild = literalChild
.castToDecimalV3ByDivde(ScalarType.createDecimalV3Type(leftPrecision, leftScale));
setChild(0, newChild);
} else {
castChild(ScalarType.createDecimalV3Type(leftPrecision, leftScale), 0);
}
} else if (op == Operator.MOD) {
// MOD: both children are cast to the unified result type.
precision = Math.max(widthOfIntPart1, widthOfIntPart2) + scale;
if (precision > ScalarType.MAX_DECIMAL128_PRECISION) {
type = castBinaryOp(Type.DOUBLE);
} else {
type = ScalarType.createDecimalV3Type(precision, scale);
castChild(type, 0);
castChild(type, 1);
}
}
break;
case INT_DIVIDE:
case BITAND:
case BITOR:
case BITXOR:
// Integer-style operators are computed in BIGINT.
type = castBinaryOp(Type.BIGINT);
break;
case BITNOT:
case FACTORIAL:
// Unary operators: nothing to unify here.
break;
default:
Preconditions.checkState(false,
"Unknown arithmetic operation " + op + " in: " + this.toSql());
break;
}
}
/**
 * Analyzes this arithmetic expression: unifies operand types, inserts the
 * required casts on the children, and resolves the builtin function that
 * implements the operator. Sets {@code type} and {@code fn} as side effects.
 *
 * @param analyzer analyzer context (required by the override contract)
 * @throws AnalysisException if operand types cannot be unified
 */
@Override
public void analyzeImpl(Analyzer analyzer) throws AnalysisException {
    if (op == Operator.BITNOT) {
        // Unary bitwise-not: clamp non-integer operands to BIGINT, otherwise
        // the child's own integer type is the result type.
        Type t = getChild(0).getType();
        if (t.getPrimitiveType().ordinal() > PrimitiveType.LARGEINT.ordinal()) {
            type = Type.BIGINT;
            castChild(type, 0);
        } else {
            type = t;
        }
        fn = getBuiltinFunction(op.getName(), collectChildReturnTypes(), Function.CompareMode.IS_SUPERTYPE_OF);
        if (fn == null) {
            // BUGFIX: the original format string had no placeholder, so the
            // toSql() argument was silently dropped from the error message.
            Preconditions.checkState(false, String.format("No match for op with operand types: %s", toSql()));
        }
        return;
    }
    analyzeSubqueryInChildren();
    // Defer further analysis until subqueries have been rewritten.
    if (contains(Subquery.class)) {
        return;
    }
    Type t1 = getChild(0).getType();
    Type t2 = getChild(1).getType();
    // A NULL operand takes on the other side's type.
    if (t1.isNull() || t2.isNull()) {
        castBinaryOp(t1.isNull() ? t2 : t1);
        t1 = getChild(0).getType();
        t2 = getChild(1).getType();
    }
    // Coerce non-numeric operands to their numeric result type.
    if (!t1.isNumericType()) {
        castChild(t1.getNumResultType(), 0);
        t1 = t1.getNumResultType();
    }
    if (!t2.isNumericType()) {
        castChild(t2.getNumResultType(), 1);
        t2 = t2.getNumResultType();
    }
    // DECIMALV3 operands follow dedicated precision/scale rules.
    if (t1.isDecimalV3() || t2.isDecimalV3()) {
        analyzeDecimalV3Op(t1, t2);
    } else {
        analyzeNoneDecimalOp(t1, t2);
    }
    fn = getBuiltinFunction(op.name, collectChildReturnTypes(), Function.CompareMode.IS_IDENTICAL);
    if (fn == null) {
        Preconditions.checkState(false,
                String.format("No match for vec function '%s' with operand types %s and %s", toSql(), t1, t2));
    }
    // Decimal ops already set an exact result type above; keep it.
    if (!fn.getReturnType().isDecimalV3()) {
        type = fn.getReturnType();
    }
}
/**
 * Validates every subquery child of this arithmetic expression.
 * <p>
 * Each subquery must produce exactly one column; non-scalar subqueries get a
 * runtime row-count assertion so that more than one row raises an error.
 * Example: {@code 0.9 * (select k1 from t2)} becomes
 * {@code 0.9 * (select k1 from t2 (assert row count: return error if row count > 1))}.
 *
 * @throws AnalysisException if a subquery returns more than one column
 */
public void analyzeSubqueryInChildren() throws AnalysisException {
    for (Expr child : children) {
        if (!(child instanceof Subquery)) {
            continue;
        }
        Subquery subquery = (Subquery) child;
        if (!subquery.returnsScalarColumn()) {
            throw new AnalysisException(
                    "Subquery of arithmetic expr must return a single column: " + child.toSql());
        }
        // Non-scalar subquery: guard it with an LE-1 row-count assertion.
        if (!subquery.getType().isScalarType()) {
            subquery.getStatement().setAssertNumRowsElement(1, AssertNumRowsElement.Assertion.LE);
        }
    }
}
/**
 * Combines the parent expression hash with this node's operator.
 * Produces the same value as {@code 31 * super.hashCode() + Objects.hashCode(op)}.
 */
@Override
public int hashCode() {
    int result = super.hashCode();
    result = 31 * result + Objects.hashCode(op);
    return result;
}
/**
 * Compacts literal children via the parent implementation, then re-runs the
 * DECIMALV3 analysis if either operand now carries a DECIMALV3 type.
 *
 * @param type the target type used by the parent compaction
 * @throws AnalysisException if decimal re-analysis fails
 */
@Override
protected void compactForLiteral(Type type) throws AnalysisException {
    super.compactForLiteral(type);
    Type leftType = getChild(0).getType();
    Type rightType = getChild(1).getType();
    boolean hasDecimalV3Operand = leftType.isDecimalV3() || rightType.isDecimalV3();
    if (hasDecimalV3Operand) {
        analyzeDecimalV3Op(leftType, rightType);
    }
}
/**
 * Serializes this expression: operator name, child count, then each child.
 * The layout must stay in sync with {@link #read(DataInput)}.
 *
 * @param out destination stream
 * @throws IOException on write failure
 */
@Override
public void write(DataOutput out) throws IOException {
    Text.writeString(out, op.name());
    int childCount = children.size();
    out.writeInt(childCount);
    for (int i = 0; i < childCount; i++) {
        Expr.writeTo(children.get(i), out);
    }
}
/**
 * Deserializes an ArithmeticExpr in the order produced by {@code write}:
 * operator name, child count, then up to two children.
 *
 * @param in source stream
 * @return the reconstructed expression
 * @throws IOException on read failure
 */
public static ArithmeticExpr read(DataInput in) throws IOException {
    Operator op = Operator.valueOf(Text.readString(in));
    int childNum = in.readInt();
    Preconditions.checkState(childNum <= 2, childNum);
    // Children are read in declaration order, matching the serialized layout.
    Expr child1 = childNum > 0 ? Expr.readIn(in) : null;
    Expr child2 = childNum > 1 ? Expr.readIn(in) : null;
    return new ArithmeticExpr(op, child1, child2);
}
} |
Can't we check for a flag instead? ENUM_MEMBER for example. | public void visit(BLangFiniteTypeNode finiteTypeNode, AnalyzerData data) {
// Body of visit(BLangFiniteTypeNode, AnalyzerData): analyzes each member of a
// finite type's value space; unary expressions (e.g. -1) are type-checked and
// folded into plain numeric literals.
boolean foundUnaryExpr = false;
boolean isErroredExprInFiniteType = false;
NodeKind valueKind;
BLangExpression value;
for (int i = 0; i < finiteTypeNode.valueSpace.size(); i++) {
value = finiteTypeNode.valueSpace.get(i);
valueKind = value.getKind();
if (valueKind == NodeKind.UNARY_EXPR) {
foundUnaryExpr = true;
// Type-check the unary expression before folding it.
BType resultType = typeChecker.checkExpr(value, data.env, symTable.noType, data.prevEnvs);
if (resultType == symTable.semanticError) {
isErroredExprInFiniteType = true;
}
// Fold e.g. -1 into a single numeric literal node.
BLangNumericLiteral newNumericLiteral =
Types.constructNumericLiteralFromUnaryExpr((BLangUnaryExpr) value);
finiteTypeNode.valueSpace.set(i, newNumericLiteral);
} else if ((valueKind == NodeKind.LITERAL || valueKind == NodeKind.NUMERIC_LITERAL) &&
((BLangLiteral) value).originalValue == null) {
// Synthesized literals (no source text) need no further analysis.
continue;
} else {
analyzeNode(value, data);
}
}
BFiniteType finiteType = (BFiniteType) finiteTypeNode.getBType();
if (foundUnaryExpr && finiteType != null) {
if (isErroredExprInFiniteType) {
// Any failed unary member poisons the whole finite type.
finiteTypeNode.setBType(symTable.semanticError);
} else {
// Rebuild the value space so it contains the folded literals.
finiteType.setValueSpace(new LinkedHashSet<>(finiteTypeNode.valueSpace));
}
}
} | public void visit(BLangFiniteTypeNode finiteTypeNode, AnalyzerData data) {
// Revised body of visit(BLangFiniteTypeNode, AnalyzerData): same folding of
// unary expressions into numeric literals, but only the error case mutates the
// node's type (the value-space rebuild was dropped in this version).
boolean foundUnaryExpr = false;
boolean isErroredExprInFiniteType = false;
NodeKind valueKind;
BLangExpression value;
for (int i = 0; i < finiteTypeNode.valueSpace.size(); i++) {
value = finiteTypeNode.valueSpace.get(i);
valueKind = value.getKind();
if (valueKind == NodeKind.UNARY_EXPR) {
foundUnaryExpr = true;
// Type-check the unary expression before folding it.
BType resultType = typeChecker.checkExpr(value, data.env, symTable.noType, data.prevEnvs);
if (resultType == symTable.semanticError) {
isErroredExprInFiniteType = true;
}
// Fold e.g. -1 into a single numeric literal node.
BLangNumericLiteral newNumericLiteral =
Types.constructNumericLiteralFromUnaryExpr((BLangUnaryExpr) value);
finiteTypeNode.valueSpace.set(i, newNumericLiteral);
} else if ((valueKind == NodeKind.LITERAL || valueKind == NodeKind.NUMERIC_LITERAL) &&
((BLangLiteral) value).originalValue == null) {
// Synthesized literals (no source text) need no further analysis.
continue;
} else {
analyzeNode(value, data);
}
}
// Any failed unary member poisons the whole finite type node.
if (foundUnaryExpr && isErroredExprInFiniteType) {
finiteTypeNode.setBType(symTable.semanticError);
}
} | class representing a service-decl or object-ctor with service prefix
AttachPoint.Point attachedPoint;
Set<Flag> flagSet = classDefinition.flagSet;
if (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {
attachedPoint = AttachPoint.Point.SERVICE;
} | class representing a service-decl or object-ctor with service prefix
AttachPoint.Point attachedPoint;
Set<Flag> flagSet = classDefinition.flagSet;
if (flagSet.contains(Flag.OBJECT_CTOR) && flagSet.contains(Flag.SERVICE)) {
attachedPoint = AttachPoint.Point.SERVICE;
} | |
Or you could just use a if statement, and then you would not be using any metaspace or incurring a runtime cost. | public Object getReference(Bean<?> bean, Type beanType, CreationalContext<?> ctx) {
// Body of BeanManager.getReference: resolves a contextual reference; only
// ArC's own InjectableBean/CreationalContextImpl implementations are accepted.
Objects.requireNonNull(bean, () -> "Managed Bean [" + beanType + "] is null");
Objects.requireNonNull(ctx, "CreationalContext is null");
if (bean instanceof InjectableBean && ctx instanceof CreationalContextImpl) {
return ArcContainerImpl.instance().beanInstanceHandle((InjectableBean) bean, (CreationalContextImpl) ctx).get();
}
// Reaching here means a foreign Bean/CreationalContext implementation.
throw new IllegalArgumentException(
"Arguments must be instances of " + InjectableBean.class + " and " + CreationalContextImpl.class + ": \nbean: " + bean + "\nctx: " + ctx);
} | Objects.requireNonNull(bean, () -> "Managed Bean [" + beanType + "] is null"); | public Object getReference(Bean<?> bean, Type beanType, CreationalContext<?> ctx) {
// Revised body of getReference: a plain null check replaces the
// supplier-based requireNonNull overload, avoiding the lambda allocation.
if (bean == null) {
throw new NullPointerException("Managed Bean [" + beanType + "] is null");
}
Objects.requireNonNull(ctx, "CreationalContext is null");
if (bean instanceof InjectableBean && ctx instanceof CreationalContextImpl) {
return ArcContainerImpl.instance().beanInstanceHandle((InjectableBean) bean, (CreationalContextImpl) ctx).get();
}
// Reaching here means a foreign Bean/CreationalContext implementation.
throw new IllegalArgumentException(
"Arguments must be instances of " + InjectableBean.class + " and " + CreationalContextImpl.class + ": \nbean: " + bean + "\nctx: " + ctx);
} | class BeanManagerImpl implements BeanManager {
static final LazyValue<BeanManagerImpl> INSTANCE = new LazyValue<>(BeanManagerImpl::new);
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
@Override
public Object getInjectableReference(InjectionPoint ij, CreationalContext<?> ctx) {
throw new UnsupportedOperationException();
}
@Override
public <T> CreationalContext<T> createCreationalContext(Contextual<T> contextual) {
return new CreationalContextImpl<>();
}
@Override
public Set<Bean<?>> getBeans(Type beanType, Annotation... qualifiers) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(beanType), qualifiers);
}
@Override
public Set<Bean<?>> getBeans(String name) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(name));
}
@Override
public Bean<?> getPassivationCapableBean(String id) {
throw new UnsupportedOperationException();
}
@Override
public <X> Bean<? extends X> resolve(Set<Bean<? extends X>> beans) {
return ArcContainerImpl.resolve(beans);
}
@Override
public void validate(InjectionPoint injectionPoint) {
throw new UnsupportedOperationException();
}
@Override
public void fireEvent(Object event, Annotation... qualifiers) {
getEvent().select(qualifiers).fire(event);
}
@Override
public <T> Set<ObserverMethod<? super T>> resolveObserverMethods(T event, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Decorator<?>> resolveDecorators(Set<Type> types, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Interceptor<?>> resolveInterceptors(InterceptionType type, Annotation... interceptorBindings) {
return ArcContainerImpl.instance().resolveInterceptors(Objects.requireNonNull(type), interceptorBindings);
}
@Override
public boolean isScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isNormalScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isPassivatingScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isQualifier(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isInterceptorBinding(Class<? extends Annotation> annotationType) {
return annotationType.isAnnotationPresent(InterceptorBinding.class);
}
@Override
public boolean isStereotype(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getInterceptorBindingDefinition(Class<? extends Annotation> bindingType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getStereotypeDefinition(Class<? extends Annotation> stereotype) {
throw new UnsupportedOperationException();
}
@Override
public boolean areQualifiersEquivalent(Annotation qualifier1, Annotation qualifier2) {
throw new UnsupportedOperationException();
}
@Override
public boolean areInterceptorBindingsEquivalent(Annotation interceptorBinding1, Annotation interceptorBinding2) {
throw new UnsupportedOperationException();
}
@Override
public int getQualifierHashCode(Annotation qualifier) {
throw new UnsupportedOperationException();
}
@Override
public int getInterceptorBindingHashCode(Annotation interceptorBinding) {
throw new UnsupportedOperationException();
}
@Override
public Context getContext(Class<? extends Annotation> scopeType) {
return Arc.container().getContext(scopeType);
}
@Override
public ELResolver getELResolver() {
throw new UnsupportedOperationException();
}
@Override
public ExpressionFactory wrapExpressionFactory(ExpressionFactory expressionFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T> AnnotatedType<T> createAnnotatedType(Class<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTarget<T> createInjectionTarget(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTargetFactory<T> getInjectionTargetFactory(AnnotatedType<T> annotatedType) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedField<? super X> field, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedMethod<? super X> method, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <T> BeanAttributes<T> createBeanAttributes(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public BeanAttributes<?> createBeanAttributes(AnnotatedMember<?> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> Bean<T> createBean(BeanAttributes<T> attributes, Class<T> beanClass, InjectionTargetFactory<T> injectionTargetFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T, X> Bean<T> createBean(BeanAttributes<T> attributes, Class<X> beanClass, ProducerFactory<X> producerFactory) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedField<?> field) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedParameter<?> parameter) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Extension> T getExtension(Class<T> extensionClass) {
throw new UnsupportedOperationException();
}
@Override
public <T> InterceptionFactory<T> createInterceptionFactory(CreationalContext<T> ctx, Class<T> clazz) {
throw new UnsupportedOperationException();
}
@Override
public Event<Object> getEvent() {
return new EventImpl<>(Object.class, new HashSet<>());
}
@Override
public Instance<Object> createInstance() {
return new InstanceImpl<>(Object.class, null, new CreationalContextImpl<>());
}
} | class BeanManagerImpl implements BeanManager {
static final LazyValue<BeanManagerImpl> INSTANCE = new LazyValue<>(BeanManagerImpl::new);
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
@Override
public Object getInjectableReference(InjectionPoint ij, CreationalContext<?> ctx) {
throw new UnsupportedOperationException();
}
@Override
public <T> CreationalContext<T> createCreationalContext(Contextual<T> contextual) {
return new CreationalContextImpl<>();
}
@Override
public Set<Bean<?>> getBeans(Type beanType, Annotation... qualifiers) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(beanType), qualifiers);
}
@Override
public Set<Bean<?>> getBeans(String name) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(name));
}
@Override
public Bean<?> getPassivationCapableBean(String id) {
throw new UnsupportedOperationException();
}
@Override
public <X> Bean<? extends X> resolve(Set<Bean<? extends X>> beans) {
return ArcContainerImpl.resolve(beans);
}
@Override
public void validate(InjectionPoint injectionPoint) {
throw new UnsupportedOperationException();
}
@Override
public void fireEvent(Object event, Annotation... qualifiers) {
getEvent().select(qualifiers).fire(event);
}
@Override
public <T> Set<ObserverMethod<? super T>> resolveObserverMethods(T event, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Decorator<?>> resolveDecorators(Set<Type> types, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Interceptor<?>> resolveInterceptors(InterceptionType type, Annotation... interceptorBindings) {
return ArcContainerImpl.instance().resolveInterceptors(Objects.requireNonNull(type), interceptorBindings);
}
@Override
public boolean isScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isNormalScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isPassivatingScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isQualifier(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isInterceptorBinding(Class<? extends Annotation> annotationType) {
return annotationType.isAnnotationPresent(InterceptorBinding.class);
}
@Override
public boolean isStereotype(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getInterceptorBindingDefinition(Class<? extends Annotation> bindingType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getStereotypeDefinition(Class<? extends Annotation> stereotype) {
throw new UnsupportedOperationException();
}
@Override
public boolean areQualifiersEquivalent(Annotation qualifier1, Annotation qualifier2) {
throw new UnsupportedOperationException();
}
@Override
public boolean areInterceptorBindingsEquivalent(Annotation interceptorBinding1, Annotation interceptorBinding2) {
throw new UnsupportedOperationException();
}
@Override
public int getQualifierHashCode(Annotation qualifier) {
throw new UnsupportedOperationException();
}
@Override
public int getInterceptorBindingHashCode(Annotation interceptorBinding) {
throw new UnsupportedOperationException();
}
@Override
public Context getContext(Class<? extends Annotation> scopeType) {
return Arc.container().getContext(scopeType);
}
@Override
public ELResolver getELResolver() {
throw new UnsupportedOperationException();
}
@Override
public ExpressionFactory wrapExpressionFactory(ExpressionFactory expressionFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T> AnnotatedType<T> createAnnotatedType(Class<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTarget<T> createInjectionTarget(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTargetFactory<T> getInjectionTargetFactory(AnnotatedType<T> annotatedType) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedField<? super X> field, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedMethod<? super X> method, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <T> BeanAttributes<T> createBeanAttributes(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public BeanAttributes<?> createBeanAttributes(AnnotatedMember<?> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> Bean<T> createBean(BeanAttributes<T> attributes, Class<T> beanClass, InjectionTargetFactory<T> injectionTargetFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T, X> Bean<T> createBean(BeanAttributes<T> attributes, Class<X> beanClass, ProducerFactory<X> producerFactory) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedField<?> field) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedParameter<?> parameter) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Extension> T getExtension(Class<T> extensionClass) {
throw new UnsupportedOperationException();
}
@Override
public <T> InterceptionFactory<T> createInterceptionFactory(CreationalContext<T> ctx, Class<T> clazz) {
throw new UnsupportedOperationException();
}
@Override
public Event<Object> getEvent() {
return new EventImpl<>(Object.class, new HashSet<>());
}
@Override
public Instance<Object> createInstance() {
return new InstanceImpl<>(Object.class, null, new CreationalContextImpl<>());
}
} |
I followed the same pattern used for other test cases in this specific test | public void testInferredArrayInitWithInGrpExpr() {
// Body of testInferredArrayInitWithInGrpExpr (original version): invokes the
// Ballerina function and asserts on the returned [1, 2, "a"] array in Java.
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "testInferredArrayInitWithInGrpExpr", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 3);
Assert.assertEquals(arrayValue.getInt(0), 1);
Assert.assertEquals(arrayValue.getInt(1), 2);
Assert.assertEquals(arrayValue.getBString(2).getValue(), "a");
} | Assert.assertEquals(arrayValue.getBString(2).getValue(), "a"); | public void testInferredArrayInitWithInGrpExpr() {
BRunUtil.invoke(compileResult, "testInferredArrayInitWithInGrpExpr");
} | class ArrayInitializerExprTest {
private CompileResult compileResult;
@BeforeClass
public void setup() {
compileResult = BCompileUtil.compile("test-src/statements/arrays/array-initializer-expr.bal");
}
@Test(description = "Test arrays initializer expression")
public void testArrayInitExpr() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "arrayInitTest", args);
Assert.assertSame(returns.getClass(), Long.class);
long actual = (long) returns;
long expected = 110;
Assert.assertEquals(actual, expected);
}
@Test(description = "Test arrays return value")
public void testArrayReturnValueTest() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "arrayReturnTest", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 6);
Assert.assertEquals(arrayValue.getString(0), "Lion");
Assert.assertEquals(arrayValue.getString(1), "Cat");
Assert.assertEquals(arrayValue.getString(5), "Croc");
}
@Test(description = "Test array of finite type and nil")
public void finiteTypeArray() {
Object returns = BRunUtil.invoke(compileResult, "finiteTypeArray");
Assert.assertEquals(returns.toString(), "Terminating");
}
@Test
public void testMultiTypeMapInit() {
CompileResult result = BCompileUtil.compile("test-src/statements/arrays/multi-type-array-initializer.bal");
BAssertUtil.validateError(result, 0, "incompatible types: expected 'int', found 'string'", 3, 10);
}
@Test(description = "Test nested array inline initializing")
public void testNestedArrayInit() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "testNestedArrayInit", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 2);
Object element = arrayValue.getRefValue(0);
Assert.assertTrue(element instanceof BArray);
BArray elementArray = (BArray) element;
Assert.assertEquals(elementArray.size(), 3);
Assert.assertEquals(elementArray.getInt(0), 1);
Assert.assertEquals(elementArray.getInt(1), 2);
Assert.assertEquals(elementArray.getInt(2), 3);
element = arrayValue.getRefValue(1);
Assert.assertTrue(element instanceof BArray);
elementArray = (BArray) element;
Assert.assertEquals(elementArray.size(), 4);
Assert.assertEquals(elementArray.getInt(0), 6);
Assert.assertEquals(elementArray.getInt(1), 7);
Assert.assertEquals(elementArray.getInt(2), 8);
Assert.assertEquals(elementArray.getInt(3), 9);
}
@Test(description = "Test array of maps inline initializing")
public void testArrayOfMapsInit() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "testArrayOfMapsInit", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 3);
Object adrs1 = arrayValue.getRefValue(0);
Assert.assertTrue(adrs1 instanceof BMap<?, ?>);
Object address = ((BMap) adrs1).get(StringUtils.fromString("address"));
Assert.assertTrue(address instanceof BMap<?, ?>);
Assert.assertEquals(((BMap) address).get(StringUtils.fromString("city")).toString(), "Colombo");
Object adrs2 = arrayValue.getRefValue(1);
Assert.assertTrue(adrs2 instanceof BMap<?, ?>);
address = ((BMap) adrs2).get(StringUtils.fromString("address"));
Assert.assertTrue(address instanceof BMap<?, ?>);
Assert.assertEquals(((BMap) address).get(StringUtils.fromString("city")).toString(), "Kandy");
Object adrs3 = arrayValue.getRefValue(2);
Assert.assertTrue(adrs3 instanceof BMap<?, ?>);
address = ((BMap) adrs3).get(StringUtils.fromString("address"));
Assert.assertTrue(address instanceof BMap<?, ?>);
Assert.assertEquals(((BMap) address).get(StringUtils.fromString("city")).toString(), "Galle");
}
@Test(description = "Test float array initialization with integer values")
public void testFloatArrayInitWithIntExpr() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "floatArrayInitWithInt", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 3);
Assert.assertEquals(arrayValue.getFloat(0), 2.0);
Assert.assertEquals(arrayValue.getFloat(1), 4.0);
Assert.assertEquals(arrayValue.getFloat(2), 5.0);
}
@Test
@AfterClass
public void tearDown() {
compileResult = null;
}
} | class ArrayInitializerExprTest {
private CompileResult compileResult;
@BeforeClass
public void setup() {
compileResult = BCompileUtil.compile("test-src/statements/arrays/array-initializer-expr.bal");
}
@Test(description = "Test arrays initializer expression")
public void testArrayInitExpr() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "arrayInitTest", args);
Assert.assertSame(returns.getClass(), Long.class);
long actual = (long) returns;
long expected = 110;
Assert.assertEquals(actual, expected);
}
@Test(description = "Test arrays return value")
public void testArrayReturnValueTest() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "arrayReturnTest", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 6);
Assert.assertEquals(arrayValue.getString(0), "Lion");
Assert.assertEquals(arrayValue.getString(1), "Cat");
Assert.assertEquals(arrayValue.getString(5), "Croc");
}
@Test(description = "Test array of finite type and nil")
public void finiteTypeArray() {
Object returns = BRunUtil.invoke(compileResult, "finiteTypeArray");
Assert.assertEquals(returns.toString(), "Terminating");
}
@Test
public void testMultiTypeMapInit() {
CompileResult result = BCompileUtil.compile("test-src/statements/arrays/multi-type-array-initializer.bal");
BAssertUtil.validateError(result, 0, "incompatible types: expected 'int', found 'string'", 3, 10);
}
@Test(description = "Test nested array inline initializing")
public void testNestedArrayInit() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "testNestedArrayInit", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 2);
Object element = arrayValue.getRefValue(0);
Assert.assertTrue(element instanceof BArray);
BArray elementArray = (BArray) element;
Assert.assertEquals(elementArray.size(), 3);
Assert.assertEquals(elementArray.getInt(0), 1);
Assert.assertEquals(elementArray.getInt(1), 2);
Assert.assertEquals(elementArray.getInt(2), 3);
element = arrayValue.getRefValue(1);
Assert.assertTrue(element instanceof BArray);
elementArray = (BArray) element;
Assert.assertEquals(elementArray.size(), 4);
Assert.assertEquals(elementArray.getInt(0), 6);
Assert.assertEquals(elementArray.getInt(1), 7);
Assert.assertEquals(elementArray.getInt(2), 8);
Assert.assertEquals(elementArray.getInt(3), 9);
}
@Test(description = "Test array of maps inline initializing")
public void testArrayOfMapsInit() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "testArrayOfMapsInit", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 3);
Object adrs1 = arrayValue.getRefValue(0);
Assert.assertTrue(adrs1 instanceof BMap<?, ?>);
Object address = ((BMap) adrs1).get(StringUtils.fromString("address"));
Assert.assertTrue(address instanceof BMap<?, ?>);
Assert.assertEquals(((BMap) address).get(StringUtils.fromString("city")).toString(), "Colombo");
Object adrs2 = arrayValue.getRefValue(1);
Assert.assertTrue(adrs2 instanceof BMap<?, ?>);
address = ((BMap) adrs2).get(StringUtils.fromString("address"));
Assert.assertTrue(address instanceof BMap<?, ?>);
Assert.assertEquals(((BMap) address).get(StringUtils.fromString("city")).toString(), "Kandy");
Object adrs3 = arrayValue.getRefValue(2);
Assert.assertTrue(adrs3 instanceof BMap<?, ?>);
address = ((BMap) adrs3).get(StringUtils.fromString("address"));
Assert.assertTrue(address instanceof BMap<?, ?>);
Assert.assertEquals(((BMap) address).get(StringUtils.fromString("city")).toString(), "Galle");
}
@Test(description = "Test float array initialization with integer values")
public void testFloatArrayInitWithIntExpr() {
Object[] args = {};
Object returns = BRunUtil.invoke(compileResult, "floatArrayInitWithInt", args);
Assert.assertTrue(returns instanceof BArray);
BArray arrayValue = (BArray) returns;
Assert.assertEquals(arrayValue.size(), 3);
Assert.assertEquals(arrayValue.getFloat(0), 2.0);
Assert.assertEquals(arrayValue.getFloat(1), 4.0);
Assert.assertEquals(arrayValue.getFloat(2), 5.0);
}
// Releases the compiled program reference once the whole class has run, so no
// state leaks between suite runs.
// NOTE(review): the method was annotated with BOTH @Test and @AfterClass, which
// makes TestNG also execute tearDown as a test case in its own right; the stray
// @Test has been dropped — @AfterClass alone expresses the intent.
@AfterClass
public void tearDown() {
    compileResult = null;
}
} |
"Result". But this message is in itself quite fishy. Could we make the error conditions here more crisp? | static boolean warmup(Linguistics linguistics) {
// Warmup body: parse one canned YQL query so later real queries hit warm code paths.
// On any failure, log a precise reason and report false.
Query query = new Query("search/?yql=select%20*%20from%20sources%20where%20title%20contains%20'xyz';");
Result result = insertQuery(query, new ParserEnvironment().setLinguistics(linguistics));
if (result != null) {
// insertQuery returns a non-null Result only to carry an error, so say so plainly
// instead of the previous vague "Something fishy. Reult = ..." message.
log.warning("YQL warmup query failed: " + result);
return false;
}
if ( ! "select * from sources where title contains \"xyz\";".equals(query.yqlRepresentation())) {
log.warning("YQL warmup query was parsed to an unexpected representation: " + query.yqlRepresentation());
return false;
}
return true;
} | log.warning("Something fishy. Reult = " + result.toString()); | static boolean warmup(Linguistics linguistics) {
// Warmup body: parse one canned YQL query so later real queries hit warm code paths.
Query query = new Query("search/?yql=select%20*%20from%20sources%20where%20title%20contains%20'xyz';");
Result result = insertQuery(query, new ParserEnvironment().setLinguistics(linguistics));
if (result != null) {
// Fixed grammar in the log message ("trigger" -> "triggered").
log.warning("Warmup code triggered an error. Error = " + result.toString());
return false;
}
if ( ! "select * from sources where title contains \"xyz\";".equals(query.yqlRepresentation())) {
log.warning("Warmup code generated unexpected yql: " + query.yqlRepresentation());
return false;
}
return true;
} | class MinimalQueryInserter extends Searcher {
// Query-property names read by this searcher.
public static final String EXTERNAL_YQL = "ExternalYql";
public static final CompoundName YQL = new CompoundName("yql");
private static final CompoundName MAX_HITS = new CompoundName("maxHits");
private static final CompoundName MAX_OFFSET = new CompoundName("maxOffset");
// Made final: the logger is assigned once and never reassigned.
private static final Logger log = Logger.getLogger(MinimalQueryInserter.class.getName());
// Warm up the YQL parser eagerly so the first user query avoids initialization cost.
@Inject
public MinimalQueryInserter(Linguistics linguistics) {
warmup(linguistics);
}
// Test-friendly constructor using the default linguistics implementation.
MinimalQueryInserter() {
this(new SimpleLinguistics());
}
// Convenience overload used by tests; reports whether warmup succeeded.
static boolean warmup() {
return warmup(new SimpleLinguistics());
}
/**
 * Parses the YQL statement in the 'yql' query property and transfers the outcome
 * (query tree, summary fields, grouping steps, sources, offset/hits, timeout,
 * sorting) onto the query.
 *
 * Returns an error Result if parsing fails or offset/hits exceed the configured
 * maxima; returns null on success (the query is modified in place).
 */
private static Result insertQuery(Query query, ParserEnvironment env) {
YqlParser parser = (YqlParser) ParserFactory.newInstance(Query.Type.YQL, env);
parser.setQueryParser(false);
parser.setUserQuery(query);
QueryTree newTree;
try {
Parsable parsable = Parsable.fromQueryModel(query.getModel()).setQuery(query.properties().getString(YQL));
newTree = parser.parse(parsable);
} catch (RuntimeException e) {
return new Result(query, ErrorMessage.createInvalidQueryParameter("Could not instantiate query from YQL", e));
}
// Validate pagination limits before mutating the query, so a rejected request
// leaves the query untouched.
if (parser.getOffset() != null) {
int maxHits = query.properties().getInteger(MAX_HITS);
int maxOffset = query.properties().getInteger(MAX_OFFSET);
if (parser.getOffset() > maxOffset) {
return new Result(query, ErrorMessage.createInvalidQueryParameter("Requested offset " + parser.getOffset()
+ ", but the max offset allowed is " + maxOffset + "."));
}
if (parser.getHits() > maxHits) {
return new Result(query, ErrorMessage.createInvalidQueryParameter("Requested " + parser.getHits()
+ " hits returned, but max hits allowed is " + maxHits + "."));
}
}
query.getModel().getQueryTree().setRoot(newTree.getRoot());
query.getPresentation().getSummaryFields().addAll(parser.getYqlSummaryFields());
for (VespaGroupingStep step : parser.getGroupingSteps()) {
GroupingRequest.newInstance(query)
.setRootOperation(step.getOperation())
.continuations().addAll(step.continuations());
}
// No explicit sources in the YQL means "search all sources": clear instead of add.
if (parser.getYqlSources().size() == 0) {
query.getModel().getSources().clear();
} else {
query.getModel().getSources().addAll(parser.getYqlSources());
}
if (parser.getOffset() != null) {
query.setOffset(parser.getOffset());
query.setHits(parser.getHits());
}
if (parser.getTimeout() != null) {
query.setTimeout(parser.getTimeout().longValue());
}
if (parser.getSorting() != null) {
query.getRanking().setSorting(parser.getSorting());
}
query.trace("YQL+ query parsed", true, 2);
return null;
}
// Rewrites the query from its 'yql' property when present; otherwise passes through.
@Override
public Result search(Query query, Execution execution) {
if (query.properties().get(YQL) == null) return execution.search(query);
Result result = insertQuery(query, ParserEnvironment.fromExecutionContext(execution.context()));
return (result == null) ? execution.search(query) : result;
}
} | class MinimalQueryInserter extends Searcher {
// Query-property names read by this searcher.
public static final String EXTERNAL_YQL = "ExternalYql";
public static final CompoundName YQL = new CompoundName("yql");
private static final CompoundName MAX_HITS = new CompoundName("maxHits");
private static final CompoundName MAX_OFFSET = new CompoundName("maxOffset");
// Made final: the logger is assigned once and never reassigned.
private static final Logger log = Logger.getLogger(MinimalQueryInserter.class.getName());
// Warm up the YQL parser eagerly so the first user query avoids initialization cost.
@Inject
public MinimalQueryInserter(Linguistics linguistics) {
warmup(linguistics);
}
// Test-friendly constructor using the default linguistics implementation.
MinimalQueryInserter() {
this(new SimpleLinguistics());
}
// Convenience overload used by tests; reports whether warmup succeeded.
static boolean warmup() {
return warmup(new SimpleLinguistics());
}
private
private static Result insertQuery(Query query, ParserEnvironment env) {
YqlParser parser = (YqlParser) ParserFactory.newInstance(Query.Type.YQL, env);
parser.setQueryParser(false);
parser.setUserQuery(query);
QueryTree newTree;
try {
Parsable parsable = Parsable.fromQueryModel(query.getModel()).setQuery(query.properties().getString(YQL));
newTree = parser.parse(parsable);
} catch (RuntimeException e) {
return new Result(query, ErrorMessage.createInvalidQueryParameter("Could not instantiate query from YQL", e));
}
if (parser.getOffset() != null) {
int maxHits = query.properties().getInteger(MAX_HITS);
int maxOffset = query.properties().getInteger(MAX_OFFSET);
if (parser.getOffset() > maxOffset) {
return new Result(query, ErrorMessage.createInvalidQueryParameter("Requested offset " + parser.getOffset()
+ ", but the max offset allowed is " + maxOffset + "."));
}
if (parser.getHits() > maxHits) {
return new Result(query, ErrorMessage.createInvalidQueryParameter("Requested " + parser.getHits()
+ " hits returned, but max hits allowed is " + maxHits + "."));
}
}
query.getModel().getQueryTree().setRoot(newTree.getRoot());
query.getPresentation().getSummaryFields().addAll(parser.getYqlSummaryFields());
for (VespaGroupingStep step : parser.getGroupingSteps()) {
GroupingRequest.newInstance(query)
.setRootOperation(step.getOperation())
.continuations().addAll(step.continuations());
}
if (parser.getYqlSources().size() == 0) {
query.getModel().getSources().clear();
} else {
query.getModel().getSources().addAll(parser.getYqlSources());
}
if (parser.getOffset() != null) {
query.setOffset(parser.getOffset());
query.setHits(parser.getHits());
}
if (parser.getTimeout() != null) {
query.setTimeout(parser.getTimeout().longValue());
}
if (parser.getSorting() != null) {
query.getRanking().setSorting(parser.getSorting());
}
query.trace("YQL+ query parsed", true, 2);
return null;
}
// Rewrites the query from its 'yql' property when present; otherwise passes through.
@Override
public Result search(Query query, Execution execution) {
if (query.properties().get(YQL) == null) return execution.search(query);
Result result = insertQuery(query, ParserEnvironment.fromExecutionContext(execution.context()));
return (result == null) ? execution.search(query) : result;
}
} |
Yes, `hasEntity` is returning `true` because there is an entityStream. | public void handle(ResteasyReactiveRequestContext requestContext) throws Exception {
// Converts the endpoint's return value into the response to send. A returned
// jax-rs Response is normalized eagerly into a LazyResponse.Existing; any other
// value (or null) is wrapped so the Response is only built when first needed.
Object result = requestContext.getResult();
if (result instanceof Response) {
boolean mediaTypeAlreadyExists = false;
ResponseBuilderImpl responseBuilder;
Response existing = (Response) result;
if (existing.getEntity() instanceof GenericEntity) {
// Unwrap GenericEntity so serialization sees the declared generic type.
GenericEntity<?> genericEntity = (GenericEntity<?>) existing.getEntity();
requestContext.setGenericReturnType(genericEntity.getType());
responseBuilder = fromResponse(existing);
responseBuilder.entity(genericEntity.getEntity());
} else {
// hasEntity() alone is not enough: it is also true when only an entity
// stream (not an entity object) is set, so getEntity() is checked too.
if (existing.hasEntity() && (existing.getEntity() != null))
requestContext.setGenericReturnType(existing.getEntity().getClass());
responseBuilder = fromResponse((Response) result);
if ((result instanceof ResponseImpl)) {
ResponseImpl responseImpl = (ResponseImpl) result;
if (responseImpl.getEntityAnnotations() != null) {
requestContext.setAdditionalAnnotations(responseImpl.getEntityAnnotations());
}
// No entity object: materialize a byte[] entity from the buffered stream.
if (responseBuilder.getEntity() == null) {
if (responseImpl.getEntityStream() instanceof ByteArrayInputStream) {
ByteArrayInputStream byteArrayInputStream = (ByteArrayInputStream) responseImpl.getEntityStream();
responseBuilder.entity(byteArrayInputStream.readAllBytes());
}
}
}
}
if (existing.getMediaType() != null) {
requestContext.setResponseContentType(existing.getMediaType());
mediaTypeAlreadyExists = true;
}
// Only add a Content-Type header when the Response did not already carry one.
EncodedMediaType produces = requestContext.getResponseContentType();
if (!mediaTypeAlreadyExists && produces != null) {
responseBuilder.header(HttpHeaders.CONTENT_TYPE, produces.toString());
}
if ((responseBuilder instanceof ResponseBuilderImpl)) {
// build(false) avoids defensive copying for our own builder implementation.
requestContext
.setResponse(
new LazyResponse.Existing(((ResponseBuilderImpl) responseBuilder).build(false)));
} else {
requestContext.setResponse(new LazyResponse.Existing(responseBuilder.build()));
}
} else {
// Non-Response result: defer Response construction until it is requested.
requestContext.setResponse(new LazyResponse() {
Response response;
@Override
public Response get() {
if (response == null) {
Response.ResponseBuilder responseBuilder;
if (result instanceof GenericEntity) {
GenericEntity<?> genericEntity = (GenericEntity<?>) result;
requestContext.setGenericReturnType(genericEntity.getType());
responseBuilder = ResponseImpl.ok(genericEntity.getEntity());
} else if (result == null) {
responseBuilder = ResponseImpl.noContent();
} else {
responseBuilder = ResponseImpl.ok(result);
}
EncodedMediaType produces = requestContext.getResponseContentType();
if (produces != null) {
responseBuilder.header(HttpHeaders.CONTENT_TYPE, produces.toString());
}
if ((responseBuilder instanceof ResponseBuilderImpl)) {
response = ((ResponseBuilderImpl) responseBuilder).build(false);
} else {
response = responseBuilder.build();
}
}
return response;
}
@Override
public boolean isCreated() {
return response != null;
}
});
}
} | if (existing.hasEntity() && (existing.getEntity() != null)) | public void handle(ResteasyReactiveRequestContext requestContext) throws Exception {
// Converts the endpoint's return value into the response to send; Responses are
// normalized eagerly, everything else is wrapped in a lazily-built LazyResponse.
Object result = requestContext.getResult();
if (result instanceof Response) {
boolean mediaTypeAlreadyExists = false;
ResponseBuilderImpl responseBuilder;
Response existing = (Response) result;
if (existing.getEntity() instanceof GenericEntity) {
// Unwrap GenericEntity so serialization sees the declared generic type.
GenericEntity<?> genericEntity = (GenericEntity<?>) existing.getEntity();
requestContext.setGenericReturnType(genericEntity.getType());
responseBuilder = fromResponse(existing);
responseBuilder.entity(genericEntity.getEntity());
} else {
// hasEntity() is also true for a bare entity stream, hence the null check.
if (existing.hasEntity() && (existing.getEntity() != null))
requestContext.setGenericReturnType(existing.getEntity().getClass());
responseBuilder = fromResponse((Response) result);
if ((result instanceof ResponseImpl)) {
ResponseImpl responseImpl = (ResponseImpl) result;
if (responseImpl.getEntityAnnotations() != null) {
requestContext.setAdditionalAnnotations(responseImpl.getEntityAnnotations());
}
// No entity object: materialize a byte[] entity from the buffered stream.
if (responseBuilder.getEntity() == null) {
if (responseImpl.getEntityStream() instanceof ByteArrayInputStream) {
ByteArrayInputStream byteArrayInputStream = (ByteArrayInputStream) responseImpl.getEntityStream();
responseBuilder.entity(byteArrayInputStream.readAllBytes());
}
}
}
}
if (existing.getMediaType() != null) {
requestContext.setResponseContentType(existing.getMediaType());
mediaTypeAlreadyExists = true;
}
// Only add a Content-Type header when the Response did not already carry one.
EncodedMediaType produces = requestContext.getResponseContentType();
if (!mediaTypeAlreadyExists && produces != null) {
responseBuilder.header(HttpHeaders.CONTENT_TYPE, produces.toString());
}
if ((responseBuilder instanceof ResponseBuilderImpl)) {
// build(false) avoids defensive copying for our own builder implementation.
requestContext
.setResponse(
new LazyResponse.Existing(((ResponseBuilderImpl) responseBuilder).build(false)));
} else {
requestContext.setResponse(new LazyResponse.Existing(responseBuilder.build()));
}
} else {
// Non-Response result: defer Response construction until it is requested.
requestContext.setResponse(new LazyResponse() {
Response response;
@Override
public Response get() {
if (response == null) {
Response.ResponseBuilder responseBuilder;
if (result instanceof GenericEntity) {
GenericEntity<?> genericEntity = (GenericEntity<?>) result;
requestContext.setGenericReturnType(genericEntity.getType());
responseBuilder = ResponseImpl.ok(genericEntity.getEntity());
} else if (result == null) {
responseBuilder = ResponseImpl.noContent();
} else {
responseBuilder = ResponseImpl.ok(result);
}
EncodedMediaType produces = requestContext.getResponseContentType();
if (produces != null) {
responseBuilder.header(HttpHeaders.CONTENT_TYPE, produces.toString());
}
if ((responseBuilder instanceof ResponseBuilderImpl)) {
response = ((ResponseBuilderImpl) responseBuilder).build(false);
} else {
response = responseBuilder.build();
}
}
return response;
}
@Override
public boolean isCreated() {
return response != null;
}
});
}
} | class ResponseHandler implements ServerRestHandler {
@Override
private ResponseBuilderImpl fromResponse(Response response) {
Response.ResponseBuilder b = new ResponseBuilderImpl().status(response.getStatus());
if (response.hasEntity()) {
b.entity(response.getEntity());
}
for (String headerName : response.getHeaders().keySet()) {
List<Object> headerValues = response.getHeaders().get(headerName);
for (Object headerValue : headerValues) {
b.header(headerName, headerValue);
}
}
return (ResponseBuilderImpl) b;
}
} | class ResponseHandler implements ServerRestHandler {
@Override
private ResponseBuilderImpl fromResponse(Response response) {
Response.ResponseBuilder b = new ResponseBuilderImpl().status(response.getStatus());
if (response.hasEntity()) {
b.entity(response.getEntity());
}
for (String headerName : response.getHeaders().keySet()) {
List<Object> headerValues = response.getHeaders().get(headerName);
for (Object headerValue : headerValues) {
b.header(headerName, headerValue);
}
}
return (ResponseBuilderImpl) b;
}
} |
```suggestion return (int) nodes.stream().filter(node -> node.isWorking()).count(); ``` | public int workingNodes() {
// Null-safe count: isWorking() appears to return a Boolean where null means
// "state unknown"; `== Boolean.TRUE` treats unknown as not working — TODO confirm
// against Node.isWorking() before simplifying to a plain boolean test.
return (int) nodes.stream().filter(node -> node.isWorking() == Boolean.TRUE).count();
} | return (int) nodes.stream().filter(node -> node.isWorking() == Boolean.TRUE).count(); | public int workingNodes() {
// Null-safe count: isWorking() appears to return a Boolean where null means
// "state unknown"; `== Boolean.TRUE` treats unknown as not working — TODO confirm
// against Node.isWorking() before simplifying to a plain boolean test.
return (int) nodes.stream().filter(node -> node.isWorking() == Boolean.TRUE).count();
} | class Group {
// Stable identity of this group within the cluster.
private final int id;
private final ImmutableList<Node> nodes;
// Mutable state is held in atomics: read by query threads while updated by monitoring.
private final AtomicBoolean hasSufficientCoverage = new AtomicBoolean(true);
private final AtomicBoolean hasFullCoverage = new AtomicBoolean(true);
private final AtomicLong activeDocuments = new AtomicLong(0);
private final AtomicBoolean isBlockingWrites = new AtomicBoolean(false);
private final AtomicBoolean isContentWellBalanced = new AtomicBoolean(true);
// Max allowed total deviation from the per-node average, as a fraction of all docs.
private final static double MAX_UNBALANCE = 0.10;
public Group(int id, List<Node> nodes) {
this.id = id;
this.nodes = ImmutableList.copyOf(nodes);
// Assign each node its position within the group.
int idx = 0;
for(var node: nodes) {
node.setPathIndex(idx);
idx++;
}
}
/** Returns the unique identity of this group */
public int id() { return id; }
/** Returns the nodes in this group as an immutable list */
public ImmutableList<Node> nodes() { return nodes; }
/**
* Returns whether this group has sufficient active documents
* (compared to other groups) that it should receive traffic
*/
public boolean hasSufficientCoverage() {
return hasSufficientCoverage.get();
}
void setHasSufficientCoverage(boolean sufficientCoverage) {
hasSufficientCoverage.lazySet(sufficientCoverage);
}
// Recomputes the aggregate document count, write-blocked flag and balance flag
// from current per-node state; nodes whose working state is not TRUE are excluded.
void aggregateNodeValues() {
long activeDocs = nodes.stream().filter(node -> node.isWorking() == Boolean.TRUE).mapToLong(Node::getActiveDocuments).sum();
activeDocuments.set(activeDocs);
isBlockingWrites.set(nodes.stream().anyMatch(Node::isBlockingWrites));
int numWorkingNodes = workingNodes();
if (numWorkingNodes > 0) {
long average = activeDocs / numWorkingNodes;
long deviation = nodes.stream().filter(node -> node.isWorking() == Boolean.TRUE).mapToLong(node -> Math.abs(node.getActiveDocuments() - average)).sum();
isContentWellBalanced.set(deviation <= (activeDocs * MAX_UNBALANCE));
} else {
// No working nodes: balance is vacuously true.
isContentWellBalanced.set(true);
}
}
/** Returns the active documents on this group. If unknown, 0 is returned. */
long getActiveDocuments() { return activeDocuments.get(); }
/** Returns whether any node in this group is currently blocking write operations */
public boolean isBlockingWrites() { return isBlockingWrites.get(); }
public boolean isContentWellBalanced() { return isContentWellBalanced.get(); }
// Atomically records the new full-coverage state and reports whether it changed.
public boolean isFullCoverageStatusChanged(boolean hasFullCoverageNow) {
boolean previousState = hasFullCoverage.getAndSet(hasFullCoverageNow);
return previousState != hasFullCoverageNow;
}
@Override
public String toString() { return "search group " + id; }
@Override
public int hashCode() { return id; }
@Override
public boolean equals(Object other) {
if (other == this) return true;
if (!(other instanceof Group)) return false;
return ((Group) other).id == this.id;
}
} | class Group {
// Stable identity of this group within the cluster.
private final int id;
private final ImmutableList<Node> nodes;
// Mutable state is held in atomics: read by query threads while updated by monitoring.
private final AtomicBoolean hasSufficientCoverage = new AtomicBoolean(true);
private final AtomicBoolean hasFullCoverage = new AtomicBoolean(true);
private final AtomicLong activeDocuments = new AtomicLong(0);
private final AtomicBoolean isBlockingWrites = new AtomicBoolean(false);
private final AtomicBoolean isContentWellBalanced = new AtomicBoolean(true);
// Max allowed total deviation from the per-node average, as a fraction of all docs.
private final static double MAX_UNBALANCE = 0.10;
public Group(int id, List<Node> nodes) {
this.id = id;
this.nodes = ImmutableList.copyOf(nodes);
// Assign each node its position within the group.
int idx = 0;
for(var node: nodes) {
node.setPathIndex(idx);
idx++;
}
}
/** Returns the unique identity of this group */
public int id() { return id; }
/** Returns the nodes in this group as an immutable list */
public ImmutableList<Node> nodes() { return nodes; }
/**
* Returns whether this group has sufficient active documents
* (compared to other groups) that it should receive traffic
*/
public boolean hasSufficientCoverage() {
return hasSufficientCoverage.get();
}
void setHasSufficientCoverage(boolean sufficientCoverage) {
hasSufficientCoverage.lazySet(sufficientCoverage);
}
// Recomputes the aggregate document count, write-blocked flag and balance flag
// from current per-node state; nodes whose working state is not TRUE are excluded.
void aggregateNodeValues() {
long activeDocs = nodes.stream().filter(node -> node.isWorking() == Boolean.TRUE).mapToLong(Node::getActiveDocuments).sum();
activeDocuments.set(activeDocs);
isBlockingWrites.set(nodes.stream().anyMatch(Node::isBlockingWrites));
int numWorkingNodes = workingNodes();
if (numWorkingNodes > 0) {
long average = activeDocs / numWorkingNodes;
long deviation = nodes.stream().filter(node -> node.isWorking() == Boolean.TRUE).mapToLong(node -> Math.abs(node.getActiveDocuments() - average)).sum();
isContentWellBalanced.set(deviation <= (activeDocs * MAX_UNBALANCE));
} else {
// No working nodes: balance is vacuously true.
isContentWellBalanced.set(true);
}
}
/** Returns the active documents on this group. If unknown, 0 is returned. */
long getActiveDocuments() { return activeDocuments.get(); }
/** Returns whether any node in this group is currently blocking write operations */
public boolean isBlockingWrites() { return isBlockingWrites.get(); }
public boolean isContentWellBalanced() { return isContentWellBalanced.get(); }
// Atomically records the new full-coverage state and reports whether it changed.
public boolean isFullCoverageStatusChanged(boolean hasFullCoverageNow) {
boolean previousState = hasFullCoverage.getAndSet(hasFullCoverageNow);
return previousState != hasFullCoverageNow;
}
@Override
public String toString() { return "search group " + id; }
@Override
public int hashCode() { return id; }
@Override
public boolean equals(Object other) {
if (other == this) return true;
if (!(other instanceof Group)) return false;
return ((Group) other).id == this.id;
}
} |
Yes. Thing is that we would need to know if a refresh happened before to make the second point work. I think it is easier to just let them refresh and get a new set of tokens. | public void accept(UniEmitter<? super SecurityIdentity> emitter) {
// Silent refresh: build a token shell around the cached refresh token, refresh it
// at the IDP, then re-authenticate with the newly issued ID token.
OAuth2TokenImpl token = new OAuth2TokenImpl(configContext.auth, new JsonObject());
token.principal().put("refresh_token", entry.getToken());
token.refresh(new Handler<AsyncResult<Void>>() {
@Override
public void handle(AsyncResult<Void> result) {
if (result.succeeded()) {
authenticate(identityProviderManager,
new IdTokenCredential(token.opaqueIdToken(), context))
.subscribe().with(new Consumer<SecurityIdentity>() {
@Override
public void accept(SecurityIdentity identity) {
processSuccessfulAuthentication(context, configContext, token,
identity);
// Remember the rotated refresh token for subsequent silent refreshes.
entry.setToken(token.opaqueRefreshToken());
emitter.complete(
augmentIdentity(identity, token.opaqueAccessToken(),
token.opaqueRefreshToken(),
context));
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
emitter.fail(throwable);
}
});
} else {
// The refresh itself failed (e.g. refresh token expired or revoked).
emitter.fail(new AuthenticationFailedException(result.cause()));
}
}
});
} | context)); | public void accept(UniEmitter<? super SecurityIdentity> emitter) {
// Silent refresh: wrap the current refresh token, refresh it at the IDP, then
// re-authenticate with the new ID token. The rotated tokens reach the caller
// through augmentIdentity; no server-side caching of the refresh token.
OAuth2TokenImpl token = new OAuth2TokenImpl(configContext.auth, new JsonObject());
token.principal().put("refresh_token", refreshToken);
token.refresh(new Handler<AsyncResult<Void>>() {
@Override
public void handle(AsyncResult<Void> result) {
if (result.succeeded()) {
authenticate(identityProviderManager,
new IdTokenCredential(token.opaqueIdToken(), context))
.subscribe().with(new Consumer<SecurityIdentity>() {
@Override
public void accept(SecurityIdentity identity) {
processSuccessfulAuthentication(context, configContext, token,
identity);
emitter.complete(
augmentIdentity(identity, token.opaqueAccessToken(),
token.opaqueRefreshToken(),
context));
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
emitter.fail(throwable);
}
});
} else {
// The refresh itself failed (e.g. refresh token expired or revoked).
emitter.fail(new AuthenticationFailedException(result.cause()));
}
}
});
} | class CodeAuthenticationMechanism extends AbstractOidcAuthenticationMechanism {
private static final Logger LOG = Logger.getLogger(CodeAuthenticationMechanism.class);
// Cookie carrying the state parameter during the authorization-code round trip.
private static final String STATE_COOKIE_NAME = "q_auth";
// Session cookie: id/access/refresh tokens joined by COOKIE_DELIM.
private static final String SESSION_COOKIE_NAME = "q_session";
private static final String COOKIE_DELIM = "___";
// Builds an identity that also exposes the access and refresh tokens as credentials,
// delegating principal, roles, attributes and permission checks to the wrapped identity.
private static QuarkusSecurityIdentity augmentIdentity(SecurityIdentity securityIdentity,
String accessToken,
String refreshToken,
RoutingContext context) {
IdTokenCredential idTokenCredential = securityIdentity.getCredential(IdTokenCredential.class);
RefreshToken refreshTokenCredential = new RefreshToken(refreshToken);
return QuarkusSecurityIdentity.builder()
.setPrincipal(securityIdentity.getPrincipal())
.addCredential(idTokenCredential)
.addCredential(new AccessTokenCredential(accessToken, refreshTokenCredential, context))
.addCredential(refreshTokenCredential)
.addRoles(securityIdentity.getRoles())
.addAttributes(securityIdentity.getAttributes())
.addPermissionChecker(new Function<Permission, Uni<Boolean>>() {
@Override
public Uni<Boolean> apply(Permission permission) {
return securityIdentity.checkPermission(permission);
}
}).build();
}
/**
* A bounded map that limits the number of refresh tokens we can process concurrently.
* Access-ordered LRU capped at 30 entries; the eldest entry is evicted on overflow.
*/
private final Map<String, TokenEntry> refreshingTokens = Collections
.synchronizedMap(new LinkedHashMap<String, TokenEntry>(16, 0.75f, true) {
@Override
protected boolean removeEldestEntry(Map.Entry eldest) {
return size() > 30;
}
});
// Authenticates from the session cookie when present (silently refreshing an
// expired ID token); otherwise runs the authorization-code flow.
public Uni<SecurityIdentity> authenticate(RoutingContext context,
IdentityProviderManager identityProviderManager,
DefaultTenantConfigResolver resolver) {
Cookie sessionCookie = context.request().getCookie(SESSION_COOKIE_NAME);
TenantConfigContext configContext = resolver.resolve(context, true);
if (sessionCookie != null) {
// Session cookie layout: idToken___accessToken___refreshToken.
String[] tokens = sessionCookie.getValue().split(COOKIE_DELIM);
String idToken = tokens[0];
String accessToken = tokens[1];
String refreshToken = tokens[2];
return authenticate(identityProviderManager, new IdTokenCredential(tokens[0], context))
.map(new Function<SecurityIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityIdentity identity) {
if (isLogout(context, configContext)) {
redirectToLogoutEndpoint(context, configContext, idToken);
}
return augmentIdentity(identity, accessToken, refreshToken, context);
}
}).on().failure().recoverWithItem(new Function<Throwable, SecurityIdentity>() {
@Override
public SecurityIdentity apply(Throwable throwable) {
Throwable cause = throwable.getCause();
// Only an expired ID token is recoverable (via silent refresh).
if (cause != null && !cause.getMessage().equalsIgnoreCase("expired token")) {
throw new AuthenticationFailedException(throwable);
}
SecurityIdentity identity = null;
// Refresh failed or logout requested: send the user to the logout endpoint.
if (isLogout(context, configContext) ||
(identity = trySilentRefresh(configContext, idToken, refreshToken, context,
identityProviderManager)) == null) {
redirectToLogoutEndpoint(context, configContext, idToken);
}
return identity;
}
});
}
return performCodeFlow(identityProviderManager, context, resolver);
}
// Builds the 302 challenge that redirects the user agent to the IDP authorization endpoint.
public Uni<ChallengeData> getChallenge(RoutingContext context, DefaultTenantConfigResolver resolver) {
TenantConfigContext configContext = resolver.resolve(context, false);
// Drop any stale session before starting a new code flow.
removeCookie(context, configContext, SESSION_COOKIE_NAME);
ChallengeData challenge;
JsonObject params = new JsonObject();
// 'openid' is mandatory for OIDC; any configured scopes are appended.
List<Object> scopes = new ArrayList<>();
scopes.add("openid");
configContext.oidcConfig.getAuthentication().scopes.ifPresent(scopes::addAll);
params.put("scopes", new JsonArray(scopes));
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, redirectPath);
LOG.debugf("Authentication request redirect_uri parameter: %s", redirectUriParam);
params.put("redirect_uri", redirectUriParam);
params.put("state", generateState(context, configContext, redirectPath));
if (configContext.oidcConfig.authentication.getExtraParams() != null) {
for (Map.Entry<String, String> entry : configContext.oidcConfig.authentication.getExtraParams().entrySet()) {
params.put(entry.getKey(), entry.getValue());
}
}
challenge = new ChallengeData(HttpResponseStatus.FOUND.code(), HttpHeaders.LOCATION,
configContext.auth.authorizeURL(params));
return Uni.createFrom().item(challenge);
}
// Handles the redirect back from the IDP: validates the state cookie/parameter
// (CSRF protection), optionally restores the originally requested path, then
// exchanges the authorization code for tokens and builds the identity.
private Uni<SecurityIdentity> performCodeFlow(IdentityProviderManager identityProviderManager,
RoutingContext context, DefaultTenantConfigResolver resolver) {
TenantConfigContext configContext = resolver.resolve(context, true);
JsonObject params = new JsonObject();
String code = context.request().getParam("code");
if (code == null) {
// Not a code-flow redirect; an empty item lets the challenge be issued instead.
return Uni.createFrom().optional(Optional.empty());
}
URI absoluteUri = URI.create(context.request().absoluteURI());
Cookie stateCookie = context.getCookie(STATE_COOKIE_NAME);
if (stateCookie != null) {
List<String> values = context.queryParam("state");
// The state parameter must be single-valued and match the cookie value.
if (values.size() != 1) {
LOG.debug("State parameter can not be empty or multi-valued");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else if (!stateCookie.getValue().startsWith(values.get(0))) {
LOG.debug("State cookie does not match the state parameter");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else if (context.queryParam("pathChecked").isEmpty()) {
// The state cookie may carry the original request path; redirect once to restore it.
String[] pair = stateCookie.getValue().split(COOKIE_DELIM);
if (pair.length == 2) {
String extraPath = pair[1];
String extraQuery = "?pathChecked=true";
if (absoluteUri.getRawQuery() != null) {
extraQuery += ("&" + absoluteUri.getRawQuery());
}
String localRedirectUri = buildUri(context, extraPath + extraQuery);
LOG.debugf("Local redirect URI: %s", localRedirectUri);
return Uni.createFrom().failure(new AuthenticationRedirectException(localRedirectUri));
}
removeCookie(context, configContext, STATE_COOKIE_NAME);
} else {
removeCookie(context, configContext, STATE_COOKIE_NAME);
}
} else {
LOG.debug("The state cookie is missing after a redirect from IDP");
return Uni.createFrom().failure(new AuthenticationCompletionException());
}
// Exchange the authorization code for tokens at the token endpoint.
params.put("code", code);
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, redirectPath);
LOG.debugf("Token request redirect_uri parameter: %s", redirectUriParam);
params.put("redirect_uri", redirectUriParam);
Credentials creds = configContext.oidcConfig.getCredentials();
if (creds.clientSecret.value.isPresent() && creds.clientSecret.method.isPresent()
&& Secret.Method.POST == creds.clientSecret.method.get()) {
params.put("client_secret", creds.clientSecret.value.get());
}
return Uni.createFrom().emitter(new Consumer<UniEmitter<? super SecurityIdentity>>() {
@Override
public void accept(UniEmitter<? super SecurityIdentity> uniEmitter) {
configContext.auth.authenticate(params, userAsyncResult -> {
if (userAsyncResult.failed()) {
if (userAsyncResult.cause() != null) {
LOG.debugf("Exception during the code to token exchange: %s", userAsyncResult.cause().getMessage());
}
uniEmitter.fail(new AuthenticationCompletionException(userAsyncResult.cause()));
} else {
AccessToken result = AccessToken.class.cast(userAsyncResult.result());
authenticate(identityProviderManager, new IdTokenCredential(result.opaqueIdToken(), context))
.subscribe().with(new Consumer<SecurityIdentity>() {
@Override
public void accept(SecurityIdentity identity) {
// exp/iat are needed later to compute the session cookie lifetime.
if (!result.idToken().containsKey("exp") || !result.idToken().containsKey("iat")) {
LOG.debug("ID Token is required to contain 'exp' and 'iat' claims");
uniEmitter.fail(new AuthenticationCompletionException());
}
processSuccessfulAuthentication(context, configContext, result, identity);
uniEmitter.complete(augmentIdentity(identity, result.opaqueAccessToken(),
result.opaqueRefreshToken(), context));
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
uniEmitter.fail(throwable);
}
});
}
});
}
});
}
// Persists the session cookie (id___access___refresh) with a max-age derived from
// the ID token lifetime (exp - iat) plus the configured expiration grace.
private void processSuccessfulAuthentication(RoutingContext context, TenantConfigContext configContext,
AccessToken result, SecurityIdentity securityIdentity) {
removeCookie(context, configContext, SESSION_COOKIE_NAME);
CookieImpl cookie = new CookieImpl(SESSION_COOKIE_NAME, new StringBuilder(result.opaqueIdToken())
.append(COOKIE_DELIM)
.append(result.opaqueAccessToken())
.append(COOKIE_DELIM)
.append(result.opaqueRefreshToken()).toString());
long maxAge = result.idToken().getLong("exp") - result.idToken().getLong("iat");
if (configContext.oidcConfig.token.expirationGrace.isPresent()) {
maxAge += configContext.oidcConfig.token.expirationGrace.get();
}
LOG.debugf("Session cookie 'max-age' parameter is set to %d", maxAge);
cookie.setMaxAge(maxAge);
cookie.setSecure(context.request().isSSL());
cookie.setHttpOnly(true);
if (configContext.oidcConfig.authentication.cookiePath.isPresent()) {
cookie.setPath(configContext.oidcConfig.authentication.cookiePath.get());
}
context.response().addCookie(cookie);
}
// The configured redirect path, or the current request path when none is configured.
private String getRedirectPath(TenantConfigContext configContext, RoutingContext context) {
Authentication auth = configContext.oidcConfig.getAuthentication();
return auth.getRedirectPath().isPresent() ? auth.getRedirectPath().get() : context.request().path();
}
// Generates the CSRF state value and stores it (optionally with the original
// request path appended) in a short-lived cookie; only the UUID goes to the IDP.
private String generateState(RoutingContext context, TenantConfigContext configContext,
String redirectPath) {
String uuid = UUID.randomUUID().toString();
String cookieValue = uuid;
Authentication auth = configContext.oidcConfig.getAuthentication();
if (auth.isRestorePathAfterRedirect() && !redirectPath.equals(context.request().path())) {
cookieValue += (COOKIE_DELIM + context.request().path());
}
CookieImpl cookie = new CookieImpl(STATE_COOKIE_NAME, cookieValue);
cookie.setHttpOnly(true);
cookie.setSecure(context.request().isSSL());
// 30 minutes: long enough to complete login at the IDP.
cookie.setMaxAge(60 * 30);
if (auth.cookiePath.isPresent()) {
cookie.setPath(auth.getCookiePath().get());
}
context.response().addCookie(cookie);
return uuid;
}
private String buildUri(RoutingContext context, String path) {
return new StringBuilder(context.request().scheme()).append(":
.append(URI.create(context.request().absoluteURI()).getAuthority())
.append(path)
.toString();
}
// Expires the named cookie (empty value, max-age 0) using the configured cookie path.
private void removeCookie(RoutingContext context, TenantConfigContext configContext, String cookieName) {
ServerCookie cookie = (ServerCookie) context.cookieMap().get(cookieName);
if (cookie != null) {
cookie.setValue("");
cookie.setMaxAge(0);
Authentication auth = configContext.oidcConfig.getAuthentication();
if (auth.cookiePath.isPresent()) {
cookie.setPath(auth.cookiePath.get());
}
}
}
// True when the current request targets the configured logout path.
private boolean isLogout(RoutingContext context, TenantConfigContext configContext) {
Optional<String> logoutPath = configContext.oidcConfig.logout.path;
if (logoutPath.isPresent()) {
return context.request().absoluteURI().equals(
buildUri(context, logoutPath.get()));
}
return false;
}
private SecurityIdentity trySilentRefresh(TenantConfigContext configContext, String idToken, String refreshToken,
RoutingContext context, IdentityProviderManager identityProviderManager) {
OidcTenantConfig config = configContext.oidcConfig;
if (config.token.refreshTimeout.isPresent()) {
OAuth2AuthProviderImpl auth = ((OAuth2AuthProviderImpl) configContext.auth);
JWT jwt = auth.getJWT();
JsonObject tokenJson;
try {
tokenJson = jwt.decode(idToken);
} catch (Exception cause) {
throw new AuthenticationCompletionException(cause);
}
Long iat = tokenJson.getLong("iat");
long now = System.currentTimeMillis() / 1000;
Integer timeout = config.token.refreshTimeout.get();
if (now - iat >= timeout) {
String sub = tokenJson.getString("sub");
TokenEntry entry = refreshingTokens.computeIfAbsent(sub,
new Function<String, TokenEntry>() {
@Override
public TokenEntry apply(String token) {
return new TokenEntry(refreshToken);
}
});
synchronized (entry) {
Uni<SecurityIdentity> cf = Uni.createFrom().emitter(new Consumer<UniEmitter<? super SecurityIdentity>>() {
@Override
});
return cf.await().indefinitely();
}
}
}
return null;
}
private String buildLogoutRedirectUri(TenantConfigContext configContext, String idToken, RoutingContext context) {
String logoutPath = OAuth2AuthProviderImpl.class.cast(configContext.auth).getConfig().getLogoutPath();
StringBuilder logoutUri = new StringBuilder(logoutPath).append("?").append("id_token_hint=").append(idToken);
if (configContext.oidcConfig.logout.postLogoutPath.isPresent()) {
logoutUri.append("&post_logout_redirect_uri=").append(
buildUri(context, configContext.oidcConfig.logout.postLogoutPath.get()));
}
return logoutUri.toString();
}
private void redirectToLogoutEndpoint(RoutingContext context, TenantConfigContext configContext,
String idToken) {
removeCookie(context, configContext, SESSION_COOKIE_NAME);
throw new AuthenticationRedirectException(buildLogoutRedirectUri(configContext, idToken, context));
}
} | class CodeAuthenticationMechanism extends AbstractOidcAuthenticationMechanism {
private static final Logger LOG = Logger.getLogger(CodeAuthenticationMechanism.class);
private static final String STATE_COOKIE_NAME = "q_auth";
private static final String SESSION_COOKIE_NAME = "q_session";
private static final String COOKIE_DELIM = "___";
private static QuarkusSecurityIdentity augmentIdentity(SecurityIdentity securityIdentity,
String accessToken,
String refreshToken,
RoutingContext context) {
IdTokenCredential idTokenCredential = securityIdentity.getCredential(IdTokenCredential.class);
RefreshToken refreshTokenCredential = new RefreshToken(refreshToken);
return QuarkusSecurityIdentity.builder()
.setPrincipal(securityIdentity.getPrincipal())
.addCredential(idTokenCredential)
.addCredential(new AccessTokenCredential(accessToken, refreshTokenCredential, context))
.addCredential(refreshTokenCredential)
.addRoles(securityIdentity.getRoles())
.addAttributes(securityIdentity.getAttributes())
.addPermissionChecker(new Function<Permission, Uni<Boolean>>() {
@Override
public Uni<Boolean> apply(Permission permission) {
return securityIdentity.checkPermission(permission);
}
}).build();
}
public Uni<SecurityIdentity> authenticate(RoutingContext context,
IdentityProviderManager identityProviderManager,
DefaultTenantConfigResolver resolver) {
Cookie sessionCookie = context.request().getCookie(SESSION_COOKIE_NAME);
TenantConfigContext configContext = resolver.resolve(context, true);
if (sessionCookie != null) {
String[] tokens = sessionCookie.getValue().split(COOKIE_DELIM);
String idToken = tokens[0];
String accessToken = tokens[1];
String refreshToken = tokens[2];
return authenticate(identityProviderManager, new IdTokenCredential(tokens[0], context))
.map(new Function<SecurityIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityIdentity identity) {
if (isLogout(context, configContext)) {
throw redirectToLogoutEndpoint(context, configContext, idToken);
}
return augmentIdentity(identity, accessToken, refreshToken, context);
}
}).on().failure().recoverWithItem(new Function<Throwable, SecurityIdentity>() {
@Override
public SecurityIdentity apply(Throwable throwable) {
if (throwable instanceof AuthenticationRedirectException) {
throw AuthenticationRedirectException.class.cast(throwable);
}
Throwable cause = throwable.getCause();
if (cause != null && !cause.getMessage().equalsIgnoreCase("expired token")) {
throw new AuthenticationCompletionException(throwable);
}
SecurityIdentity identity = null;
if (configContext.oidcConfig.token.refreshExpired) {
identity = trySilentRefresh(configContext, idToken, refreshToken, context,
identityProviderManager);
}
if (identity == null) {
throw new AuthenticationFailedException(throwable);
}
return identity;
}
});
}
return performCodeFlow(identityProviderManager, context, resolver);
}
public Uni<ChallengeData> getChallenge(RoutingContext context, DefaultTenantConfigResolver resolver) {
TenantConfigContext configContext = resolver.resolve(context, false);
removeCookie(context, configContext, SESSION_COOKIE_NAME);
ChallengeData challenge;
JsonObject params = new JsonObject();
List<Object> scopes = new ArrayList<>();
scopes.add("openid");
configContext.oidcConfig.getAuthentication().scopes.ifPresent(scopes::addAll);
params.put("scopes", new JsonArray(scopes));
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, redirectPath);
LOG.debugf("Authentication request redirect_uri parameter: %s", redirectUriParam);
params.put("redirect_uri", redirectUriParam);
params.put("state", generateState(context, configContext, redirectPath));
if (configContext.oidcConfig.authentication.getExtraParams() != null) {
for (Map.Entry<String, String> entry : configContext.oidcConfig.authentication.getExtraParams().entrySet()) {
params.put(entry.getKey(), entry.getValue());
}
}
challenge = new ChallengeData(HttpResponseStatus.FOUND.code(), HttpHeaders.LOCATION,
configContext.auth.authorizeURL(params));
return Uni.createFrom().item(challenge);
}
private Uni<SecurityIdentity> performCodeFlow(IdentityProviderManager identityProviderManager,
RoutingContext context, DefaultTenantConfigResolver resolver) {
TenantConfigContext configContext = resolver.resolve(context, true);
JsonObject params = new JsonObject();
String code = context.request().getParam("code");
if (code == null) {
return Uni.createFrom().optional(Optional.empty());
}
Cookie stateCookie = context.getCookie(STATE_COOKIE_NAME);
if (stateCookie != null) {
List<String> values = context.queryParam("state");
if (values.size() != 1) {
LOG.debug("State parameter can not be empty or multi-valued");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else if (!stateCookie.getValue().startsWith(values.get(0))) {
LOG.debug("State cookie does not match the state parameter");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else if (context.queryParam("pathChecked").isEmpty()) {
String[] pair = stateCookie.getValue().split(COOKIE_DELIM);
if (pair.length == 2) {
String extraPath = pair[1];
String extraQuery = "?pathChecked=true";
if (context.request().query() != null) {
extraQuery += ("&" + context.request().query());
}
String localRedirectUri = buildUri(context, extraPath + extraQuery);
LOG.debugf("Local redirect URI: %s", localRedirectUri);
return Uni.createFrom().failure(new AuthenticationRedirectException(localRedirectUri));
}
removeCookie(context, configContext, STATE_COOKIE_NAME);
} else {
removeCookie(context, configContext, STATE_COOKIE_NAME);
}
} else {
LOG.debug("The state cookie is missing after a redirect from IDP");
return Uni.createFrom().failure(new AuthenticationCompletionException());
}
params.put("code", code);
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, redirectPath);
LOG.debugf("Token request redirect_uri parameter: %s", redirectUriParam);
params.put("redirect_uri", redirectUriParam);
Credentials creds = configContext.oidcConfig.getCredentials();
if (creds.clientSecret.value.isPresent() && Secret.Method.POST == creds.clientSecret.method.orElse(null)) {
params.put("client_secret", creds.clientSecret.value.get());
} else if (creds.jwt.secret.isPresent()) {
params.put("client_assertion_type", "urn:ietf:params:oauth:client-assertion-type:jwt-bearer");
params.put("client_assertion", signJwtWithClientSecret(configContext.oidcConfig));
}
return Uni.createFrom().emitter(new Consumer<UniEmitter<? super SecurityIdentity>>() {
@Override
public void accept(UniEmitter<? super SecurityIdentity> uniEmitter) {
configContext.auth.authenticate(params, userAsyncResult -> {
if (userAsyncResult.failed()) {
if (userAsyncResult.cause() != null) {
LOG.debugf("Exception during the code to token exchange: %s", userAsyncResult.cause().getMessage());
}
uniEmitter.fail(new AuthenticationCompletionException(userAsyncResult.cause()));
} else {
AccessToken result = AccessToken.class.cast(userAsyncResult.result());
authenticate(identityProviderManager, new IdTokenCredential(result.opaqueIdToken(), context))
.subscribe().with(new Consumer<SecurityIdentity>() {
@Override
public void accept(SecurityIdentity identity) {
if (!result.idToken().containsKey("exp") || !result.idToken().containsKey("iat")) {
LOG.debug("ID Token is required to contain 'exp' and 'iat' claims");
uniEmitter.fail(new AuthenticationCompletionException());
}
processSuccessfulAuthentication(context, configContext, result, identity);
uniEmitter.complete(augmentIdentity(identity, result.opaqueAccessToken(),
result.opaqueRefreshToken(), context));
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
uniEmitter.fail(throwable);
}
});
}
});
}
});
}
private String signJwtWithClientSecret(OidcTenantConfig cfg) {
final byte[] keyBytes = cfg.credentials.jwt.secret.get().getBytes(StandardCharsets.UTF_8);
SecretKey key = new SecretKeySpec(keyBytes, 0, keyBytes.length, "HMACSHA256");
final long iat = (System.currentTimeMillis() / 1000);
final long exp = iat + cfg.credentials.jwt.lifespan;
return Jwt.claims()
.issuer(cfg.clientId.get())
.subject(cfg.clientId.get())
.audience(cfg.authServerUrl.get())
.issuedAt(iat)
.expiresAt(exp)
.sign(key);
}
private void processSuccessfulAuthentication(RoutingContext context, TenantConfigContext configContext,
AccessToken result, SecurityIdentity securityIdentity) {
removeCookie(context, configContext, SESSION_COOKIE_NAME);
CookieImpl cookie = new CookieImpl(SESSION_COOKIE_NAME, new StringBuilder(result.opaqueIdToken())
.append(COOKIE_DELIM)
.append(result.opaqueAccessToken())
.append(COOKIE_DELIM)
.append(result.opaqueRefreshToken()).toString());
long maxAge = result.idToken().getLong("exp") - result.idToken().getLong("iat");
if (configContext.oidcConfig.token.expirationGrace.isPresent()) {
maxAge += configContext.oidcConfig.token.expirationGrace.get();
}
LOG.debugf("Session cookie 'max-age' parameter is set to %d", maxAge);
cookie.setMaxAge(maxAge);
cookie.setSecure(context.request().isSSL());
cookie.setHttpOnly(true);
if (configContext.oidcConfig.authentication.cookiePath.isPresent()) {
cookie.setPath(configContext.oidcConfig.authentication.cookiePath.get());
}
context.response().addCookie(cookie);
}
private String getRedirectPath(TenantConfigContext configContext, RoutingContext context) {
Authentication auth = configContext.oidcConfig.getAuthentication();
return auth.getRedirectPath().isPresent() ? auth.getRedirectPath().get() : context.request().path();
}
private String generateState(RoutingContext context, TenantConfigContext configContext,
String redirectPath) {
String uuid = UUID.randomUUID().toString();
String cookieValue = uuid;
Authentication auth = configContext.oidcConfig.getAuthentication();
if (auth.isRestorePathAfterRedirect() && !redirectPath.equals(context.request().path())) {
cookieValue += (COOKIE_DELIM + context.request().path());
}
CookieImpl cookie = new CookieImpl(STATE_COOKIE_NAME, cookieValue);
cookie.setHttpOnly(true);
cookie.setSecure(context.request().isSSL());
cookie.setMaxAge(60 * 30);
if (auth.cookiePath.isPresent()) {
cookie.setPath(auth.getCookiePath().get());
}
context.response().addCookie(cookie);
return uuid;
}
private String buildUri(RoutingContext context, String path) {
return new StringBuilder(context.request().scheme()).append(":
.append(URI.create(context.request().absoluteURI()).getAuthority())
.append(path)
.toString();
}
private void removeCookie(RoutingContext context, TenantConfigContext configContext, String cookieName) {
ServerCookie cookie = (ServerCookie) context.cookieMap().get(cookieName);
if (cookie != null) {
cookie.setValue("");
cookie.setMaxAge(0);
Authentication auth = configContext.oidcConfig.getAuthentication();
if (auth.cookiePath.isPresent()) {
cookie.setPath(auth.cookiePath.get());
}
}
}
private boolean isLogout(RoutingContext context, TenantConfigContext configContext) {
Optional<String> logoutPath = configContext.oidcConfig.logout.path;
if (logoutPath.isPresent()) {
return context.request().absoluteURI().equals(
buildUri(context, logoutPath.get()));
}
return false;
}
private SecurityIdentity trySilentRefresh(TenantConfigContext configContext, String idToken, String refreshToken,
RoutingContext context, IdentityProviderManager identityProviderManager) {
Uni<SecurityIdentity> cf = Uni.createFrom().emitter(new Consumer<UniEmitter<? super SecurityIdentity>>() {
@Override
});
return cf.await().indefinitely();
}
private String buildLogoutRedirectUri(TenantConfigContext configContext, String idToken, RoutingContext context) {
String logoutPath = configContext.oidcConfig.getEndSessionPath()
.orElse(OAuth2AuthProviderImpl.class.cast(configContext.auth).getConfig().getLogoutPath());
StringBuilder logoutUri = new StringBuilder(logoutPath).append("?").append("id_token_hint=").append(idToken);
if (configContext.oidcConfig.logout.postLogoutPath.isPresent()) {
logoutUri.append("&post_logout_redirect_uri=").append(
buildUri(context, configContext.oidcConfig.logout.postLogoutPath.get()));
}
return logoutUri.toString();
}
private AuthenticationRedirectException redirectToLogoutEndpoint(RoutingContext context, TenantConfigContext configContext,
String idToken) {
removeCookie(context, configContext, SESSION_COOKIE_NAME);
return new AuthenticationRedirectException(buildLogoutRedirectUri(configContext, idToken, context));
}
} |
How about describing the root cause here? something like: > sort state handles by offsets to avoid building `SnappyFramedInputStream` with EOF stream. And IIUC, we only need to sort the state handles when compression is enabled? | public Void restore() throws Exception {
if (stateHandles.isEmpty()) {
return null;
}
for (OperatorStateHandle stateHandle : stateHandles) {
if (stateHandle == null) {
continue;
}
FSDataInputStream in = stateHandle.openInputStream();
closeStreamOnCancelRegistry.registerCloseable(in);
ClassLoader restoreClassLoader = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(userClassloader);
OperatorBackendSerializationProxy backendSerializationProxy =
new OperatorBackendSerializationProxy(userClassloader);
backendSerializationProxy.read(new DataInputViewStreamWrapper(in));
List<StateMetaInfoSnapshot> restoredOperatorMetaInfoSnapshots =
backendSerializationProxy.getOperatorStateMetaInfoSnapshots();
for (StateMetaInfoSnapshot restoredSnapshot : restoredOperatorMetaInfoSnapshots) {
final RegisteredOperatorStateBackendMetaInfo<?> restoredMetaInfo =
new RegisteredOperatorStateBackendMetaInfo<>(restoredSnapshot);
if (restoredMetaInfo.getPartitionStateSerializer()
instanceof UnloadableDummyTypeSerializer) {
throw new IOException(
"Unable to restore operator state ["
+ restoredSnapshot.getName()
+ "]."
+ " The previous typeSerializer of the operator state must be present; the typeSerializer could"
+ " have been removed from the classpath, or its implementation have changed and could"
+ " not be loaded. This is a temporary restriction that will be fixed in future versions.");
}
PartitionableListState<?> listState =
registeredOperatorStates.get(restoredSnapshot.getName());
if (null == listState) {
listState = new PartitionableListState<>(restoredMetaInfo);
registeredOperatorStates.put(
listState.getStateMetaInfo().getName(), listState);
} else {
}
}
List<StateMetaInfoSnapshot> restoredBroadcastMetaInfoSnapshots =
backendSerializationProxy.getBroadcastStateMetaInfoSnapshots();
for (StateMetaInfoSnapshot restoredSnapshot : restoredBroadcastMetaInfoSnapshots) {
final RegisteredBroadcastStateBackendMetaInfo<?, ?> restoredMetaInfo =
new RegisteredBroadcastStateBackendMetaInfo<>(restoredSnapshot);
if (restoredMetaInfo.getKeySerializer() instanceof UnloadableDummyTypeSerializer
|| restoredMetaInfo.getValueSerializer()
instanceof UnloadableDummyTypeSerializer) {
throw new IOException(
"Unable to restore broadcast state ["
+ restoredSnapshot.getName()
+ "]."
+ " The previous key and value serializers of the state must be present; the serializers could"
+ " have been removed from the classpath, or their implementations have changed and could"
+ " not be loaded. This is a temporary restriction that will be fixed in future versions.");
}
BackendWritableBroadcastState<?, ?> broadcastState =
registeredBroadcastStates.get(restoredSnapshot.getName());
if (broadcastState == null) {
broadcastState = new HeapBroadcastState<>(restoredMetaInfo);
registeredBroadcastStates.put(
broadcastState.getStateMetaInfo().getName(), broadcastState);
} else {
}
}
List<Map.Entry<String, OperatorStateHandle.StateMetaInfo>> entries =
stateHandle.getStateNameToPartitionOffsets().entrySet().stream()
.sorted(
Comparator.comparingLong(
entry -> {
OperatorStateHandle.StateMetaInfo
stateMetaInfo = entry.getValue();
long[] offsets = stateMetaInfo.getOffsets();
if (offsets == null || offsets.length == 0) {
return Long.MIN_VALUE;
} else {
return offsets[0];
}
}))
.collect(Collectors.toList());
for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> nameToOffsets : entries) {
final String stateName = nameToOffsets.getKey();
PartitionableListState<?> listStateForName =
registeredOperatorStates.get(stateName);
final StreamCompressionDecorator compressionDecorator =
backendSerializationProxy.isUsingStateCompression()
? SnappyStreamCompressionDecorator.INSTANCE
: UncompressedStreamCompressionDecorator.INSTANCE;
try (final CompressibleFSDataInputStream compressedIn =
new CompressibleFSDataInputStream(
in,
compressionDecorator)) {
if (listStateForName == null) {
BackendWritableBroadcastState<?, ?> broadcastStateForName =
registeredBroadcastStates.get(stateName);
Preconditions.checkState(
broadcastStateForName != null,
"Found state without "
+ "corresponding meta info: "
+ stateName);
deserializeBroadcastStateValues(
broadcastStateForName, compressedIn, nameToOffsets.getValue());
} else {
deserializeOperatorStateValues(
listStateForName, compressedIn, nameToOffsets.getValue());
}
}
}
} finally {
Thread.currentThread().setContextClassLoader(restoreClassLoader);
if (closeStreamOnCancelRegistry.unregisterCloseable(in)) {
IOUtils.closeQuietly(in);
}
}
}
return null;
} | List<Map.Entry<String, OperatorStateHandle.StateMetaInfo>> entries = | public Void restore() throws Exception {
if (stateHandles.isEmpty()) {
return null;
}
for (OperatorStateHandle stateHandle : stateHandles) {
if (stateHandle == null) {
continue;
}
FSDataInputStream in = stateHandle.openInputStream();
closeStreamOnCancelRegistry.registerCloseable(in);
ClassLoader restoreClassLoader = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(userClassloader);
OperatorBackendSerializationProxy backendSerializationProxy =
new OperatorBackendSerializationProxy(userClassloader);
backendSerializationProxy.read(new DataInputViewStreamWrapper(in));
List<StateMetaInfoSnapshot> restoredOperatorMetaInfoSnapshots =
backendSerializationProxy.getOperatorStateMetaInfoSnapshots();
for (StateMetaInfoSnapshot restoredSnapshot : restoredOperatorMetaInfoSnapshots) {
final RegisteredOperatorStateBackendMetaInfo<?> restoredMetaInfo =
new RegisteredOperatorStateBackendMetaInfo<>(restoredSnapshot);
if (restoredMetaInfo.getPartitionStateSerializer()
instanceof UnloadableDummyTypeSerializer) {
throw new IOException(
"Unable to restore operator state ["
+ restoredSnapshot.getName()
+ "]."
+ " The previous typeSerializer of the operator state must be present; the typeSerializer could"
+ " have been removed from the classpath, or its implementation have changed and could"
+ " not be loaded. This is a temporary restriction that will be fixed in future versions.");
}
PartitionableListState<?> listState =
registeredOperatorStates.get(restoredSnapshot.getName());
if (null == listState) {
listState = new PartitionableListState<>(restoredMetaInfo);
registeredOperatorStates.put(
listState.getStateMetaInfo().getName(), listState);
} else {
}
}
List<StateMetaInfoSnapshot> restoredBroadcastMetaInfoSnapshots =
backendSerializationProxy.getBroadcastStateMetaInfoSnapshots();
for (StateMetaInfoSnapshot restoredSnapshot : restoredBroadcastMetaInfoSnapshots) {
final RegisteredBroadcastStateBackendMetaInfo<?, ?> restoredMetaInfo =
new RegisteredBroadcastStateBackendMetaInfo<>(restoredSnapshot);
if (restoredMetaInfo.getKeySerializer() instanceof UnloadableDummyTypeSerializer
|| restoredMetaInfo.getValueSerializer()
instanceof UnloadableDummyTypeSerializer) {
throw new IOException(
"Unable to restore broadcast state ["
+ restoredSnapshot.getName()
+ "]."
+ " The previous key and value serializers of the state must be present; the serializers could"
+ " have been removed from the classpath, or their implementations have changed and could"
+ " not be loaded. This is a temporary restriction that will be fixed in future versions.");
}
BackendWritableBroadcastState<?, ?> broadcastState =
registeredBroadcastStates.get(restoredSnapshot.getName());
if (broadcastState == null) {
broadcastState = new HeapBroadcastState<>(restoredMetaInfo);
registeredBroadcastStates.put(
broadcastState.getStateMetaInfo().getName(), broadcastState);
} else {
}
}
List<Map.Entry<String, OperatorStateHandle.StateMetaInfo>> entries =
new ArrayList<>(stateHandle.getStateNameToPartitionOffsets().entrySet());
if (backendSerializationProxy.isUsingStateCompression()) {
entries =
entries.stream()
.sorted(
Comparator.comparingLong(
entry -> {
OperatorStateHandle.StateMetaInfo
stateMetaInfo = entry.getValue();
long[] offsets = stateMetaInfo.getOffsets();
if (offsets == null
|| offsets.length == 0) {
return Long.MIN_VALUE;
} else {
return offsets[0];
}
}))
.collect(Collectors.toList());
}
for (Map.Entry<String, OperatorStateHandle.StateMetaInfo> nameToOffsets : entries) {
final String stateName = nameToOffsets.getKey();
PartitionableListState<?> listStateForName =
registeredOperatorStates.get(stateName);
final StreamCompressionDecorator compressionDecorator =
backendSerializationProxy.isUsingStateCompression()
? SnappyStreamCompressionDecorator.INSTANCE
: UncompressedStreamCompressionDecorator.INSTANCE;
try (final CompressibleFSDataInputStream compressedIn =
new CompressibleFSDataInputStream(
in,
compressionDecorator)) {
if (listStateForName == null) {
BackendWritableBroadcastState<?, ?> broadcastStateForName =
registeredBroadcastStates.get(stateName);
Preconditions.checkState(
broadcastStateForName != null,
"Found state without "
+ "corresponding meta info: "
+ stateName);
deserializeBroadcastStateValues(
broadcastStateForName, compressedIn, nameToOffsets.getValue());
} else {
deserializeOperatorStateValues(
listStateForName, compressedIn, nameToOffsets.getValue());
}
}
}
} finally {
Thread.currentThread().setContextClassLoader(restoreClassLoader);
if (closeStreamOnCancelRegistry.unregisterCloseable(in)) {
IOUtils.closeQuietly(in);
}
}
}
return null;
} | class OperatorStateRestoreOperation implements RestoreOperation<Void> {
private final CloseableRegistry closeStreamOnCancelRegistry;
private final ClassLoader userClassloader;
private final Map<String, PartitionableListState<?>> registeredOperatorStates;
private final Map<String, BackendWritableBroadcastState<?, ?>> registeredBroadcastStates;
private final Collection<OperatorStateHandle> stateHandles;
public OperatorStateRestoreOperation(
CloseableRegistry closeStreamOnCancelRegistry,
ClassLoader userClassloader,
Map<String, PartitionableListState<?>> registeredOperatorStates,
Map<String, BackendWritableBroadcastState<?, ?>> registeredBroadcastStates,
@Nonnull Collection<OperatorStateHandle> stateHandles) {
this.closeStreamOnCancelRegistry = closeStreamOnCancelRegistry;
this.userClassloader = userClassloader;
this.registeredOperatorStates = registeredOperatorStates;
this.registeredBroadcastStates = registeredBroadcastStates;
this.stateHandles = stateHandles;
}
@Override
private <S> void deserializeOperatorStateValues(
PartitionableListState<S> stateListForName,
FSDataInputStream in,
OperatorStateHandle.StateMetaInfo metaInfo)
throws IOException {
if (null != metaInfo) {
long[] offsets = metaInfo.getOffsets();
if (null != offsets) {
DataInputView div = new DataInputViewStreamWrapper(in);
TypeSerializer<S> serializer =
stateListForName.getStateMetaInfo().getPartitionStateSerializer();
for (long offset : offsets) {
in.seek(offset);
stateListForName.add(serializer.deserialize(div));
}
}
}
}
private <K, V> void deserializeBroadcastStateValues(
final BackendWritableBroadcastState<K, V> broadcastStateForName,
final FSDataInputStream in,
final OperatorStateHandle.StateMetaInfo metaInfo)
throws Exception {
if (metaInfo != null) {
long[] offsets = metaInfo.getOffsets();
if (offsets != null) {
TypeSerializer<K> keySerializer =
broadcastStateForName.getStateMetaInfo().getKeySerializer();
TypeSerializer<V> valueSerializer =
broadcastStateForName.getStateMetaInfo().getValueSerializer();
in.seek(offsets[0]);
DataInputView div = new DataInputViewStreamWrapper(in);
int size = div.readInt();
for (int i = 0; i < size; i++) {
broadcastStateForName.put(
keySerializer.deserialize(div), valueSerializer.deserialize(div));
}
}
}
}
} | class OperatorStateRestoreOperation implements RestoreOperation<Void> {
private final CloseableRegistry closeStreamOnCancelRegistry;
private final ClassLoader userClassloader;
private final Map<String, PartitionableListState<?>> registeredOperatorStates;
private final Map<String, BackendWritableBroadcastState<?, ?>> registeredBroadcastStates;
private final Collection<OperatorStateHandle> stateHandles;
public OperatorStateRestoreOperation(
CloseableRegistry closeStreamOnCancelRegistry,
ClassLoader userClassloader,
Map<String, PartitionableListState<?>> registeredOperatorStates,
Map<String, BackendWritableBroadcastState<?, ?>> registeredBroadcastStates,
@Nonnull Collection<OperatorStateHandle> stateHandles) {
this.closeStreamOnCancelRegistry = closeStreamOnCancelRegistry;
this.userClassloader = userClassloader;
this.registeredOperatorStates = registeredOperatorStates;
this.registeredBroadcastStates = registeredBroadcastStates;
this.stateHandles = stateHandles;
}
@Override
private <S> void deserializeOperatorStateValues(
PartitionableListState<S> stateListForName,
FSDataInputStream in,
OperatorStateHandle.StateMetaInfo metaInfo)
throws IOException {
if (null != metaInfo) {
long[] offsets = metaInfo.getOffsets();
if (null != offsets) {
DataInputView div = new DataInputViewStreamWrapper(in);
TypeSerializer<S> serializer =
stateListForName.getStateMetaInfo().getPartitionStateSerializer();
for (long offset : offsets) {
in.seek(offset);
stateListForName.add(serializer.deserialize(div));
}
}
}
}
private <K, V> void deserializeBroadcastStateValues(
final BackendWritableBroadcastState<K, V> broadcastStateForName,
final FSDataInputStream in,
final OperatorStateHandle.StateMetaInfo metaInfo)
throws Exception {
if (metaInfo != null) {
long[] offsets = metaInfo.getOffsets();
if (offsets != null) {
TypeSerializer<K> keySerializer =
broadcastStateForName.getStateMetaInfo().getKeySerializer();
TypeSerializer<V> valueSerializer =
broadcastStateForName.getStateMetaInfo().getValueSerializer();
in.seek(offsets[0]);
DataInputView div = new DataInputViewStreamWrapper(in);
int size = div.readInt();
for (int i = 0; i < size; i++) {
broadcastStateForName.put(
keySerializer.deserialize(div), valueSerializer.deserialize(div));
}
}
}
}
} |
I think this is completely wonderful. I believe then the code for `completeRequest()` should be: ``` private void completeRequest(List<RequestEntryT> failedRequestEntries, long requestStartTime) { // do completeRequest stuff including reducing the inFlightRequestsCount, etc. mailboxExecutor.tryYield(); nonBlockingFlush(); } ``` We would get all the previously alluded to benefits. My only question (and maybe worry) would be if we had a very large number of in flight requests that have all completed (say 100+? since it's customisable by user). Once one `completeRequest` is triggered, the `mailboxExecutor.tryYield()` would repeatedly yield to the next completed request in a daisy chain of length equal to the number of completed in flight requests. I imagine the state of the mailbox thread would have to live alongside the others' states during that time and I was wondering would that risk us using more stack/heap than otherwise and thereby risking an overflow? | private void flush() {
while (inFlightRequestsCount >= maxInFlightRequests) {
try {
mailboxExecutor.yield();
} catch (InterruptedException e) {
getFatalExceptionCons()
.accept(
new InterruptedException(
"The mailbox thread was interrupted while waiting for asynchronous write operations to complete."));
}
}
List<RequestEntryT> batch = createNextAvailableBatch();
if (batch.size() == 0) {
return;
}
long timestampOfRequest = System.currentTimeMillis();
Consumer<List<RequestEntryT>> requestResult =
failedRequestEntries ->
mailboxExecutor.execute(
() -> completeRequest(failedRequestEntries, timestampOfRequest),
"Mark in-flight request as completed and requeue %d request entries",
failedRequestEntries.size());
inFlightRequestsCount++;
submitRequestEntries(batch, requestResult);
} | failedRequestEntries -> | private void flush() throws InterruptedException {
while (isInFlightRequestOrMessageLimitExceeded()) {
mailboxExecutor.yield();
}
List<RequestEntryT> batch = createNextAvailableBatch();
int batchSize = batch.size();
if (batch.size() == 0) {
return;
}
long timestampOfRequest = System.currentTimeMillis();
Consumer<List<RequestEntryT>> requestResult =
failedRequestEntries ->
mailboxExecutor.execute(
() ->
completeRequest(
failedRequestEntries,
batchSize,
timestampOfRequest),
"Mark in-flight request as completed and requeue %d request entries",
failedRequestEntries.size());
inFlightRequestsCount++;
inFlightMessages += batchSize;
submitRequestEntries(batch, requestResult);
} | class AsyncSinkWriter<InputT, RequestEntryT extends Serializable>
implements StatefulSink.StatefulSinkWriter<InputT, BufferedRequestState<RequestEntryT>> {
private final MailboxExecutor mailboxExecutor;
private final ProcessingTimeService timeService;
/* The timestamp of the previous batch of records was sent from this sink. */
private long lastSendTimestamp = 0;
/* The timestamp of the response to the previous request from this sink. */
private long ackTime = Long.MAX_VALUE;
/* The sink writer metric group. */
private final SinkWriterMetricGroup metrics;
/* Counter for number of bytes this sink has attempted to send to the destination. */
private final Counter numBytesOutCounter;
/* Counter for number of records this sink has attempted to send to the destination. */
private final Counter numRecordsOutCounter;
private final int maxBatchSize;
private final int maxInFlightRequests;
private final int maxBufferedRequests;
private final long maxBatchSizeInBytes;
private final long maxTimeInBufferMS;
private final long maxRecordSizeInBytes;
/**
* The ElementConverter provides a mapping between for the elements of a stream to request
* entries that can be sent to the destination.
*
* <p>The resulting request entry is buffered by the AsyncSinkWriter and sent to the destination
* when the {@code submitRequestEntries} method is invoked.
*/
private final ElementConverter<InputT, RequestEntryT> elementConverter;
/**
* Buffer to hold request entries that should be persisted into the destination, along with its
* size in bytes.
*
* <p>A request entry contain all relevant details to make a call to the destination. Eg, for
* Kinesis Data Streams a request entry contains the payload and partition key.
*
* <p>It seems more natural to buffer InputT, ie, the events that should be persisted, rather
* than RequestEntryT. However, in practice, the response of a failed request call can make it
* very hard, if not impossible, to reconstruct the original event. It is much easier, to just
* construct a new (retry) request entry from the response and add that back to the queue for
* later retry.
*/
private final Deque<RequestEntryWrapper<RequestEntryT>> bufferedRequestEntries =
new ArrayDeque<>();
/**
* Tracks all pending async calls that have been executed since the last checkpoint. Calls that
* completed (successfully or unsuccessfully) are automatically decrementing the counter. Any
* request entry that was not successfully persisted needs to be handled and retried by the
* logic in {@code submitRequestsToApi}.
*
* <p>There is a limit on the number of concurrent (async) requests that can be handled by the
* client library. This limit is enforced by checking the queue size before accepting a new
* element into the queue.
*
* <p>To complete a checkpoint, we need to make sure that no requests are in flight, as they may
* fail, which could then lead to data loss.
*/
private int inFlightRequestsCount;
/**
* Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate
* the criterion for flushing after {@code maxBatchSizeInBytes} is reached.
*/
private double bufferedRequestEntriesTotalSizeInBytes;
private boolean existsActiveTimerCallback = false;
/**
* The {@code accept} method should be called on this Consumer if the processing of the {@code
* requestEntries} raises an exception that should not be retried. Specifically, any action that
* we are sure will result in the same exception no matter how many times we retry should raise
* a {@code RuntimeException} here. For example, wrong user credentials. However, it is possible
* intermittent failures will recover, e.g. flaky network connections, in which case, some other
* mechanism may be more appropriate.
*/
private final Consumer<Exception> fatalExceptionCons;
/**
* This method specifies how to persist buffered request entries into the destination. It is
* implemented when support for a new destination is added.
*
* <p>The method is invoked with a set of request entries according to the buffering hints (and
* the valid limits of the destination). The logic then needs to create and execute the request
* asynchronously against the destination (ideally by batching together multiple request entries
* to increase efficiency). The logic also needs to identify individual request entries that
* were not persisted successfully and resubmit them using the {@code requestResult} callback.
*
* <p>From a threading perspective, the mailbox thread will call this method and initiate the
* asynchronous request to persist the {@code requestEntries}. NOTE: The client must support
* asynchronous requests and the method called to persist the records must asynchronously
* execute and return a future with the results of that request. A thread from the destination
* client thread pool should complete the request and submit the failed entries that should be
* retried. The {@code requestResult} will then trigger the mailbox thread to requeue the
* unsuccessful elements.
*
* <p>An example implementation of this method is included:
*
* <pre>{@code
* @Override
* protected void submitRequestEntries
* (List<RequestEntryT> records, Consumer<Collection<RequestEntryT>> requestResult) {
* Future<Response> response = destinationClient.putRecords(records);
* response.whenComplete(
* (response, error) -> {
* if(error){
* List<RequestEntryT> retryableFailedRecords = getRetryableFailed(response);
* requestResult.accept(retryableFailedRecords);
* }else{
* requestResult.accept(Collections.emptyList());
* }
* }
* );
* }
*
* }</pre>
*
* <p>During checkpointing, the sink needs to ensure that there are no outstanding in-flight
* requests.
*
* @param requestEntries a set of request entries that should be sent to the destination
* @param requestResult the {@code accept} method should be called on this Consumer once the
* processing of the {@code requestEntries} are complete. Any entries that encountered
* difficulties in persisting should be re-queued through {@code requestResult} by including
* that element in the collection of {@code RequestEntryT}s passed to the {@code accept}
* method. All other elements are assumed to have been successfully persisted.
*/
protected abstract void submitRequestEntries(
List<RequestEntryT> requestEntries, Consumer<List<RequestEntryT>> requestResult);
/**
* This method allows the getting of the size of a {@code RequestEntryT} in bytes. The size in
* this case is measured as the total bytes that is written to the destination as a result of
* persisting this particular {@code RequestEntryT} rather than the serialized length (which may
* be the same).
*
* @param requestEntry the requestEntry for which we want to know the size
* @return the size of the requestEntry, as defined previously
*/
protected abstract long getSizeInBytes(RequestEntryT requestEntry);
    /**
     * Creates an {@code AsyncSinkWriter} with no pre-existing buffered state; delegates to the
     * stateful constructor with an empty state collection.
     */
    public AsyncSinkWriter(
            ElementConverter<InputT, RequestEntryT> elementConverter,
            Sink.InitContext context,
            int maxBatchSize,
            int maxInFlightRequests,
            int maxBufferedRequests,
            long maxBatchSizeInBytes,
            long maxTimeInBufferMS,
            long maxRecordSizeInBytes) {
        this(
                elementConverter,
                context,
                maxBatchSize,
                maxInFlightRequests,
                maxBufferedRequests,
                maxBatchSizeInBytes,
                maxTimeInBufferMS,
                maxRecordSizeInBytes,
                Collections.emptyList());
    }
    /**
     * Creates an {@code AsyncSinkWriter}, validating the user-supplied buffering hints and
     * restoring any request entries that were buffered at the last checkpoint.
     *
     * @param states buffered request entries captured at the last checkpoint; may be empty
     * @throws IllegalArgumentException if any buffering hint is non-positive or inconsistent
     */
    public AsyncSinkWriter(
            ElementConverter<InputT, RequestEntryT> elementConverter,
            Sink.InitContext context,
            int maxBatchSize,
            int maxInFlightRequests,
            int maxBufferedRequests,
            long maxBatchSizeInBytes,
            long maxTimeInBufferMS,
            long maxRecordSizeInBytes,
            Collection<BufferedRequestState<RequestEntryT>> states) {
        this.elementConverter = elementConverter;
        this.mailboxExecutor = context.getMailboxExecutor();
        this.timeService = context.getProcessingTimeService();
        // NOTE(review): the null check runs after the field assignment above; construction still
        // aborts on null, but a fail-fast check before assignment would be cleaner — confirm.
        Preconditions.checkNotNull(elementConverter);
        Preconditions.checkArgument(maxBatchSize > 0);
        Preconditions.checkArgument(maxBufferedRequests > 0);
        Preconditions.checkArgument(maxInFlightRequests > 0);
        Preconditions.checkArgument(maxBatchSizeInBytes > 0);
        Preconditions.checkArgument(maxTimeInBufferMS > 0);
        Preconditions.checkArgument(maxRecordSizeInBytes > 0);
        Preconditions.checkArgument(
                maxBufferedRequests > maxBatchSize,
                "The maximum number of requests that may be buffered should be strictly"
                        + " greater than the maximum number of requests per batch.");
        Preconditions.checkArgument(
                maxBatchSizeInBytes >= maxRecordSizeInBytes,
                "The maximum allowed size in bytes per flush must be greater than or equal to the"
                        + " maximum allowed size in bytes of a single record.");
        this.maxBatchSize = maxBatchSize;
        this.maxInFlightRequests = maxInFlightRequests;
        this.maxBufferedRequests = maxBufferedRequests;
        this.maxBatchSizeInBytes = maxBatchSizeInBytes;
        this.maxTimeInBufferMS = maxTimeInBufferMS;
        this.maxRecordSizeInBytes = maxRecordSizeInBytes;
        this.inFlightRequestsCount = 0;
        this.bufferedRequestEntriesTotalSizeInBytes = 0;
        this.metrics = context.metricGroup();
        // Send time is reported as the gap between the last request and its acknowledgement.
        this.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp);
        this.numBytesOutCounter = this.metrics.getIOMetricGroup().getNumBytesOutCounter();
        this.numRecordsOutCounter = this.metrics.getIOMetricGroup().getNumRecordsOutCounter();
        // Fatal (non-retryable) exceptions are rethrown on the mailbox thread to fail the task.
        this.fatalExceptionCons =
                exception ->
                        mailboxExecutor.execute(
                                () -> {
                                    throw exception;
                                },
                                "A fatal exception occurred in the sink that cannot be recovered from or should not be retried.");
        initializeState(states);
    }
    /**
     * Registers a one-shot processing-time timer that flushes everything still buffered after
     * {@code maxTimeInBufferMS}, so entries do not sit in the buffer indefinitely.
     */
    private void registerCallback() {
        ProcessingTimeService.ProcessingTimeCallback ptc =
                instant -> {
                    // Allow a new timer to be registered once this one has fired.
                    existsActiveTimerCallback = false;
                    while (!bufferedRequestEntries.isEmpty()) {
                        flush();
                    }
                };
        timeService.registerTimer(timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc);
        existsActiveTimerCallback = true;
    }
    @Override
    public void write(InputT element, Context context) throws IOException, InterruptedException {
        // Cooperatively block until the buffer has room for one more entry.
        while (bufferedRequestEntries.size() >= maxBufferedRequests) {
            flush();
        }
        addEntryToBuffer(elementConverter.apply(element, context), false);
        // Kick off a flush only if it would not block (see nonBlockingFlush).
        nonBlockingFlush();
    }
    /**
     * Flushes without blocking: first drains any mail already available on the mailbox (which may
     * complete in-flight requests), then flushes only while {@code inFlightRequestsCount} is
     * strictly smaller than {@code maxInFlightRequests} AND one of the following holds:
     *
     * <ul>
     *   <li>The number of elements buffered is greater than or equal to the {@code maxBatchSize}
     *   <li>The sum of the size in bytes of all records in the buffer is greater than or equal to
     *       {@code maxBatchSizeInBytes}
     * </ul>
     */
    private void nonBlockingFlush() {
        boolean uncompletedInFlightResponses = true;
        // tryYield() processes one enqueued mail and returns false when none remain.
        while (uncompletedInFlightResponses) {
            uncompletedInFlightResponses = mailboxExecutor.tryYield();
        }
        while (inFlightRequestsCount < maxInFlightRequests
                && (bufferedRequestEntries.size() >= maxBatchSize
                        || bufferedRequestEntriesTotalSizeInBytes >= maxBatchSizeInBytes)) {
            flush();
        }
    }
/**
* Persists buffered RequestsEntries into the destination by invoking {@code
* submitRequestEntries} with batches according to the user specified buffering hints.
*
* <p>The method blocks if too many async requests are in flight.
*/
/**
* Creates the next batch of request entries while respecting the {@code maxBatchSize} and
* {@code maxBatchSizeInBytes}. Also adds these to the metrics counters.
*/
private List<RequestEntryT> createNextAvailableBatch() {
int batchSize = Math.min(maxBatchSize, bufferedRequestEntries.size());
List<RequestEntryT> batch = new ArrayList<>(batchSize);
int batchSizeBytes = 0;
for (int i = 0; i < batchSize; i++) {
long requestEntrySize = bufferedRequestEntries.peek().getSize();
if (batchSizeBytes + requestEntrySize > maxBatchSizeInBytes) {
break;
}
RequestEntryWrapper<RequestEntryT> elem = bufferedRequestEntries.remove();
batch.add(elem.getRequestEntry());
bufferedRequestEntriesTotalSizeInBytes -= requestEntrySize;
batchSizeBytes += requestEntrySize;
}
numRecordsOutCounter.inc(batch.size());
numBytesOutCounter.inc(batchSizeBytes);
return batch;
}
/**
* Marks an in-flight request as completed and prepends failed requestEntries back to the
* internal requestEntry buffer for later retry.
*
* @param failedRequestEntries requestEntries that need to be retried
*/
private void completeRequest(List<RequestEntryT> failedRequestEntries, long requestStartTime) {
lastSendTimestamp = requestStartTime;
ackTime = System.currentTimeMillis();
inFlightRequestsCount--;
ListIterator<RequestEntryT> iterator =
failedRequestEntries.listIterator(failedRequestEntries.size());
while (iterator.hasPrevious()) {
addEntryToBuffer(iterator.previous(), true);
}
}
    /**
     * Wraps {@code entry} with its byte size and adds it to the buffer, rejecting entries larger
     * than {@code maxRecordSizeInBytes}.
     *
     * @param entry the request entry to buffer
     * @param insertAtHead true to prepend (retry path), false to append (write path)
     * @throws IllegalArgumentException if the entry exceeds the maximum record size
     */
    private void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) {
        // First entry into an empty buffer (re)arms the flush timer.
        if (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) {
            registerCallback();
        }
        RequestEntryWrapper<RequestEntryT> wrappedEntry =
                new RequestEntryWrapper<>(entry, getSizeInBytes(entry));
        if (wrappedEntry.getSize() > maxRecordSizeInBytes) {
            throw new IllegalArgumentException(
                    String.format(
                            "The request entry sent to the buffer was of size [%s], when the maxRecordSizeInBytes was set to [%s].",
                            wrappedEntry.getSize(), maxRecordSizeInBytes));
        }
        if (insertAtHead) {
            bufferedRequestEntries.addFirst(wrappedEntry);
        } else {
            bufferedRequestEntries.add(wrappedEntry);
        }
        bufferedRequestEntriesTotalSizeInBytes += wrappedEntry.getSize();
    }
    /**
     * In flight requests will be retried if the sink is still healthy. But if in-flight requests
     * fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint,
     * the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any
     * outstanding in-flight requests when a commit is initialized.
     *
     * <p>To this end, all in-flight requests need to completed before proceeding with the commit.
     *
     * @param flush when true, also drain the buffer itself, not just the in-flight requests
     */
    @Override
    public void flush(boolean flush) {
        while (inFlightRequestsCount > 0 || (bufferedRequestEntries.size() > 0 && flush)) {
            // NOTE(review): tryYield() returns immediately when no mail is available, so this
            // loop may busy-spin while waiting for responses — confirm this is intended.
            mailboxExecutor.tryYield();
            if (flush) {
                flush();
            }
        }
    }
/**
* All in-flight requests that are relevant for the snapshot have been completed, but there may
* still be request entries in the internal buffers that are yet to be sent to the endpoint.
* These request entries are stored in the snapshot state so that they don't get lost in case of
* a failure/restart of the application.
*/
@Override
public List<BufferedRequestState<RequestEntryT>> snapshotState(long checkpointId) {
return Collections.singletonList(new BufferedRequestState<>((bufferedRequestEntries)));
}
private void initializeState(Collection<BufferedRequestState<RequestEntryT>> states) {
for (BufferedRequestState<RequestEntryT> state : states) {
initializeState(state);
}
}
private void initializeState(BufferedRequestState<RequestEntryT> state) {
this.bufferedRequestEntries.addAll(state.getBufferedRequestEntries());
for (RequestEntryWrapper<RequestEntryT> wrapper : bufferedRequestEntries) {
if (wrapper.getSize() > maxRecordSizeInBytes) {
throw new IllegalStateException(
String.format(
"State contains record of size %d which exceeds sink maximum record size %d.",
wrapper.getSize(), maxRecordSizeInBytes));
}
}
this.bufferedRequestEntriesTotalSizeInBytes += state.getStateSize();
}
    @Override
    public void close() {} // No-op: this writer holds no closeable resources of its own.
    /** Returns the consumer that rethrows non-retryable exceptions on the mailbox thread. */
    protected Consumer<Exception> getFatalExceptionCons() {
        return fatalExceptionCons;
    }
} | class AsyncSinkWriter<InputT, RequestEntryT extends Serializable>
implements StatefulSink.StatefulSinkWriter<InputT, BufferedRequestState<RequestEntryT>> {
private static final int INFLIGHT_MESSAGES_LIMIT_INCREASE_RATE = 10;
private static final double INFLIGHT_MESSAGES_LIMIT_DECREASE_FACTOR = 0.5;
private final MailboxExecutor mailboxExecutor;
private final ProcessingTimeService timeService;
/* The timestamp of the previous batch of records was sent from this sink. */
private long lastSendTimestamp = 0;
/* The timestamp of the response to the previous request from this sink. */
private long ackTime = Long.MAX_VALUE;
/* The sink writer metric group. */
private final SinkWriterMetricGroup metrics;
/* Counter for number of bytes this sink has attempted to send to the destination. */
private final Counter numBytesOutCounter;
/* Counter for number of records this sink has attempted to send to the destination. */
private final Counter numRecordsOutCounter;
/**
* Rate limiting strategy {@code inflightMessages} at any given time, {@code
* rateLimitingStrategy.getRateLimit()} is used to adjust the sink's throughput not to exceed
* destination's throttle rate.
*
* <p>throttled requests should update limit by calling {@code rateLimitingStrategy.scaleDown()}
* and successful requests should update by calling {@code rateLimitingStrategy.scaleUp()}
*
* <p>Failure of throttled request decreases limit resulting in yielding on fewer number of
* messages.
*/
private final AIMDRateLimitingStrategy rateLimitingStrategy;
private final int maxBatchSize;
private final int maxInFlightRequests;
private final int maxBufferedRequests;
private final long maxBatchSizeInBytes;
private final long maxTimeInBufferMS;
private final long maxRecordSizeInBytes;
/**
* The ElementConverter provides a mapping between for the elements of a stream to request
* entries that can be sent to the destination.
*
* <p>The resulting request entry is buffered by the AsyncSinkWriter and sent to the destination
* when the {@code submitRequestEntries} method is invoked.
*/
private final ElementConverter<InputT, RequestEntryT> elementConverter;
/**
* Buffer to hold request entries that should be persisted into the destination, along with its
* size in bytes.
*
* <p>A request entry contain all relevant details to make a call to the destination. Eg, for
* Kinesis Data Streams a request entry contains the payload and partition key.
*
* <p>It seems more natural to buffer InputT, ie, the events that should be persisted, rather
* than RequestEntryT. However, in practice, the response of a failed request call can make it
* very hard, if not impossible, to reconstruct the original event. It is much easier, to just
* construct a new (retry) request entry from the response and add that back to the queue for
* later retry.
*/
private final Deque<RequestEntryWrapper<RequestEntryT>> bufferedRequestEntries =
new ArrayDeque<>();
/**
* Tracks all pending async calls that have been executed since the last checkpoint. Calls that
* completed (successfully or unsuccessfully) are automatically decrementing the counter. Any
* request entry that was not successfully persisted needs to be handled and retried by the
* logic in {@code submitRequestsToApi}.
*
* <p>There is a limit on the number of concurrent (async) requests that can be handled by the
* client library. This limit is enforced by checking the queue size before accepting a new
* element into the queue.
*
* <p>To complete a checkpoint, we need to make sure that no requests are in flight, as they may
* fail, which could then lead to data loss.
*/
private int inFlightRequestsCount;
/**
* Tracks number of messages (request entries) in the inflight requests. This variable is used
* to control rate of outbound messages flow as {@code inFlightMessages} should not exceed
* {@code rateLimitingStrategy}.
*
* <p>{@code inFlightMessages} should also be consistent with {@code inFlightRequestsCount}
* where {@code inFlightMessages} should never exceed {@code inFlightRequestsCount} at any time.
*/
private int inFlightMessages;
/**
* Tracks the cumulative size of all elements in {@code bufferedRequestEntries} to facilitate
* the criterion for flushing after {@code maxBatchSizeInBytes} is reached.
*/
private double bufferedRequestEntriesTotalSizeInBytes;
private boolean existsActiveTimerCallback = false;
/**
* The {@code accept} method should be called on this Consumer if the processing of the {@code
* requestEntries} raises an exception that should not be retried. Specifically, any action that
* we are sure will result in the same exception no matter how many times we retry should raise
* a {@code RuntimeException} here. For example, wrong user credentials. However, it is possible
* intermittent failures will recover, e.g. flaky network connections, in which case, some other
* mechanism may be more appropriate.
*/
private final Consumer<Exception> fatalExceptionCons;
/**
* This method specifies how to persist buffered request entries into the destination. It is
* implemented when support for a new destination is added.
*
* <p>The method is invoked with a set of request entries according to the buffering hints (and
* the valid limits of the destination). The logic then needs to create and execute the request
* asynchronously against the destination (ideally by batching together multiple request entries
* to increase efficiency). The logic also needs to identify individual request entries that
* were not persisted successfully and resubmit them using the {@code requestResult} callback.
*
* <p>From a threading perspective, the mailbox thread will call this method and initiate the
* asynchronous request to persist the {@code requestEntries}. NOTE: The client must support
* asynchronous requests and the method called to persist the records must asynchronously
* execute and return a future with the results of that request. A thread from the destination
* client thread pool should complete the request and submit the failed entries that should be
* retried. The {@code requestResult} will then trigger the mailbox thread to requeue the
* unsuccessful elements.
*
* <p>An example implementation of this method is included:
*
* <pre>{@code
* @Override
* protected void submitRequestEntries
* (List<RequestEntryT> records, Consumer<Collection<RequestEntryT>> requestResult) {
* Future<Response> response = destinationClient.putRecords(records);
* response.whenComplete(
* (response, error) -> {
* if(error){
* List<RequestEntryT> retryableFailedRecords = getRetryableFailed(response);
* requestResult.accept(retryableFailedRecords);
* }else{
* requestResult.accept(Collections.emptyList());
* }
* }
* );
* }
*
* }</pre>
*
* <p>During checkpointing, the sink needs to ensure that there are no outstanding in-flight
* requests.
*
* @param requestEntries a set of request entries that should be sent to the destination
* @param requestResult the {@code accept} method should be called on this Consumer once the
* processing of the {@code requestEntries} are complete. Any entries that encountered
* difficulties in persisting should be re-queued through {@code requestResult} by including
* that element in the collection of {@code RequestEntryT}s passed to the {@code accept}
* method. All other elements are assumed to have been successfully persisted.
*/
protected abstract void submitRequestEntries(
List<RequestEntryT> requestEntries, Consumer<List<RequestEntryT>> requestResult);
/**
* This method allows the getting of the size of a {@code RequestEntryT} in bytes. The size in
* this case is measured as the total bytes that is written to the destination as a result of
* persisting this particular {@code RequestEntryT} rather than the serialized length (which may
* be the same).
*
* @param requestEntry the requestEntry for which we want to know the size
* @return the size of the requestEntry, as defined previously
*/
protected abstract long getSizeInBytes(RequestEntryT requestEntry);
    /**
     * Creates an {@code AsyncSinkWriter} with no pre-existing buffered state; delegates to the
     * stateful constructor with an empty state collection.
     */
    public AsyncSinkWriter(
            ElementConverter<InputT, RequestEntryT> elementConverter,
            Sink.InitContext context,
            int maxBatchSize,
            int maxInFlightRequests,
            int maxBufferedRequests,
            long maxBatchSizeInBytes,
            long maxTimeInBufferMS,
            long maxRecordSizeInBytes) {
        this(
                elementConverter,
                context,
                maxBatchSize,
                maxInFlightRequests,
                maxBufferedRequests,
                maxBatchSizeInBytes,
                maxTimeInBufferMS,
                maxRecordSizeInBytes,
                Collections.emptyList());
    }
    /**
     * Creates an {@code AsyncSinkWriter}, validating the user-supplied buffering hints,
     * initializing the AIMD rate limiting strategy, and restoring any request entries buffered at
     * the last checkpoint.
     *
     * @param states buffered request entries captured at the last checkpoint; may be empty
     * @throws IllegalArgumentException if any buffering hint is non-positive or inconsistent
     */
    public AsyncSinkWriter(
            ElementConverter<InputT, RequestEntryT> elementConverter,
            Sink.InitContext context,
            int maxBatchSize,
            int maxInFlightRequests,
            int maxBufferedRequests,
            long maxBatchSizeInBytes,
            long maxTimeInBufferMS,
            long maxRecordSizeInBytes,
            Collection<BufferedRequestState<RequestEntryT>> states) {
        this.elementConverter = elementConverter;
        this.mailboxExecutor = context.getMailboxExecutor();
        this.timeService = context.getProcessingTimeService();
        // NOTE(review): the null check runs after the field assignment above; construction still
        // aborts on null, but a fail-fast check before assignment would be cleaner — confirm.
        Preconditions.checkNotNull(elementConverter);
        Preconditions.checkArgument(maxBatchSize > 0);
        Preconditions.checkArgument(maxBufferedRequests > 0);
        Preconditions.checkArgument(maxInFlightRequests > 0);
        Preconditions.checkArgument(maxBatchSizeInBytes > 0);
        Preconditions.checkArgument(maxTimeInBufferMS > 0);
        Preconditions.checkArgument(maxRecordSizeInBytes > 0);
        Preconditions.checkArgument(
                maxBufferedRequests > maxBatchSize,
                "The maximum number of requests that may be buffered should be strictly"
                        + " greater than the maximum number of requests per batch.");
        Preconditions.checkArgument(
                maxBatchSizeInBytes >= maxRecordSizeInBytes,
                "The maximum allowed size in bytes per flush must be greater than or equal to the"
                        + " maximum allowed size in bytes of a single record.");
        this.maxBatchSize = maxBatchSize;
        this.maxInFlightRequests = maxInFlightRequests;
        this.maxBufferedRequests = maxBufferedRequests;
        this.maxBatchSizeInBytes = maxBatchSizeInBytes;
        this.maxTimeInBufferMS = maxTimeInBufferMS;
        this.maxRecordSizeInBytes = maxRecordSizeInBytes;
        this.inFlightRequestsCount = 0;
        this.bufferedRequestEntriesTotalSizeInBytes = 0;
        this.inFlightMessages = 0;
        // Both strategy limits are maxBatchSize * maxInFlightRequests (presumably initial and
        // maximum — confirm parameter order against AIMDRateLimitingStrategy).
        // NOTE(review): the int product maxBatchSize * maxInFlightRequests could overflow for
        // very large hints — consider Math.multiplyExact.
        this.rateLimitingStrategy =
                new AIMDRateLimitingStrategy(
                        INFLIGHT_MESSAGES_LIMIT_INCREASE_RATE,
                        INFLIGHT_MESSAGES_LIMIT_DECREASE_FACTOR,
                        maxBatchSize * maxInFlightRequests,
                        maxBatchSize * maxInFlightRequests);
        this.metrics = context.metricGroup();
        // Send time is reported as the gap between the last request and its acknowledgement.
        this.metrics.setCurrentSendTimeGauge(() -> this.ackTime - this.lastSendTimestamp);
        this.numBytesOutCounter = this.metrics.getIOMetricGroup().getNumBytesOutCounter();
        this.numRecordsOutCounter = this.metrics.getIOMetricGroup().getNumRecordsOutCounter();
        // Fatal (non-retryable) exceptions are rethrown on the mailbox thread to fail the task.
        this.fatalExceptionCons =
                exception ->
                        mailboxExecutor.execute(
                                () -> {
                                    throw exception;
                                },
                                "A fatal exception occurred in the sink that cannot be recovered from or should not be retried.");
        initializeState(states);
    }
    /**
     * Registers a one-shot processing-time timer that flushes everything still buffered after
     * {@code maxTimeInBufferMS}, so entries do not sit in the buffer indefinitely.
     */
    private void registerCallback() {
        ProcessingTimeService.ProcessingTimeCallback ptc =
                instant -> {
                    // Allow a new timer to be registered once this one has fired.
                    existsActiveTimerCallback = false;
                    while (!bufferedRequestEntries.isEmpty()) {
                        flush();
                    }
                };
        timeService.registerTimer(timeService.getCurrentProcessingTime() + maxTimeInBufferMS, ptc);
        existsActiveTimerCallback = true;
    }
    @Override
    public void write(InputT element, Context context) throws IOException, InterruptedException {
        // Cooperatively block until the buffer has room for one more entry.
        while (bufferedRequestEntries.size() >= maxBufferedRequests) {
            flush();
        }
        addEntryToBuffer(elementConverter.apply(element, context), false);
        // Kick off a flush only if it would not block (see nonBlockingFlush).
        nonBlockingFlush();
    }
    /**
     * Flushes without blocking: proceeds only while neither the in-flight request limit nor the
     * rate-limited in-flight message limit is exceeded, AND one of the following holds:
     *
     * <ul>
     *   <li>The number of elements buffered is greater than or equal to the current batch size
     *       limit (the configured {@code maxBatchSize} capped by the rate limiting strategy)
     *   <li>The sum of the size in bytes of all records in the buffer is greater than or equal to
     *       {@code maxBatchSizeInBytes}
     * </ul>
     */
    private void nonBlockingFlush() throws InterruptedException {
        while (!isInFlightRequestOrMessageLimitExceeded()
                && (bufferedRequestEntries.size() >= getNextBatchSizeLimit()
                        || bufferedRequestEntriesTotalSizeInBytes >= maxBatchSizeInBytes)) {
            flush();
        }
    }
/**
* Determines if the sink should block and complete existing in flight requests before it may
* prudently create any new ones. This is exactly determined by if the number of requests
* currently in flight exceeds the maximum supported by the sink OR if the number of in flight
* messages exceeds the maximum determined to be appropriate by the rate limiting strategy.
*/
private boolean isInFlightRequestOrMessageLimitExceeded() {
return inFlightRequestsCount >= maxInFlightRequests
|| inFlightMessages >= rateLimitingStrategy.getRateLimit();
}
/**
* Persists buffered RequestsEntries into the destination by invoking {@code
* submitRequestEntries} with batches according to the user specified buffering hints.
*
* <p>The method blocks if too many async requests are in flight.
*/
/**
* Creates the next batch of request entries while respecting the {@code maxBatchSize} and
* {@code maxBatchSizeInBytes}. Also adds these to the metrics counters.
*/
private List<RequestEntryT> createNextAvailableBatch() {
int batchSize = Math.min(getNextBatchSizeLimit(), bufferedRequestEntries.size());
List<RequestEntryT> batch = new ArrayList<>(batchSize);
int batchSizeBytes = 0;
for (int i = 0; i < batchSize; i++) {
long requestEntrySize = bufferedRequestEntries.peek().getSize();
if (batchSizeBytes + requestEntrySize > maxBatchSizeInBytes) {
break;
}
RequestEntryWrapper<RequestEntryT> elem = bufferedRequestEntries.remove();
batch.add(elem.getRequestEntry());
bufferedRequestEntriesTotalSizeInBytes -= requestEntrySize;
batchSizeBytes += requestEntrySize;
}
numRecordsOutCounter.inc(batch.size());
numBytesOutCounter.inc(batchSizeBytes);
return batch;
}
    /**
     * Marks an in-flight request as completed and prepends failed requestEntries back to the
     * internal requestEntry buffer for later retry.
     *
     * @param failedRequestEntries requestEntries that need to be retried
     * @param batchSize number of messages the completed request carried
     * @param requestStartTime timestamp at which the completed request was submitted
     */
    private void completeRequest(
            List<RequestEntryT> failedRequestEntries, int batchSize, long requestStartTime)
            throws InterruptedException {
        lastSendTimestamp = requestStartTime;
        ackTime = System.currentTimeMillis();
        inFlightRequestsCount--;
        inFlightMessages -= batchSize;
        // A fully successful batch scales the rate limit up; any failure scales it down.
        updateInFlightMessagesLimit(failedRequestEntries.size() == 0);
        // Walk the failed entries back-to-front so prepending preserves their relative order.
        ListIterator<RequestEntryT> iterator =
                failedRequestEntries.listIterator(failedRequestEntries.size());
        while (iterator.hasPrevious()) {
            addEntryToBuffer(iterator.previous(), true);
        }
        // Capacity was just freed; immediately try to dispatch more buffered entries.
        nonBlockingFlush();
    }
private void updateInFlightMessagesLimit(boolean isSuccessfulRequest) {
if (isSuccessfulRequest) {
rateLimitingStrategy.scaleUp();
} else {
rateLimitingStrategy.scaleDown();
}
}
    /**
     * Wraps {@code entry} with its byte size and adds it to the buffer, rejecting entries larger
     * than {@code maxRecordSizeInBytes}.
     *
     * @param entry the request entry to buffer
     * @param insertAtHead true to prepend (retry path), false to append (write path)
     * @throws IllegalArgumentException if the entry exceeds the maximum record size
     */
    private void addEntryToBuffer(RequestEntryT entry, boolean insertAtHead) {
        // First entry into an empty buffer (re)arms the flush timer.
        if (bufferedRequestEntries.isEmpty() && !existsActiveTimerCallback) {
            registerCallback();
        }
        RequestEntryWrapper<RequestEntryT> wrappedEntry =
                new RequestEntryWrapper<>(entry, getSizeInBytes(entry));
        if (wrappedEntry.getSize() > maxRecordSizeInBytes) {
            throw new IllegalArgumentException(
                    String.format(
                            "The request entry sent to the buffer was of size [%s], when the maxRecordSizeInBytes was set to [%s].",
                            wrappedEntry.getSize(), maxRecordSizeInBytes));
        }
        if (insertAtHead) {
            bufferedRequestEntries.addFirst(wrappedEntry);
        } else {
            bufferedRequestEntries.add(wrappedEntry);
        }
        bufferedRequestEntriesTotalSizeInBytes += wrappedEntry.getSize();
    }
    /**
     * In flight requests will be retried if the sink is still healthy. But if in-flight requests
     * fail after a checkpoint has been triggered and Flink needs to recover from the checkpoint,
     * the (failed) in-flight requests are gone and cannot be retried. Hence, there cannot be any
     * outstanding in-flight requests when a commit is initialized.
     *
     * <p>To this end, all in-flight requests need to completed before proceeding with the commit.
     *
     * @param flush when true, also drain the buffer itself, not just the in-flight requests
     */
    @Override
    public void flush(boolean flush) throws InterruptedException {
        while (inFlightRequestsCount > 0 || (bufferedRequestEntries.size() > 0 && flush)) {
            // Blocks on the mailbox only while responses are actually outstanding.
            yieldIfThereExistsInFlightRequests();
            if (flush) {
                flush();
            }
        }
    }
private void yieldIfThereExistsInFlightRequests() throws InterruptedException {
if (inFlightRequestsCount > 0) {
mailboxExecutor.yield();
}
}
/**
* All in-flight requests that are relevant for the snapshot have been completed, but there may
* still be request entries in the internal buffers that are yet to be sent to the endpoint.
* These request entries are stored in the snapshot state so that they don't get lost in case of
* a failure/restart of the application.
*/
@Override
public List<BufferedRequestState<RequestEntryT>> snapshotState(long checkpointId) {
return Collections.singletonList(new BufferedRequestState<>((bufferedRequestEntries)));
}
private void initializeState(Collection<BufferedRequestState<RequestEntryT>> states) {
for (BufferedRequestState<RequestEntryT> state : states) {
initializeState(state);
}
}
private void initializeState(BufferedRequestState<RequestEntryT> state) {
this.bufferedRequestEntries.addAll(state.getBufferedRequestEntries());
for (RequestEntryWrapper<RequestEntryT> wrapper : bufferedRequestEntries) {
if (wrapper.getSize() > maxRecordSizeInBytes) {
throw new IllegalStateException(
String.format(
"State contains record of size %d which exceeds sink maximum record size %d.",
wrapper.getSize(), maxRecordSizeInBytes));
}
}
this.bufferedRequestEntriesTotalSizeInBytes += state.getStateSize();
}
    @Override
    public void close() {} // No-op: this writer holds no closeable resources of its own.
    /** Current per-batch entry cap: {@code maxBatchSize}, further capped by the rate limit. */
    private int getNextBatchSizeLimit() {
        return Math.min(maxBatchSize, rateLimitingStrategy.getRateLimit());
    }
/** Returns the handler for fatal exceptions (exposed for subclasses). */
protected Consumer<Exception> getFatalExceptionCons() {
return fatalExceptionCons;
}
} |
maybe use `assertThat` instead of `assertEquals` | private void testPartitionReleaseAfterFinished(Consumer<Execution> postFinishedExecutionAction) throws Exception {
final Tuple2<JobID, Collection<ResultPartitionID>> releasedPartitions = Tuple2.of(null, null);
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
taskManagerGateway.setReleasePartitionsConsumer(releasedPartitions::setFields);
final SimpleSlot slot = new SimpleSlot(
new SingleSlotTestingSlotOwner(),
new LocalTaskManagerLocation(),
0,
taskManagerGateway);
final JobVertex producerVertex = createNoOpJobVertex();
final JobVertex consumerVertex = createNoOpJobVertex();
consumerVertex.connectNewDataSetAsInput(producerVertex, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(producerVertex.getID(), 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
producerVertex,
consumerVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(producerVertex.getID());
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
execution.deploy();
execution.switchToRunning();
execution.markFinished();
postFinishedExecutionAction.accept(execution);
assertEquals(executionGraph.getJobID(), releasedPartitions.f0);
assertEquals(executionVertex.getProducedPartitions().size(), releasedPartitions.f1.size());
for (ResultPartitionID partitionId : releasedPartitions.f1) {
IntermediateResultPartition intermediateResultPartition = executionVertex
.getProducedPartitions()
.get(partitionId.getPartitionId());
assertNotNull(intermediateResultPartition);
assertEquals(execution.getAttemptId(), partitionId.getProducerId());
}
} | assertEquals(executionGraph.getJobID(), releasedPartitions.f0); | private void testPartitionReleaseAfterFinished(Consumer<Execution> postFinishedExecutionAction) throws Exception {
final Tuple2<JobID, Collection<ResultPartitionID>> releasedPartitions = Tuple2.of(null, null);
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
taskManagerGateway.setReleasePartitionsConsumer(releasedPartitions::setFields);
final JobVertex producerVertex = createNoOpJobVertex();
final JobVertex consumerVertex = createNoOpJobVertex();
consumerVertex.connectNewDataSetAsInput(producerVertex, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);
final SimpleSlot slot = new SimpleSlot(
new SingleSlotTestingSlotOwner(),
new LocalTaskManagerLocation(),
0,
taskManagerGateway);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(producerVertex.getID(), 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
producerVertex,
consumerVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(producerVertex.getID());
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
execution.deploy();
execution.switchToRunning();
execution.markFinished();
postFinishedExecutionAction.accept(execution);
assertEquals(executionGraph.getJobID(), releasedPartitions.f0);
assertEquals(executionVertex.getProducedPartitions().size(), releasedPartitions.f1.size());
for (ResultPartitionID partitionId : releasedPartitions.f1) {
IntermediateResultPartition intermediateResultPartition = executionVertex
.getProducedPartitions()
.get(partitionId.getPartitionId());
assertNotNull(intermediateResultPartition);
assertEquals(execution.getAttemptId(), partitionId.getProducerId());
}
} | class ExecutionTest extends TestLogger {
@ClassRule
public static final TestingComponentMainThreadExecutor.Resource EXECUTOR_RESOURCE =
new TestingComponentMainThreadExecutor.Resource();
private final TestingComponentMainThreadExecutor testMainThreadUtil =
EXECUTOR_RESOURCE.getComponentMainThreadTestExecutor();
/**
* Tests that slots are released if we cannot assign the allocated resource to the
* Execution.
*/
@Test
public void testSlotReleaseOnFailedResourceAssignment() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
// The slot request stays pending until slotFuture is completed manually below.
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, slotFuture);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final LogicalSlot otherSlot = new TestingLogicalSlot();
CompletableFuture<Execution> allocationFuture = execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertFalse(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
// Occupy the execution with a different slot so the pending slot cannot be assigned anymore.
assertTrue(execution.tryAssignResource(otherSlot));
slotFuture.complete(slot);
// The now-unassignable slot must be handed back to its owner.
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of an execution cancellation when having
* a slot assigned and being in state SCHEDULED.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInScheduled() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread())
;
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
// Cancelling in SCHEDULED must go straight to CANCELED and return the assigned slot.
execution.cancel();
assertEquals(ExecutionState.CANCELED, execution.getState());
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of an execution cancellation when being in state
* RUNNING.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInRunning() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
execution.deploy();
execution.switchToRunning();
// Cancelling a RUNNING execution first transitions to CANCELING; the slot is only
// returned once the cancellation is acknowledged via completeCancelling().
execution.cancel();
assertEquals(ExecutionState.CANCELING, execution.getState());
execution.completeCancelling();
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that a slot allocation from a {@link SlotProvider} is cancelled if the
* {@link Execution} is cancelled.
*/
@Test
public void testSlotAllocationCancellationWhenExecutionCancelled() throws Exception {
final JobVertexID jobVertexId = new JobVertexID();
final JobVertex jobVertex = new JobVertex("test vertex", jobVertexId);
jobVertex.setInvokableClass(NoOpInvokable.class);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
// Never completed: the slot request stays outstanding for the whole test.
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
slotProvider.addSlot(jobVertexId, 0, slotFuture);
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
final ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution currentExecutionAttempt = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final CompletableFuture<Execution> allocationFuture = currentExecutionAttempt.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertThat(allocationFuture.isDone(), is(false));
assertThat(slotProvider.getSlotRequestedFuture(jobVertexId, 0).get(), is(true));
final Set<SlotRequestId> slotRequests = slotProvider.getSlotRequests();
assertThat(slotRequests, hasSize(1));
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.SCHEDULED));
// Cancelling while the request is pending must fail the allocation future and cancel
// the outstanding slot request at the provider.
currentExecutionAttempt.cancel();
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.CANCELED));
assertThat(allocationFuture.isCompletedExceptionally(), is(true));
final Set<SlotRequestId> canceledSlotRequests = slotProvider.getCanceledSlotRequests();
assertThat(canceledSlotRequests, equalTo(slotRequests));
}
/**
 * Tests that the result partitions are released when an already-finished execution is
 * cancelled afterwards.
 */
@Test
public void testPartitionReleaseOnCancelAfterFinished() throws Exception {
    testPartitionReleaseAfterFinished(execution -> execution.cancel());
}
/**
 * Tests that the result partitions are released when an already-finished execution is
 * suspended afterwards.
 */
@Test
public void testPartitionReleaseOnSuspendAfterFinished() throws Exception {
    testPartitionReleaseAfterFinished(execution -> execution.suspend());
}
/**
* Tests that all preferred locations are calculated.
*/
@Test
public void testAllPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation2 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = new CompletableFuture<>();
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ALL);
// With constraint ALL, the result may only complete once every input location is known.
assertFalse(preferredLocationsFuture.isDone());
locationFuture3.complete(taskManagerLocation3);
assertFalse(preferredLocationsFuture.isDone());
locationFuture2.complete(taskManagerLocation2);
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation2, taskManagerLocation3));
}
/**
* Tests that any preferred locations are calculated.
*/
@Test
public void testAnyPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
// locationFuture2 is deliberately never completed.
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = CompletableFuture.completedFuture(taskManagerLocation3);
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ANY);
// With constraint ANY, only the already-known locations are taken into account.
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation3));
}
/**
* Checks that the {@link Execution} termination future is only completed after the
* assigned slot has been released.
*
* <p>NOTE: This test only fails spuriously without the fix of this commit. Thus, one has
* to execute this test multiple times to see the failure.
*/
@Test
public void testTerminationFutureIsCompletedAfterSlotRelease() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
1,
Collections.singleton(jobVertexId),
slotOwner);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();
Execution currentExecutionAttempt = executionVertex.getCurrentExecutionAttempt();
CompletableFuture<LogicalSlot> returnedSlotFuture = slotOwner.getReturnedSlotFuture();
CompletableFuture<?> terminationFuture = executionVertex.cancel();
currentExecutionAttempt.completeCancelling();
// By the time the termination future completes, the slot must already have been returned.
CompletableFuture<Boolean> restartFuture = terminationFuture.thenApply(
ignored -> {
assertTrue(returnedSlotFuture.isDone());
return true;
});
restartFuture.get();
}
/**
* Tests that the task restore state is nulled after the {@link Execution} has been
* deployed. See FLINK-9693.
*/
@Test
public void testTaskRestoreStateIsNulledAfterDeployment() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
1,
Collections.singleton(jobVertexId),
slotOwner);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
final JobManagerTaskRestore taskRestoreState = new JobManagerTaskRestore(1L, new TaskStateSnapshot());
execution.setInitialState(taskRestoreState);
assertThat(execution.getTaskRestore(), is(notNullValue()));
// Scheduling (which deploys the execution) must drop the restore state reference.
executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();
assertThat(execution.getTaskRestore(), is(nullValue()));
}
/**
* Tests that a slot handed out during (eager) scheduling is given back to its owner /
* cancelled at the provider once the execution is cancelled.
*/
@Test
public void testEagerSchedulingFailureReturnsSlot() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final CompletableFuture<SlotRequestId> slotRequestIdFuture = new CompletableFuture<>();
final CompletableFuture<SlotRequestId> returnedSlotFuture = new CompletableFuture<>();
final TestingSlotProvider slotProvider = new TestingSlotProvider(
(SlotRequestId slotRequestId) -> {
slotRequestIdFuture.complete(slotRequestId);
return new CompletableFuture<>();
});
// Either path — slot returned to the owner or request cancelled — completes returnedSlotFuture.
slotProvider.setSlotCanceller(returnedSlotFuture::complete);
slotOwner.getReturnedSlotFuture().thenAccept(
(LogicalSlot logicalSlot) -> returnedSlotFuture.complete(logicalSlot.getSlotRequestId()));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(testMainThreadUtil.getMainThreadExecutor());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
taskManagerGateway.setCancelConsumer(
executionAttemptID -> {
if (execution.getAttemptId().equals(executionAttemptID)) {
execution.completeCancelling();
}
}
);
// Fulfill the slot request asynchronously on the main thread once it has been issued.
slotRequestIdFuture.thenAcceptAsync(
(SlotRequestId slotRequestId) -> {
final SingleLogicalSlot singleLogicalSlot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
slotOwner,
taskManagerGateway,
slotRequestId);
slotProvider.complete(slotRequestId, singleLogicalSlot);
},
testMainThreadUtil.getMainThreadExecutor());
final CompletableFuture<Void> schedulingFuture = testMainThreadUtil.execute(
() -> execution.scheduleForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ANY,
Collections.emptySet()));
try {
schedulingFuture.get();
testMainThreadUtil.execute(execution::cancel);
// A failed scheduling attempt is acceptable here; the slot must come back either way.
} catch (ExecutionException ignored) {
}
assertThat(returnedSlotFuture.get(), is(equalTo(slotRequestIdFuture.get())));
}
/**
* Tests that a slot release will atomically release the assigned {@link Execution}.
*/
@Test
public void testSlotReleaseAtomicallyReleasesExecution() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SingleLogicalSlot slot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
slotOwner,
new SimpleAckingTaskManagerGateway(),
new SlotRequestId());
final CompletableFuture<LogicalSlot> slotFuture = CompletableFuture.completedFuture(slot);
final CountDownLatch slotRequestLatch = new CountDownLatch(1);
final TestingSlotProvider slotProvider = new TestingSlotProvider(slotRequestId -> {
slotRequestLatch.countDown();
return slotFuture;
});
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
final Execution execution = executionGraph.getJobVertex(jobVertex.getID()).getTaskVertices()[0].getCurrentExecutionAttempt();
executionGraph.start(testMainThreadUtil.getMainThreadExecutor());
testMainThreadUtil.execute(executionGraph::scheduleForExecution);
slotRequestLatch.await();
testMainThreadUtil.execute(() -> {
assertThat(execution.getAssignedResource(), is(sameInstance(slot)));
// Releasing the slot on the main thread must synchronously complete the
// execution's release future — no intermediate observable state.
slot.release(new FlinkException("Test exception"));
assertThat(execution.getReleaseFuture().isDone(), is(true));
});
}
/** Builds a job vertex with a fresh id whose invokable does nothing. */
@Nonnull
private JobVertex createNoOpJobVertex() {
    final JobVertex vertex = new JobVertex("Test vertex", new JobVertexID());
    vertex.setInvokableClass(NoOpInvokable.class);
    return vertex;
}
/**
 * Creates a {@link ProgrammedSlotProvider} offering one completed slot per subtask of every
 * given job vertex.
 *
 * <p>Bug fix: the loop variable {@code i} was unused and every slot was registered under
 * subtask index {@code 0}, so for {@code parallelism > 1} only the first subtask ever received
 * a slot. Slots are now registered under their actual subtask index; behavior for the current
 * callers (all using parallelism 1) is unchanged.
 *
 * @param parallelism number of subtasks to provision per vertex
 * @param jobVertexIds the vertices to provision slots for
 * @param slotOwner the owner to which released slots are returned
 * @return the fully provisioned slot provider
 */
@Nonnull
private ProgrammedSlotProvider createProgrammedSlotProvider(
    int parallelism,
    Collection<JobVertexID> jobVertexIds,
    SlotOwner slotOwner) {
    final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(parallelism);
    for (JobVertexID jobVertexId : jobVertexIds) {
        for (int i = 0; i < parallelism; i++) {
            final SimpleSlot slot = new SimpleSlot(
                slotOwner,
                new LocalTaskManagerLocation(),
                0,
                new SimpleAckingTaskManagerGateway(),
                null,
                null);
            // Register under the subtask index (was hard-coded to 0).
            slotProvider.addSlot(jobVertexId, i, CompletableFuture.completedFuture(slot));
        }
    }
    return slotProvider;
}
/**
* Slot owner which records the first returned slot. Subsequent returns are ignored because
* the future can only be completed once.
*/
private static final class SingleSlotTestingSlotOwner implements SlotOwner {
final CompletableFuture<LogicalSlot> returnedSlot = new CompletableFuture<>();
/** Future completed with the first slot handed back via {@link #returnLogicalSlot}. */
public CompletableFuture<LogicalSlot> getReturnedSlotFuture() {
return returnedSlot;
}
@Override
public void returnLogicalSlot(LogicalSlot logicalSlot) {
returnedSlot.complete(logicalSlot);
}
}
} | class ExecutionTest extends TestLogger {
@ClassRule
public static final TestingComponentMainThreadExecutor.Resource EXECUTOR_RESOURCE =
new TestingComponentMainThreadExecutor.Resource();
private final TestingComponentMainThreadExecutor testMainThreadUtil =
EXECUTOR_RESOURCE.getComponentMainThreadTestExecutor();
/**
* Tests that slots are released if we cannot assign the allocated resource to the
* Execution.
*/
@Test
public void testSlotReleaseOnFailedResourceAssignment() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
// The slot request stays pending until slotFuture is completed manually below.
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, slotFuture);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final LogicalSlot otherSlot = new TestingLogicalSlot();
CompletableFuture<Execution> allocationFuture = execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertFalse(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
// Occupy the execution with a different slot so the pending slot cannot be assigned anymore.
assertTrue(execution.tryAssignResource(otherSlot));
slotFuture.complete(slot);
// The now-unassignable slot must be handed back to its owner.
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of an execution cancellation when having
* a slot assigned and being in state SCHEDULED.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInScheduled() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
// Cancelling in SCHEDULED must go straight to CANCELED and return the assigned slot.
execution.cancel();
assertEquals(ExecutionState.CANCELED, execution.getState());
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of an execution cancellation when being in state
* RUNNING.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInRunning() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
execution.deploy();
execution.switchToRunning();
// Cancelling a RUNNING execution first transitions to CANCELING; the slot is only
// returned once the cancellation is acknowledged via completeCancelling().
execution.cancel();
assertEquals(ExecutionState.CANCELING, execution.getState());
execution.completeCancelling();
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that a slot allocation from a {@link SlotProvider} is cancelled if the
* {@link Execution} is cancelled.
*/
@Test
public void testSlotAllocationCancellationWhenExecutionCancelled() throws Exception {
final JobVertexID jobVertexId = new JobVertexID();
final JobVertex jobVertex = new JobVertex("test vertex", jobVertexId);
jobVertex.setInvokableClass(NoOpInvokable.class);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
// Never completed: the slot request stays outstanding for the whole test.
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
slotProvider.addSlot(jobVertexId, 0, slotFuture);
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
final ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution currentExecutionAttempt = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final CompletableFuture<Execution> allocationFuture = currentExecutionAttempt.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertThat(allocationFuture.isDone(), is(false));
assertThat(slotProvider.getSlotRequestedFuture(jobVertexId, 0).get(), is(true));
final Set<SlotRequestId> slotRequests = slotProvider.getSlotRequests();
assertThat(slotRequests, hasSize(1));
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.SCHEDULED));
// Cancelling while the request is pending must fail the allocation future and cancel
// the outstanding slot request at the provider.
currentExecutionAttempt.cancel();
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.CANCELED));
assertThat(allocationFuture.isCompletedExceptionally(), is(true));
final Set<SlotRequestId> canceledSlotRequests = slotProvider.getCanceledSlotRequests();
assertThat(canceledSlotRequests, equalTo(slotRequests));
}
/**
 * Tests that the result partitions are released when an already-finished execution is
 * cancelled afterwards.
 */
@Test
public void testPartitionReleaseOnCancelingAfterBeingFinished() throws Exception {
    testPartitionReleaseAfterFinished(execution -> execution.cancel());
}
/**
 * Tests that the result partitions are released when an already-finished execution is
 * suspended afterwards.
 */
@Test
public void testPartitionReleaseOnSuspendingAfterBeingFinished() throws Exception {
    testPartitionReleaseAfterFinished(execution -> execution.suspend());
}
/**
* Tests that all preferred locations are calculated.
*/
@Test
public void testAllPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation2 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = new CompletableFuture<>();
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ALL);
// With constraint ALL, the result may only complete once every input location is known.
assertFalse(preferredLocationsFuture.isDone());
locationFuture3.complete(taskManagerLocation3);
assertFalse(preferredLocationsFuture.isDone());
locationFuture2.complete(taskManagerLocation2);
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation2, taskManagerLocation3));
}
/**
* Tests that any preferred locations are calculated.
*/
@Test
public void testAnyPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
// locationFuture2 is deliberately never completed.
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = CompletableFuture.completedFuture(taskManagerLocation3);
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ANY);
// With constraint ANY, only the already-known locations are taken into account.
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation3));
}
/**
 * Checks that the {@link Execution} termination future is only completed after
 * the assigned slot has been handed back to its owner.
 *
 * <p>NOTE: Without the accompanying fix this test fails only spuriously, so a
 * single green run is not conclusive; execute it repeatedly to see the failure.
 */
@Test
public void testTerminationFutureIsCompletedAfterSlotRelease() throws Exception {
    final JobVertex vertex = createNoOpJobVertex();
    final JobVertexID vertexId = vertex.getID();

    final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
    final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
        1,
        Collections.singleton(vertexId),
        slotOwner);

    final ExecutionGraph graph = ExecutionGraphTestUtils.createSimpleTestGraph(
        new JobID(),
        slotProvider,
        new NoRestartStrategy(),
        vertex);
    graph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());

    final ExecutionVertex executionVertex = graph.getJobVertex(vertexId).getTaskVertices()[0];
    executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();

    final Execution attempt = executionVertex.getCurrentExecutionAttempt();
    final CompletableFuture<LogicalSlot> returnedSlotFuture = slotOwner.getReturnedSlotFuture();
    final CompletableFuture<?> terminationFuture = executionVertex.cancel();
    attempt.completeCancelling();

    // By the time the termination future fires, the slot must already be back.
    terminationFuture
        .thenRun(() -> assertTrue(returnedSlotFuture.isDone()))
        .get();
}
/**
 * Verifies that the task restore state handed to an {@link Execution} is set
 * back to {@code null} once the execution has been deployed (FLINK-9693), so
 * the potentially large state handle can be garbage collected.
 */
@Test
public void testTaskRestoreStateIsNulledAfterDeployment() throws Exception {
    final JobVertex vertex = createNoOpJobVertex();
    final JobVertexID vertexId = vertex.getID();

    final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
    final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
        1,
        Collections.singleton(vertexId),
        slotOwner);

    final ExecutionGraph graph = ExecutionGraphTestUtils.createSimpleTestGraph(
        new JobID(),
        slotProvider,
        new NoRestartStrategy(),
        vertex);

    final ExecutionVertex executionVertex = graph.getJobVertex(vertexId).getTaskVertices()[0];
    final Execution execution = executionVertex.getCurrentExecutionAttempt();

    execution.setInitialState(new JobManagerTaskRestore(1L, new TaskStateSnapshot()));
    assertThat(execution.getTaskRestore(), is(notNullValue()));

    // Deploying the execution must drop the restore-state reference.
    executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();
    assertThat(execution.getTaskRestore(), is(nullValue()));
}
// Verifies that a slot acquired during eager scheduling is handed back (to the
// owner or via slot-request cancellation) when the scheduling attempt is
// cancelled. The assertions compare the returned slot-request id with the one
// that was originally requested.
@Test
public void testEagerSchedulingFailureReturnsSlot() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
// Completed when the provider receives a request / when the slot comes back.
final CompletableFuture<SlotRequestId> slotRequestIdFuture = new CompletableFuture<>();
final CompletableFuture<SlotRequestId> returnedSlotFuture = new CompletableFuture<>();
// The provider records the request id but never completes the slot future
// itself; completion happens asynchronously further below.
final TestingSlotProvider slotProvider = new TestingSlotProvider(
(SlotRequestId slotRequestId) -> {
slotRequestIdFuture.complete(slotRequestId);
return new CompletableFuture<>();
});
// Either path — cancelling the pending request or returning the granted slot —
// completes returnedSlotFuture.
slotProvider.setSlotCanceller(returnedSlotFuture::complete);
slotOwner.getReturnedSlotFuture().thenAccept(
(LogicalSlot logicalSlot) -> returnedSlotFuture.complete(logicalSlot.getSlotRequestId()));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(testMainThreadUtil.getMainThreadExecutor());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
// Acknowledge the cancel RPC by completing the cancellation of this attempt.
taskManagerGateway.setCancelConsumer(
executionAttemptID -> {
if (execution.getAttemptId().equals(executionAttemptID)) {
execution.completeCancelling();
}
}
);
// Fulfil the slot request on the main thread once it has been observed.
slotRequestIdFuture.thenAcceptAsync(
(SlotRequestId slotRequestId) -> {
final SingleLogicalSlot singleLogicalSlot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
slotOwner,
taskManagerGateway,
slotRequestId);
slotProvider.complete(slotRequestId, singleLogicalSlot);
},
testMainThreadUtil.getMainThreadExecutor());
final CompletableFuture<Void> schedulingFuture = testMainThreadUtil.execute(
() -> execution.scheduleForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ANY,
Collections.emptySet()));
try {
schedulingFuture.get();
// Scheduling succeeded: cancel to force the slot to be given back.
testMainThreadUtil.execute(execution::cancel);
} catch (ExecutionException ignored) {
// Scheduling failed: the failure path itself must return the slot.
}
// In both cases the originally requested slot must have been released.
assertThat(returnedSlotFuture.get(), is(equalTo(slotRequestIdFuture.get())));
}
/**
 * Verifies that releasing a slot atomically releases the {@link Execution}
 * occupying it: when {@code release} returns, the execution's release future
 * must already be complete.
 */
@Test
public void testSlotReleaseAtomicallyReleasesExecution() throws Exception {
    final JobVertex vertex = createNoOpJobVertex();
    final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
    final SingleLogicalSlot logicalSlot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
        slotOwner,
        new SimpleAckingTaskManagerGateway(),
        new SlotRequestId());
    final CompletableFuture<LogicalSlot> slotFuture = CompletableFuture.completedFuture(logicalSlot);

    final CountDownLatch requestLatch = new CountDownLatch(1);
    final TestingSlotProvider slotProvider = new TestingSlotProvider(requestId -> {
        requestLatch.countDown();
        return slotFuture;
    });

    final ExecutionGraph graph = ExecutionGraphTestUtils.createSimpleTestGraph(
        new JobID(),
        slotProvider,
        new NoRestartStrategy(),
        vertex);
    final Execution attempt = graph.getJobVertex(vertex.getID()).getTaskVertices()[0].getCurrentExecutionAttempt();

    graph.start(testMainThreadUtil.getMainThreadExecutor());
    testMainThreadUtil.execute(() -> graph.scheduleForExecution());

    // Wait until the slot request has reached the provider before poking the slot.
    requestLatch.await();

    testMainThreadUtil.execute(() -> {
        assertThat(attempt.getAssignedResource(), is(sameInstance(logicalSlot)));
        logicalSlot.release(new FlinkException("Test exception"));
        // The release must have taken effect synchronously.
        assertThat(attempt.getReleaseFuture().isDone(), is(true));
    });
}
/** Builds a test vertex whose invokable does nothing. */
@Nonnull
private JobVertex createNoOpJobVertex() {
    final JobVertex vertex = new JobVertex("Test vertex", new JobVertexID());
    vertex.setInvokableClass(NoOpInvokable.class);
    return vertex;
}
/**
 * Creates a {@link ProgrammedSlotProvider} pre-populated with one completed
 * slot future per subtask of every given job vertex.
 *
 * @param parallelism number of subtasks per vertex to provision slots for
 * @param jobVertexIds vertices that should receive slots
 * @param slotOwner owner to which the created slots report back
 * @return the populated slot provider
 */
@Nonnull
private ProgrammedSlotProvider createProgrammedSlotProvider(
    int parallelism,
    Collection<JobVertexID> jobVertexIds,
    SlotOwner slotOwner) {
    final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(parallelism);
    for (JobVertexID jobVertexId : jobVertexIds) {
        for (int i = 0; i < parallelism; i++) {
            final SimpleSlot slot = new SimpleSlot(
                slotOwner,
                new LocalTaskManagerLocation(),
                0,
                new SimpleAckingTaskManagerGateway(),
                null,
                null);
            // Bug fix: register each slot under its own subtask index instead of
            // overwriting index 0 on every iteration, so all subtasks get a slot.
            slotProvider.addSlot(jobVertexId, i, CompletableFuture.completedFuture(slot));
        }
    }
    return slotProvider;
}
/**
 * Slot owner which records the first returned slot.
 */
private static final class SingleSlotTestingSlotOwner implements SlotOwner {
// Completed with the first slot that is handed back; later returns are no-ops
// because CompletableFuture.complete() only succeeds once.
final CompletableFuture<LogicalSlot> returnedSlot = new CompletableFuture<>();
// Future completing with the first slot returned to this owner.
public CompletableFuture<LogicalSlot> getReturnedSlotFuture() {
return returnedSlot;
}
@Override
public void returnLogicalSlot(LogicalSlot logicalSlot) {
returnedSlot.complete(logicalSlot);
}
}
} |
@manuranga Do we have another construct for creating a StringValue, or is it okay to use this one directly? | public static Object bindDataToIntendedType(byte[] data, BType intendedType) {
// Converts the raw NATS payload bytes into a Ballerina value of the requested
// type. Text-based targets decode the bytes as UTF-8; unsupported type tags
// raise a NATS error.
int dataParamTypeTag = intendedType.getTag();
Object dispatchedData;
switch (dataParamTypeTag) {
case TypeTags.STRING_TAG:
// TODO(review): constructs BmpStringValue directly — confirm whether a
// factory such as StringUtils.fromString should be used instead.
dispatchedData = new BmpStringValue(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.JSON_TAG:
try {
Object json = JSONParser.parse(new String(data, StandardCharsets.UTF_8));
// JSON strings are re-wrapped as Ballerina string values.
dispatchedData = json instanceof String ? new BmpStringValue((String) json) : json;
} catch (BallerinaException e) {
throw createNatsError("Error occurred in converting message content to json: " +
e.getMessage());
}
break;
case TypeTags.INT_TAG:
// NOTE(review): returns a boxed Integer; confirm the runtime does not
// expect a 64-bit Long for Ballerina ints here.
dispatchedData = Integer.valueOf(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.BOOLEAN_TAG:
dispatchedData = Boolean.valueOf(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.FLOAT_TAG:
dispatchedData = Double.valueOf(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.DECIMAL_TAG:
dispatchedData = new DecimalValue(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.ARRAY_TAG:
// Byte arrays are passed through without any text decoding.
dispatchedData = new ArrayValueImpl(data);
break;
case TypeTags.XML_TAG:
dispatchedData = XMLFactory.parse(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.RECORD_TYPE_TAG:
// Parse as JSON first, then map onto the expected record type.
dispatchedData = JSONUtils.convertJSONToRecord(JSONParser.parse(new String(data,
StandardCharsets.UTF_8)), (BRecordType) intendedType);
break;
default:
throw Utils.createNatsError("Unable to find a supported data type to bind the message data");
}
return dispatchedData;
} | dispatchedData = new BmpStringValue(new String(data, StandardCharsets.UTF_8)); | public static Object bindDataToIntendedType(byte[] data, BType intendedType) {
// Converts the raw NATS payload bytes into a Ballerina value of the requested
// type. Text-based targets decode the bytes as UTF-8; unsupported type tags
// raise a NATS error.
int dataParamTypeTag = intendedType.getTag();
Object dispatchedData;
switch (dataParamTypeTag) {
case TypeTags.STRING_TAG:
// Uses the StringUtils factory rather than constructing the string value directly.
dispatchedData = StringUtils.fromString(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.JSON_TAG:
try {
Object json = JSONParser.parse(new String(data, StandardCharsets.UTF_8));
// JSON strings are re-wrapped as Ballerina string values.
dispatchedData = json instanceof String ? StringUtils.fromString((String) json) : json;
} catch (BallerinaException e) {
throw createNatsError("Error occurred in converting message content to json: " +
e.getMessage());
}
break;
case TypeTags.INT_TAG:
// NOTE(review): returns a boxed Integer; confirm the runtime does not
// expect a 64-bit Long for Ballerina ints here.
dispatchedData = Integer.valueOf(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.BOOLEAN_TAG:
dispatchedData = Boolean.valueOf(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.FLOAT_TAG:
dispatchedData = Double.valueOf(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.DECIMAL_TAG:
dispatchedData = new DecimalValue(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.ARRAY_TAG:
// Byte arrays are passed through without any text decoding.
dispatchedData = new ArrayValueImpl(data);
break;
case TypeTags.XML_TAG:
dispatchedData = XMLFactory.parse(new String(data, StandardCharsets.UTF_8));
break;
case TypeTags.RECORD_TYPE_TAG:
// Parse as JSON first, then map onto the expected record type.
dispatchedData = JSONUtils.convertJSONToRecord(JSONParser.parse(new String(data,
StandardCharsets.UTF_8)), (BRecordType) intendedType);
break;
default:
throw Utils.createNatsError("Unable to find a supported data type to bind the message data");
}
return dispatchedData;
} | class Utils {
/** Builds a NATS error whose detail record carries the message NUID and detail text. */
public static ErrorValue createNatsError(String nuid, String detailedErrorMessage) {
    MapValue<String, Object> detailTemplate = BallerinaValues
            .createRecordValue(Constants.NATS_PACKAGE_ID, Constants.NATS_ERROR_DETAIL_RECORD);
    MapValue<String, Object> detail = BallerinaValues
            .createRecord(detailTemplate, nuid, detailedErrorMessage);
    return BallerinaErrors.createError(Constants.NATS_ERROR_CODE, detail);
}
/** Builds a NATS error whose detail record carries only a message. */
public static ErrorValue createNatsError(String detailedErrorMessage) {
    MapValue<String, Object> detail = BallerinaValues
            .createRecordValue(Constants.NATS_PACKAGE_ID, Constants.NATS_ERROR_DETAIL_RECORD);
    detail.put("message", detailedErrorMessage);
    return BallerinaErrors.createError(Constants.NATS_ERROR_CODE, detail);
}
/**
 * Wraps a NATS {@link Message} into the Ballerina message object. A null
 * message produces an object with an empty subject, payload, and reply-to.
 */
public static ObjectValue getMessageObject(Message message) {
    if (message == null) {
        ArrayValue emptyPayload = new ArrayValueImpl(new byte[0]);
        return BallerinaValues.createObjectValue(Constants.NATS_PACKAGE_ID,
                Constants.NATS_MESSAGE_OBJ_NAME, "", emptyPayload, "");
    }
    ArrayValue payload = new ArrayValueImpl(message.getData());
    return BallerinaValues.createObjectValue(Constants.NATS_PACKAGE_ID,
            Constants.NATS_MESSAGE_OBJ_NAME, message.getSubject(), payload, message.getReplyTo());
}
/** Serializes an outbound payload (Ballerina string or byte-array value) to raw bytes. */
public static byte[] convertDataIntoByteArray(Object data) {
    int typeTag = TypeChecker.getType(data).getTag();
    if (typeTag == org.wso2.ballerinalang.compiler.util.TypeTags.STRING) {
        // Strings are encoded as UTF-8 text.
        return ((String) data).getBytes(StandardCharsets.UTF_8);
    }
    return ((ArrayValue) data).getBytes();
}
/** Looks up an attached function of the service object by name; null when absent. */
public static AttachedFunction getAttachedFunction(ObjectValue serviceObject, String functionName) {
    for (AttachedFunction candidate : serviceObject.getType().getAttachedFunctions()) {
        if (functionName.equals(candidate.getName())) {
            return candidate;
        }
    }
    return null;
}
/** Returns the annotation data as a record map when it is one, otherwise null. */
@SuppressWarnings("unchecked")
public static MapValue<String, Object> getSubscriptionConfig(Object annotationData) {
    if (TypeChecker.getType(annotationData).getTag() == TypeTags.RECORD_TYPE_TAG) {
        return (MapValue) annotationData;
    }
    return null;
}
} | class Utils {
// Builds a NATS error whose detail record carries the message NUID and detail text.
public static ErrorValue createNatsError(String nuid, String detailedErrorMessage) {
MapValue<String, Object> errorDetailRecord = BallerinaValues
.createRecordValue(Constants.NATS_PACKAGE_ID, Constants.NATS_ERROR_DETAIL_RECORD);
MapValue<String, Object> populatedDetailRecord = BallerinaValues
.createRecord(errorDetailRecord, nuid, detailedErrorMessage);
return BallerinaErrors.createError(Constants.NATS_ERROR_CODE, populatedDetailRecord);
}
// Builds a NATS error whose detail record carries only a message.
public static ErrorValue createNatsError(String detailedErrorMessage) {
MapValue<String, Object> errorDetailRecord = BallerinaValues
.createRecordValue(Constants.NATS_PACKAGE_ID, Constants.NATS_ERROR_DETAIL_RECORD);
errorDetailRecord.put("message", detailedErrorMessage);
return BallerinaErrors.createError(Constants.NATS_ERROR_CODE, errorDetailRecord);
}
// Wraps a NATS Message into the Ballerina message object; a null message
// yields an object with empty subject, payload and reply-to.
public static ObjectValue getMessageObject(Message message) {
ObjectValue msgObj;
if (message != null) {
ArrayValue msgData = new ArrayValueImpl(message.getData());
msgObj = BallerinaValues.createObjectValue(Constants.NATS_PACKAGE_ID,
Constants.NATS_MESSAGE_OBJ_NAME, message.getSubject(), msgData, message.getReplyTo());
} else {
ArrayValue msgData = new ArrayValueImpl(new byte[0]);
msgObj = BallerinaValues.createObjectValue(Constants.NATS_PACKAGE_ID,
Constants.NATS_MESSAGE_OBJ_NAME, "", msgData, "");
}
return msgObj;
}
// Serializes an outbound payload (Ballerina string or byte-array value) to raw
// bytes; strings are encoded as UTF-8.
public static byte[] convertDataIntoByteArray(Object data) {
BType dataType = TypeChecker.getType(data);
int typeTag = dataType.getTag();
if (typeTag == org.wso2.ballerinalang.compiler.util.TypeTags.STRING) {
return ((String) data).getBytes(StandardCharsets.UTF_8);
} else {
return ((ArrayValue) data).getBytes();
}
}
// Looks up an attached function of the service object by name; returns null
// when no function with that name exists.
public static AttachedFunction getAttachedFunction(ObjectValue serviceObject, String functionName) {
AttachedFunction function = null;
AttachedFunction[] resourceFunctions = serviceObject.getType().getAttachedFunctions();
for (AttachedFunction resourceFunction : resourceFunctions) {
if (functionName.equals(resourceFunction.getName())) {
function = resourceFunction;
break;
}
}
return function;
}
// Returns the annotation data as a record map when it is one, otherwise null.
@SuppressWarnings("unchecked")
public static MapValue<String, Object> getSubscriptionConfig(Object annotationData) {
MapValue annotationRecord = null;
if (TypeChecker.getType(annotationData).getTag() == TypeTags.RECORD_TYPE_TAG) {
annotationRecord = (MapValue) annotationData;
}
return annotationRecord;
}
} |
@michalvavrik Sure — the question is whether this extra check done inside the Permission is related to the authorization. | public boolean implies(Permission permission) {
// Only WorkdayPermission instances can ever be implied.
if (permission instanceof WorkdayPermission) {
WorkdayPermission that = (WorkdayPermission) permission;
// Only the "worker" name with an "adult" action qualifies; getActions()
// returns a String, so contains() is a substring match.
if (that.getName().equals("worker") && that.getActions().contains("adult")) {
// Resolve the evaluator from the Arc (CDI) container at check time.
final WorkdayEvaluator workdayEvaluator = Arc.container().instance(WorkdayEvaluator.class).get();
return workdayEvaluator.isWorkday(that.day);
}
}
return false;
} | if (that.getName().equals("worker") && that.getActions().contains("adult")) { | public boolean implies(Permission permission) {
// Grants only the "worker"/"adult" combination, and only when the permission's
// day is a workday according to the container-resolved WorkdayEvaluator.
if (permission instanceof WorkdayPermission) {
WorkdayPermission that = (WorkdayPermission) permission;
if (that.getName().equals("worker") && that.getActions().contains("adult")) {
// Looked up lazily from Arc so the bean is resolved per permission check.
final WorkdayEvaluator workdayEvaluator = Arc.container().instance(WorkdayEvaluator.class).get();
return workdayEvaluator.isWorkday(that.day);
}
}
return false;
} | class must have a formal parameter {@link String} | class must have a formal parameter {@link String} |
Yes, that's a good point. However, it is currently used by production code and a lot of testing code paths. I would move this issue out of the scope of this PR. WDYT? | public ExecutionAttemptID() {
// Random id via ExecutionVertexID's no-arg constructor; attempt number 0.
this(new ExecutionVertexID(), 0);
} | this(new ExecutionVertexID(), 0); | public ExecutionAttemptID() {
// Random id: random JobVertexID with subtask index 0, attempt number 0.
this(new ExecutionVertexID(new JobVertexID(), 0), 0);
} | class ExecutionAttemptID implements java.io.Serializable {
private static final long serialVersionUID = -1169683445778281344L;
// Vertex this attempt belongs to, plus the 0-based attempt counter.
private final ExecutionVertexID executionVertexID;
private final int attemptNumber;
/**
 * Creates an attempt id for the given execution vertex and attempt number.
 *
 * @param executionVertexID vertex this attempt executes; must not be null
 * @param attemptNumber non-negative attempt counter
 */
public ExecutionAttemptID(ExecutionVertexID executionVertexID, int attemptNumber) {
Preconditions.checkState(attemptNumber >= 0);
this.executionVertexID = Preconditions.checkNotNull(executionVertexID);
this.attemptNumber = attemptNumber;
}
// Serializes the vertex id first, then the attempt number (mirrored by fromByteBuf).
public void writeTo(ByteBuf buf) {
executionVertexID.writeTo(buf);
buf.writeInt(this.attemptNumber);
}
/** Deserializes an attempt id in the exact order {@code writeTo} serialized it. */
public static ExecutionAttemptID fromByteBuf(ByteBuf buf) {
    // Java's left-to-right argument evaluation preserves the read order:
    // vertex id first, then the attempt number.
    return new ExecutionAttemptID(ExecutionVertexID.fromByteBuf(buf), buf.readInt());
}
// Accessors exposed for tests only.
@VisibleForTesting
public int getAttemptNumber() {
return attemptNumber;
}
@VisibleForTesting
public ExecutionVertexID getExecutionVertexID() {
return executionVertexID;
}
@Override
public boolean equals(Object obj) {
    // Fast path: same reference.
    if (obj == this) {
        return true;
    }
    // Exact class match (not instanceof), as in the original contract.
    if (obj == null || obj.getClass() != getClass()) {
        return false;
    }
    final ExecutionAttemptID other = (ExecutionAttemptID) obj;
    return other.executionVertexID.equals(this.executionVertexID)
        && other.attemptNumber == this.attemptNumber;
}
@Override
public int hashCode() {
    // 31-based mixing instead of XOR: XOR is symmetric and collapses equal
    // operands to 0, clustering hash values. Matches the updated variant of
    // this class elsewhere in the file; equal objects still hash equally.
    return this.executionVertexID.hashCode() * 31 + this.attemptNumber;
}
// Renders as "<vertexId>_<attemptNumber>".
@Override
public String toString() {
return executionVertexID.toString() + "_" + attemptNumber;
}
} | class ExecutionAttemptID implements java.io.Serializable {
private static final long serialVersionUID = -1169683445778281344L;
// Vertex this attempt belongs to, plus the 0-based attempt counter.
private final ExecutionVertexID executionVertexId;
private final int attemptNumber;
/**
 * Creates an attempt id for the given execution vertex and attempt number.
 *
 * @param executionVertexId vertex this attempt executes; must not be null
 * @param attemptNumber non-negative attempt counter
 */
public ExecutionAttemptID(ExecutionVertexID executionVertexId, int attemptNumber) {
Preconditions.checkState(attemptNumber >= 0);
this.executionVertexId = Preconditions.checkNotNull(executionVertexId);
this.attemptNumber = attemptNumber;
}
// Serializes the vertex id first, then the attempt number.
public void writeTo(ByteBuf buf) {
executionVertexId.writeTo(buf);
buf.writeInt(this.attemptNumber);
}
// Deserializes in the exact order writeTo serialized.
public static ExecutionAttemptID fromByteBuf(ByteBuf buf) {
final ExecutionVertexID executionVertexId = ExecutionVertexID.fromByteBuf(buf);
final int attemptNumber = buf.readInt();
return new ExecutionAttemptID(executionVertexId, attemptNumber);
}
// Accessors exposed for tests only.
@VisibleForTesting
public int getAttemptNumber() {
return attemptNumber;
}
@VisibleForTesting
public ExecutionVertexID getExecutionVertexId() {
return executionVertexId;
}
// Two attempt ids are equal iff vertex id and attempt number match; exact
// class comparison (getClass, not instanceof) keeps equals symmetric.
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
} else if (obj != null && obj.getClass() == getClass()) {
ExecutionAttemptID that = (ExecutionAttemptID) obj;
return that.executionVertexId.equals(this.executionVertexId)
&& that.attemptNumber == this.attemptNumber;
} else {
return false;
}
}
// 31-based mixing keeps equal objects hashing equally while avoiding the
// symmetry/zero-collapse collisions of a plain XOR.
@Override
public int hashCode() {
return this.executionVertexId.hashCode() * 31 + this.attemptNumber;
}
// Renders as "<vertexId>_<attemptNumber>".
@Override
public String toString() {
return executionVertexId.toString() + "_" + attemptNumber;
}
} |
Then, at L5127, we would have to do ``` arrayLiteral.exprs.add(addConversionExprIfRequired(restArg, elemType)); ``` — I'd rather build the complete list first and then set it. | public void visit(BLangFunction funcNode) {
// Desugars a function in its own function environment; when the function
// carries a transactions participant annotation, the body is additionally
// wrapped into a participant function.
SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
// Bodyless (interface) functions cannot take an implicit trailing return.
if (!funcNode.interfaceFunction) {
addReturnIfNotPresent(funcNode);
}
// Keep the original symbol around; the working copy below may be mutated.
funcNode.originalFuncSymbol = funcNode.symbol;
funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol);
funcNode.requiredParams = rewrite(funcNode.requiredParams, funcEnv);
funcNode.restParam = rewrite(funcNode.restParam, funcEnv);
funcNode.workers = rewrite(funcNode.workers, funcEnv);
if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {
funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);
}
// Capture transaction-participant annotations before attachments are rewritten.
List<BLangAnnotationAttachment> participantAnnotation
= funcNode.annAttachments.stream()
.filter(a -> Transactions.isTransactionsAnnotation(a.pkgAlias.value,
a.annotationName.value))
.collect(Collectors.toList());
funcNode.body = rewrite(funcNode.body, funcEnv);
funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
if (funcNode.returnTypeNode != null) {
funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
}
if (participantAnnotation.isEmpty()) {
// Not a transaction participant: done.
result = funcNode;
return;
}
result = desugarParticipantFunction(funcNode, participantAnnotation);
}
/**
 * Rewrites a transaction-participant function: the original body is moved into
 * a wrapper lambda, and the function body is replaced with a call to the
 * transaction module's participant-begin function, passing the wrapper plus
 * generated onCommit/onAbort lambdas derived from the participant annotation.
 */
private BLangFunction desugarParticipantFunction(BLangFunction funcNode,
List<BLangAnnotationAttachment> participantAnnotation) {
BLangAnnotationAttachment annotation = participantAnnotation.get(0);
BLangBlockFunctionBody onCommitBody = null;
BLangBlockFunctionBody onAbortBody = null;
// The original parameters (and receiver) become closure variables of the
// generated lambdas.
funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true);
if (funcNode.receiver != null) {
funcNode.receiver.symbol.closure = true;
}
// Participant body returns error|any.
BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType);
BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
// Skeleton lambdas for the commit/abort callbacks; each takes the trx id.
BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$",
ASTBuilderUtil.createTypeNode(symTable.nilType));
BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$",
ASTBuilderUtil.createTypeNode(symTable.nilType));
BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil
.createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
symTable.stringType, commitFunc.function.symbol));
BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil
.createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
symTable.stringType, abortFunc.function.symbol));
BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol);
BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol);
// Wire user-supplied oncommit/onabort handlers (if any) into the callbacks.
for (Map.Entry<String, BLangExpression> entry :
getKeyValuePairs((BLangStatementExpression) annotation.expr).entrySet()) {
switch (entry.getKey()) {
case Transactions.TRX_ONCOMMIT_FUNC:
BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
BLangInvocation onCommit = ASTBuilderUtil
.createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef),
Collections.emptyList(), symResolver);
BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit);
onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onCommitStmt));
break;
case Transactions.TRX_ONABORT_FUNC:
BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
BLangInvocation onAbort = ASTBuilderUtil
.createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef),
Collections.emptyList(), symResolver);
BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort);
onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onAbortStmt));
break;
}
}
// Missing handlers default to bodies that return nil.
if (onCommitBody == null) {
onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody);
returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
}
if (onAbortBody == null) {
onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody);
returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
}
// Finalize the callback lambdas: body, parameter, and invokable type.
commitFunc.function.body = onCommitBody;
commitFunc.function.requiredParams.add(onCommitTrxVar);
commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type),
commitFunc.function.symbol.type.getReturnType(), null);
commitFunc.function.symbol.type = commitFunc.type;
commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol);
abortFunc.function.body = onAbortBody;
abortFunc.function.requiredParams.add(onAbortTrxVar);
abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type),
abortFunc.function.symbol.type.getReturnType(), null);
abortFunc.function.symbol.type = abortFunc.type;
abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol);
// Locate the transaction module's participant-begin function.
BSymbol trxModSym = env.enclPkg.imports
.stream()
.filter(importPackage -> importPackage.symbol.
pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value
+ Names.TRANSACTION_PACKAGE.value))
.findAny().get().symbol;
BInvokableSymbol invokableSymbol =
(BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(trxModSym),
getParticipantFunctionName(funcNode));
BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType,
getTransactionBlockId());
// The wrapper lambda carries the original function body and return type.
BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$",
Collections.emptyList(),
funcNode.returnTypeNode,
funcNode.body);
for (BLangSimpleVariable var : funcNode.requiredParams) {
trxMainWrapperFunc.function.closureVarSymbols.add(new ClosureVarSymbol(var.symbol, var.pos));
}
// The main participant lambda invokes the wrapper and returns its result.
BLangBlockFunctionBody trxMainBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
BLangLambdaFunction trxMainFunc
= createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(),
trxReturnNode, trxMainBody);
trxMainWrapperFunc.capturedClosureEnv = trxMainFunc.function.clonedEnv;
commitFunc.capturedClosureEnv = env.createClone();
abortFunc.capturedClosureEnv = env.createClone();
BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID,
trxMainWrapperFunc.type, trxMainFunc.function.symbol);
BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1",
trxMainWrapperFunc.type, trxMainWrapperFunc,
wrapperSym);
BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody);
variableDef.var = wrapperFuncVar;
BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos,
wrapperFuncVar.symbol), env);
BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef,
wrapperFuncVar.symbol,
trxMainWrapperFunc.function.symbol.retType);
BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
(wrapperInvocation, trxReturnNode.type));
trxMainWrapperFunc.function.receiver = funcNode.receiver;
trxMainFunc.function.receiver = funcNode.receiver;
trxMainBody.stmts.add(wrapperReturn);
rewrite(trxMainFunc.function, env);
// Replace the original body with: return beginParticipant(blockId, main, commit, abort);
List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc);
BLangInvocation participantInvocation
= ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs,
Collections.emptyList(), symResolver);
participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType;
BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
(participantInvocation, funcNode.symbol.retType));
funcNode.body = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(rewrite(stmt, env)));
return funcNode;
}
/** Resource functions become remote participants; all others are local participants. */
private Name getParticipantFunctionName(BLangFunction function) {
    boolean isResource = Symbols.isFlagOn(function.symbol.flags, Flags.RESOURCE);
    return isResource ? TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION : TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION;
}
@Override
// Intentionally a no-op: resources are not rewritten here — presumably they
// are handled through their enclosing construct's desugaring. TODO(review): confirm.
public void visit(BLangResource resourceNode) {
}
/** Desugars an annotation definition by rewriting each of its attachments. */
public void visit(BLangAnnotation annotationNode) {
    for (BLangAnnotationAttachment attachment : annotationNode.annAttachments) {
        rewrite(attachment, env);
    }
}
/** Desugars an annotation attachment by rewriting its value expression. */
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    final BLangExpression desugaredExpr = rewrite(annAttachmentNode.expr, env);
    annAttachmentNode.expr = desugaredExpr;
    result = annAttachmentNode;
}
@Override
public void visit(BLangSimpleVariable varNode) {
// Variables owned neither by an invokable nor by a let scope (e.g. module-level
// variables) drop their initializer here; it is handled elsewhere.
if (((varNode.symbol.owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE)
&& (varNode.symbol.owner.tag & SymTag.LET) != SymTag.LET) {
varNode.expr = null;
result = varNode;
return;
}
if (varNode.typeNode != null && varNode.typeNode.getKind() != null) {
varNode.typeNode = rewrite(varNode.typeNode, env);
}
// Rewrite the initializer and coerce it to the declared type when needed.
BLangExpression bLangExpression = rewriteExpr(varNode.expr);
if (bLangExpression != null) {
bLangExpression = addConversionExprIfRequired(bLangExpression, varNode.type);
}
varNode.expr = bLangExpression;
varNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = varNode;
}
@Override
public void visit(BLangLetExpression letExpression) {
// Evaluate inside the let expression's own environment; restored at the end.
SymbolEnv prevEnv = this.env;
this.env = letExpression.env;
BLangExpression expr = letExpression.expr;
// Lower to a block: let-variable definitions followed by the value expression.
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
// A definition may itself desugar into a block of statements.
if (node.getKind() == NodeKind.BLOCK) {
blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
} else {
blockStmt.addStatement((BLangSimpleVariableDef) node);
}
}
// Bind the let value to a fresh temp variable and yield a statement expression.
BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
expr.type, expr, expr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.type = expr.type;
result = rewrite(stmtExpr, env);
// Restore the enclosing environment.
this.env = prevEnv;
}
@Override
// Pass-through: tuple variables are lowered by the enclosing
// variable-definition desugaring. TODO(review): confirm.
public void visit(BLangTupleVariable varNode) {
result = varNode;
}
@Override
// Pass-through: record variables are lowered by the enclosing
// variable-definition desugaring. TODO(review): confirm.
public void visit(BLangRecordVariable varNode) {
result = varNode;
}
    @Override
    public void visit(BLangErrorVariable varNode) {
        // Error binding patterns are desugared at their definition site
        // (see visit(BLangErrorVariableDef)); nothing to do here.
        result = varNode;
    }
@Override
public void visit(BLangBlockStmt block) {
SymbolEnv blockEnv = SymbolEnv.createBlockEnv(block, env);
block.stmts = rewriteStmt(block.stmts, blockEnv);
result = block;
}
    @Override
    public void visit(BLangSimpleVariableDef varDefNode) {
        // Delegates to visit(BLangSimpleVariable) for the contained variable.
        varDefNode.var = rewrite(varDefNode.var, env);
        result = varDefNode;
    }
    @Override
    public void visit(BLangTupleVariableDef varDefNode) {
        // Desugars a tuple destructuring definition into a block of the form:
        //   any[] tuple = <rhs>;
        //   <one var-def per member binding, reading tuple[i]>
        //   <rest-binding population, if any>
        varDefNode.var = rewrite(varDefNode.var, env);
        BLangTupleVariable tupleVariable = varDefNode.var;
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
        // Intermediate holder typed any[] so members can be index-accessed uniformly.
        BType runTimeType = new BArrayType(symTable.anyType);
        String name = "tuple";
        final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(varDefNode.pos, name, runTimeType, null,
            new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                this.env.scope.owner));
        tuple.expr = tupleVariable.expr;
        final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
        variableDef.var = tuple;
        // Bind each member, then the rest binding, against the holder variable.
        createVarDefStmts(tupleVariable, blockStmt, tuple.symbol, null);
        createRestFieldVarDefStmts(tupleVariable, blockStmt, tuple.symbol);
        result = rewrite(blockStmt, env);
    }
private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
BVarSymbol tupleVarSymbol) {
final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
DiagnosticPos pos = blockStmt.pos;
if (arrayVar != null) {
BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
arrayExpr.type = arrayVar.type;
arrayVar.expr = arrayExpr;
BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
arrayVarDef.var = arrayVar;
BLangExpression tupleExpr = parentTupleVariable.expr;
BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
startIndexLiteral.value = (long) (isTupleType ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
: parentTupleVariable.memberVariables.size());
startIndexLiteral.type = symTable.intType;
BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
getModifiedIntRangeEndExpr(lengthInvocation));
BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
foreach.pos = pos;
foreach.collection = intRangeInvocation;
types.setForeachTypedBindingPatternType(foreach);
final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
"$foreach$i", foreach.varType);
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
foreach.isDeclaredWithVar = true;
BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
createLengthInvocation(pos, arrayVarRef));
indexAccessExpr.type = (isTupleType ? ((BTupleType) parentTupleVariable.type).restType : symTable.anyType);
createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
foreach.body = foreachBody;
blockStmt.addStatement(foreach);
}
}
    @Override
    public void visit(BLangRecordVariableDef varDefNode) {
        // Desugars a record destructuring definition into a block of the form:
        //   map<any> $map$0 = <rhs>;
        //   <one var-def per field binding, reading $map$0[<key>]>
        BLangRecordVariable varNode = varDefNode.var;
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
        // Intermediate holder typed map<any> so fields can be key-accessed uniformly.
        BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
        final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", runTimeType,
            null, new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                runTimeType, this.env.scope.owner));
        mapVariable.expr = varDefNode.var.expr;
        final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
        variableDef.var = mapVariable;
        createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);
        result = rewrite(blockStmt, env);
    }
    @Override
    public void visit(BLangErrorVariableDef varDefNode) {
        // Desugars an error destructuring definition into a block of the form:
        //   error $error$ = <rhs>;
        //   <reason/detail bindings derived from $error$>
        BLangErrorVariable errorVariable = varDefNode.errorVariable;
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
        BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
        final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varDefNode.pos, errorVarSymbol.name.value,
            symTable.errorType, null, errorVarSymbol);
        error.expr = errorVariable.expr;
        final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
        variableDef.var = error;
        createVarDefStmts(errorVariable, blockStmt, error.symbol, null);
        result = rewrite(blockStmt, env);
    }
/**
* This method iterate through each member of the tupleVar and create the relevant var def statements. This method
* does the check for node kind of each member and call the related var def creation method.
*
* Example:
* ((string, float) int)) ((a, b), c)) = (tuple)
*
* (a, b) is again a tuple, so it is a recursive var def creation.
*
* c is a simple var, so a simple var def will be created.
*
*/
private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
for (int index = 0; index < memberVars.size(); index++) {
BLangVariable variable = memberVars.get(index);
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
if (NodeKind.VARIABLE == variable.getKind()) {
createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol,
parentIndexAccessExpr);
continue;
}
if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
continue;
}
if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
continue;
}
if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
BType accessedElemType = symTable.errorType;
if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
accessedElemType = arrayType.eType;
}
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
}
}
}
    /**
     * Overloaded method to handle record variables.
     * Iterates over each field binding of the record pattern and creates the
     * relevant var-def statements, recursing into nested binding patterns.
     *
     * Example:
     * type Foo record {
     *      string name;
     *      (int, string) age;
     *      Address address;
     * };
     *
     * Foo {name: a, age: (b, c), address: d} = {record literal}
     *
     *  a is a simple variable, so a plain var-def is emitted for it.
     *
     *  (b, c) is a tuple pattern, so var-def creation recurses into it.
     *
     *  d is a record pattern, so var-def creation recurses into it.
     *
     *  A rest binding ({@code ...rest}), if present, is populated by filtering
     *  out the explicitly bound keys from the source map.
     */
    private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
        for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
            BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
            // Each field is read from the holder via its string key.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                recordFieldKeyValue.key.value);
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
                createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
                continue;
            }
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
                BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                    parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            }
        }
        if (parentRecordVariable.restParam != null) {
            DiagnosticPos pos = parentBlockStmt.pos;
            BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
            BLangSimpleVarRef variableReference;
            if (parentIndexAccessExpr != null) {
                // Nested pattern: materialize the accessed sub-map into a temp
                // variable so it can be referenced more than once below.
                BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1",
                    parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"),
                        this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner));
                mapVariable.expr = parentIndexAccessExpr;
                BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
                variableDef.var = mapVariable;
                variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
            } else {
                // Top-level pattern: the holder is the first var-def in the block.
                variableReference = ASTBuilderUtil.createVariableRef(pos,
                    ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
            }
            // rest = filter out every explicitly bound key from the source map.
            List<String> keysToRemove = parentRecordVariable.variableList.stream()
                .map(var -> var.getKey().getValue())
                .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                keysToRemove, restParamType, parentBlockStmt);
            BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
            BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
            BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos,
                parentBlockStmt);
            restParamVarDef.var = restParam;
            restParamVarDef.var.type = restParamType;
            restParam.expr = varRef;
        }
    }
    /**
     * Creates the var-def statements for the reason and detail bindings of an
     * error binding pattern, using the {@code reason()} and {@code detail()}
     * builtin functions on the bound error value.
     */
    private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                                   BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
        BVarSymbol convertedErrorVarSymbol;
        if (parentIndexBasedAccess != null) {
            // Nested pattern: pull the error value out of the parent container
            // via a temp variable, converting from any back to error. The access
            // node's type is temporarily widened to any for the conversion.
            BType prevType = parentIndexBasedAccess.type;
            parentIndexBasedAccess.type = symTable.anyType;
            BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++,
                symTable.errorType,
                addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                parentErrorVariable.pos);
            parentIndexBasedAccess.type = prevType;
            parentBlockStmt.addStatement(errorVarDef);
            convertedErrorVarSymbol = errorVarDef.var.symbol;
        } else {
            convertedErrorVarSymbol = errorVariableSymbol;
        }
        // reason binding: reasonVar = <error>.reason()
        parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
            parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
        if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
            parentErrorVariable.reason = null;
        } else {
            BLangSimpleVariableDef reasonVariableDef =
                ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
            reasonVariableDef.var = parentErrorVariable.reason;
        }
        // No detail bindings and no rest binding: nothing more to emit.
        if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
            && parentErrorVariable.restDetail == null) {
            return;
        }
        BType detailMapType;
        BType detailType = ((BErrorType) parentErrorVariable.type).detailType;
        if (detailType.tag == TypeTags.MAP) {
            detailMapType = detailType;
        } else {
            detailMapType = symTable.detailType;
        }
        // detail holder: $error$detail = <error>.detail()
        parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
            parentErrorVariable.pos,
            convertedErrorVarSymbol, null);
        BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
            parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
        detailTempVarDef.type = parentErrorVariable.detailExpr.type;
        parentBlockStmt.addStatement(detailTempVarDef);
        this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
        // One bound-variable definition per named detail entry.
        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
            BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
            createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
        }
        // Rest binding: everything in the detail map minus the named entries.
        if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
            DiagnosticPos pos = parentErrorVariable.restDetail.pos;
            BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                pos, detailTempVarDef.var.symbol);
            List<String> keysToRemove = parentErrorVariable.detail.stream()
                .map(detail -> detail.key.getValue())
                .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove,
                parentErrorVariable.restDetail.type, parentBlockStmt);
            BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                parentErrorVariable.restDetail.name.value,
                filteredDetail.type,
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                parentErrorVariable.restDetail.symbol);
            BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
            parentBlockStmt.addStatement(assignmentStmt);
        }
        rewrite(parentBlockStmt, env);
    }
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
BType targetType) {
BVarSymbol errorVarSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
this.env.enclPkg.packageID, targetType, this.env.scope.owner);
BLangSimpleVarRef variableRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
BLangExpression expr;
if (targetType.tag == TypeTags.RECORD) {
expr = variableRef;
} else {
expr = addConversionExprIfRequired(variableRef, targetType);
}
BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(pos, errorVarSym.name.value, targetType, expr,
errorVarSym);
return ASTBuilderUtil.createVariableDef(pos, errorVar);
}
    /**
     * Builds the statement pipeline that computes a rest binding's value:
     *   cast source to target map type
     *   -> entries()
     *   -> filter(<drop keysToRemove>)
     *   -> map(<entry back to value>)
     *   -> constructFrom(targetType)
     * Each stage is materialized as a var-def appended to {@code parentBlockStmt};
     * the variable holding the final result is returned.
     */
    private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                                   List<String> keysToRemove, BType targetType,
                                                   BLangBlockStmt parentBlockStmt) {
        BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
        // Unique suffix so repeated rest filters in one scope don't collide.
        int restNum = annonVarCount++;
        String name = "$map$ref$" + restNum;
        BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
        BLangInvocation entriesInvocation = generateMapEntriesInvocation(
            ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type);
        String entriesVarName = "$map$ref$entries$" + restNum;
        // entries() yields map<[string, V]> where V is the target's constraint.
        BType entriesType = new BMapType(TypeTags.MAP,
            new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
        BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType, parentBlockStmt,
            addConversionExprIfRequired(entriesInvocation, entriesType),
            entriesVarName);
        BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
        BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
        String filteredEntriesName = "$filtered$detail$entries" + restNum;
        BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation,
            filteredEntriesName);
        String filteredVarName = "$detail$filtered" + restNum;
        BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
        BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
        BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt,
            mapInvocation,
            filteredVarName);
        String filteredRestVarName = "$restVar$" + restNum;
        BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol);
        return defVariable(pos, targetType, parentBlockStmt,
            addConversionExprIfRequired(constructed, targetType),
            filteredRestVarName);
    }
private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) {
BLangInvocation invocationNode = createInvocationNode("entries", new ArrayList<>(), type);
invocationNode.expr = expr;
invocationNode.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries"));
invocationNode.requiredArgs = Lists.of(expr);
invocationNode.type = invocationNode.symbol.type.getReturnType();
invocationNode.langLibInvocation = true;
return invocationNode;
}
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
BLangLambdaFunction backToMapLambda) {
BLangInvocation invocationNode = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
invocationNode.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
invocationNode.type = invocationNode.symbol.type.getReturnType();
invocationNode.requiredArgs.add(backToMapLambda);
return invocationNode;
}
    /**
     * Generates the lambda {@code function ([string, any] $lambdaArg$0) returns any}
     * that projects a map entry tuple to its value (index 1), used to convert a
     * filtered entries map back into a plain value map.
     */
    private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
        // Unique function name per generated lambda.
        String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
        BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);

        // Single required parameter: the [string, any] entry tuple.
        BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
            getStringAnyTupleType(), this.env.scope.owner);
        BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
            null, keyValSymbol);
        function.requiredParams.add(inputParameter);

        // Return type: any.
        BLangValueType anyType = new BLangValueType();
        anyType.typeKind = TypeKind.ANY;
        anyType.type = symTable.anyType;
        function.returnTypeNode = anyType;

        BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
        function.body = functionBlock;

        // Body: val = $lambdaArg$0[1]; return val;
        BLangIndexBasedAccess indexBasesAccessExpr =
            ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol,
                ASTBuilderUtil
                    .createLiteral(pos, symTable.intType, (long) 1));
        BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
        functionBlock.addStatement(tupSecondElem);

        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
        returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);

        // Create and wire the function symbol, then register the function in
        // the enclosing package so it gets code-generated.
        BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
        functionSymbol.retType = function.returnTypeNode.type;
        functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
        functionSymbol.scope = env.scope;
        functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            symTable.anyType, null);
        function.symbol = functionSymbol;
        rewrite(function, env);
        env.enclPkg.addFunction(function);
        return createLambdaFunction(function, functionSymbol);
    }
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos,
BLangSimpleVariable entriesInvocationVar,
BLangLambdaFunction filter) {
BLangInvocation invocationNode = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type);
invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
invocationNode.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter"));
invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol));
invocationNode.type = invocationNode.symbol.type.getReturnType();
invocationNode.requiredArgs.add(filter);
return invocationNode;
}
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
BLangExpression expression, String name) {
Name varName = names.fromString(name);
BLangSimpleVariable detailMap = ASTBuilderUtil.createVariable(pos, name, varType, expression,
new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner));
BLangSimpleVariableDef constructedMap = ASTBuilderUtil.createVariableDef(pos, detailMap);
constructedMap.type = varType;
parentBlockStmt.addStatement(constructedMap);
env.scope.define(varName, detailMap.symbol);
return detailMap;
}
private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
BLangErrorVariable.BLangErrorDetailEntry detailEntry,
BLangExpression detailEntryVar) {
if (detailEntry.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
BLangSimpleVariableDef errorDetailVar = createVarDef(
((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
detailEntry.valueBindingPattern.type,
detailEntryVar,
detailEntry.valueBindingPattern.pos);
parentBlockStmt.addStatement(errorDetailVar);
} else if (detailEntry.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef(
detailEntry.valueBindingPattern.pos,
(BLangRecordVariable) detailEntry.valueBindingPattern);
recordVariableDef.var.expr = detailEntryVar;
recordVariableDef.type = symTable.recordType;
parentBlockStmt.addStatement(recordVariableDef);
} else if (detailEntry.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
BLangTupleVariableDef tupleVariableDef = ASTBuilderUtil.createTupleVariableDef(
detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern);
parentBlockStmt.addStatement(tupleVariableDef);
}
}
    /**
     * Creates the expression that reads one named detail entry from the temp
     * detail-map variable, i.e. {@code $error$detail[<key>]}.
     */
    private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                                 BVarSymbol tempDetailVarSymbol) {
        BLangExpression detailEntryVar = createIndexBasedAccessExpr(
            detailEntry.valueBindingPattern.type,
            detailEntry.valueBindingPattern.pos,
            createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
            tempDetailVarSymbol, null);
        if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
            // Detail values are pure (anydata|error); record that as the access's
            // original type so later passes treat it accordingly.
            bLangIndexBasedAccess.originalType = symTable.pureType;
        }
        return detailEntryVar;
    }
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
BLangExpression concatExpr = null;
BLangExpression currentExpr;
for (BLangExpression expr : exprs) {
currentExpr = expr;
if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) {
currentExpr = getToStringInvocationOnExpr(expr);
}
if (concatExpr == null) {
concatExpr = currentExpr;
continue;
}
BType binaryExprType =
TypeTags.isXMLTypeTag(concatExpr.type.tag) || TypeTags.isXMLTypeTag(currentExpr.type.tag)
? symTable.xmlType
: symTable.stringType;
concatExpr =
ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr,
binaryExprType, OperatorKind.ADD, null);
}
return concatExpr;
}
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
.lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
List<BLangExpression> requiredArgs = new ArrayList<BLangExpression>() {{
add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
}};
return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
symResolver);
}
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol,
BLangIndexBasedAccess parentIndexBasedAccess) {
BLangExpression onExpr =
parentIndexBasedAccess != null
? parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, onExpr, new ArrayList<>(), null, pos);
}
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
BVarSymbol errorVarSymbol,
BLangIndexBasedAccess parentIndexBasedAccess) {
BLangExpression onExpr =
parentIndexBasedAccess != null
? parentIndexBasedAccess : ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, onExpr, new ArrayList<>(), reasonType, pos);
}
    /**
     * Builds a {@code <targetType>.constructFrom(source)} langlib invocation.
     * The invocation's static type is {@code targetType|error} since
     * constructFrom can fail at runtime.
     */
    private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos,
                                                            BType targetType,
                                                            BVarSymbol source) {
        BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
        BLangInvocation invocationNode = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType);
        // The receiver is a typedesc expression for the target type.
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = targetType;
        typedescExpr.type = typedescType;
        invocationNode.expr = typedescExpr;
        invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM));
        invocationNode.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source));
        invocationNode.type = BUnionType.create(null, targetType, symTable.errorType);
        return invocationNode;
    }
    /**
     * Generates the predicate lambda used by rest-binding filters:
     * {@code function ([string, any] $lambdaArg$0) returns boolean} that
     * returns false when the entry key equals any name in {@code toRemoveList}
     * and true otherwise.
     */
    private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
        // Unique function name per generated lambda.
        String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
        BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
        BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
            getStringAnyTupleType(), this.env.scope.owner);
        BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
        // key = $lambdaArg$0[0];
        BLangIndexBasedAccess indexBasesAccessExpr =
            ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol, ASTBuilderUtil
                .createLiteral(pos, symTable.intType, (long) 0));
        BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
        functionBlock.addStatement(tupFirstElem);
        // One "if (key == <name>) { return false; }" per name to remove.
        for (String toRemoveItem : toRemoveList) {
            createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
        }
        // Trailing "return true;" plus symbol wiring and package registration.
        BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
        return createLambdaFunction(function, functionSymbol);
    }
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable, DiagnosticPos pos) {
List<String> fieldNamesToRemove = recordVariable.variableList.stream()
.map(var -> var.getKey().getValue())
.collect(Collectors.toList());
return createFuncToFilterOutRestParam(fieldNamesToRemove, pos);
}
    /**
     * Appends {@code if (<param as string> == <key>) { return false; }} to the
     * filter lambda's body block.
     */
    private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockFunctionBody blockStmt,
                              String key) {
        BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
        // The tuple element is typed any; convert it to string for the comparison.
        BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
        BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
        BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
        returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
        ifStmt.body = ifBlock;
        // Condition: (<converted> == <key>), wrapped in a group expression.
        BLangGroupExpr groupExpr = new BLangGroupExpr();
        groupExpr.type = symTable.booleanType;
        BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
            ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
            symTable.booleanType, OperatorKind.EQUAL, null);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
        groupExpr.expression = binaryExpr;
        ifStmt.expr = groupExpr;
    }
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaFunction.function = function;
lambdaFunction.type = functionSymbol.type;
return lambdaFunction;
}
    /**
     * Appends the trailing {@code return true;} to a filter lambda body, then
     * creates and wires the function's invokable symbol and registers the
     * function with the enclosing package for code generation.
     */
    private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
                                                       BLangBlockFunctionBody functionBlock) {
        BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
        trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
        // Create function symbol before desugaring the lambda body.
        BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value),
            env.enclPkg.packageID, function.type,
            env.enclEnv.enclVarSym, true);
        functionSymbol.retType = function.returnTypeNode.type;
        functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
        functionSymbol.scope = env.scope;
        // function ([string, any]) returns boolean
        functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            getRestType(functionSymbol), symTable.booleanType, null);
        function.symbol = functionSymbol;
        rewrite(function, env);
        env.enclPkg.addFunction(function);
        return functionSymbol;
    }
private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
BVarSymbol keyValSymbol) {
BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
null, keyValSymbol);
function.requiredParams.add(inputParameter);
BLangValueType booleanTypeKind = new BLangValueType();
booleanTypeKind.typeKind = TypeKind.BOOLEAN;
booleanTypeKind.type = symTable.booleanType;
function.returnTypeNode = booleanTypeKind;
BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
function.body = functionBlock;
return functionBlock;
}
/**
 * Returns a fresh {@code [string, any]} tuple type, used as the parameter type of
 * generated key-value predicate functions.
 */
private BTupleType getStringAnyTupleType() {
    // Plain list construction instead of double-brace initialization: the
    // anonymous ArrayList subclass form captures a reference to the enclosing
    // Desugar instance and creates a needless class per use site.
    List<BType> memberTypes = new ArrayList<>(2);
    memberTypes.add(symTable.stringType);
    memberTypes.add(symTable.anyType);
    return new BTupleType(memberTypes);
}
/**
 * This method creates a simple variable def and assigns an array access expression based on the given indexExpr.
*
* case 1: when there is no parent array access expression, but with the indexExpr : 1
* string s = x[1];
*
* case 2: when there is a parent array expression : x[2] and indexExpr : 3
* string s = x[2][3];
*
* case 3: when there is no parent array access expression, but with the indexExpr : name
* string s = x[name];
*
* case 4: when there is a parent map expression : x[name] and indexExpr : fName
* string s = x[name][fName];
*
* case 5: when there is a parent map expression : x[name] and indexExpr : 1
* string s = x[name][1];
*/
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
                                    BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
                                    BLangIndexBasedAccess parentArrayAccessExpr) {
    // The wildcard binding `_` produces no statement at all.
    if (names.fromIdNode(simpleVariable.name) == Names.IGNORE) {
        return;
    }
    // var def whose initializer reads tupleVar[indexExpr] (optionally chained
    // on a parent index access).
    final BLangSimpleVariableDef varDef =
            ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos, parentBlockStmt);
    varDef.var = simpleVariable;
    simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}
@Override
public void visit(BLangAssignment assignNode) {
    // Assignments whose LHS uses safe navigation (e.g. `a?.b = x`) expand into
    // a dedicated statement tree; hand those off and rewrite the expansion.
    if (safeNavigateLHS(assignNode.varRef)) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
        accessExpr.leafNode = true;
        result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
        result = rewrite(result, env);
        return;
    }
    assignNode.varRef = rewriteExpr(assignNode.varRef);
    // Fix: the RHS was previously rewritten twice (once standalone and once
    // inside the conversion call). Rewrite it exactly once before adding the
    // conversion required to match the LHS type.
    assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
    result = assignNode;
}
@Override
public void visit(BLangTupleDestructure tupleDestructure) {
    // Desugar `(a, b, ...rest) = expr` into a block that first stores the RHS
    // in a temporary any[] variable and then assigns every member out of it.
    final BLangBlockStmt block = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
    final String tempName = "tuple";
    final BType anyArrayType = new BArrayType(symTable.anyType);
    final BVarSymbol tempSymbol = new BVarSymbol(0, names.fromString(tempName),
            this.env.scope.owner.pkgID, anyArrayType, this.env.scope.owner);
    final BLangSimpleVariable tempVar = ASTBuilderUtil.createVariable(tupleDestructure.pos, tempName,
            anyArrayType, null, tempSymbol);
    tempVar.expr = tupleDestructure.expr;

    final BLangSimpleVariableDef tempVarDef =
            ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos, block);
    tempVarDef.var = tempVar;

    createVarRefAssignmentStmts(tupleDestructure.varRef, block, tempVar.symbol, null);
    createRestFieldAssignmentStmt(tupleDestructure, block, tempVar.symbol);
    result = rewrite(block, env);
}
// Desugars the `...rest` binding of a tuple destructure. Conceptually,
//   [a, b, ...rest] = tup
// becomes
//   rest = [];
//   foreach $foreach$i in <fixedMemberCount> ..< tup.length() {
//       rest[rest.length()] = tup[$foreach$i];
//   }
// No-op when the tuple var ref has no rest parameter.
private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt,
BVarSymbol tupleVarSymbol) {
BLangTupleVarRef tupleVarRef = tupleDestructure.varRef;
DiagnosticPos pos = blockStmt.pos;
if (tupleVarRef.restParam != null) {
BLangExpression tupleExpr = tupleDestructure.expr;
// Reset the rest variable to an empty array before appending into it.
BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam;
BArrayType restParamType = (BArrayType) restParam.type;
BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
arrayExpr.type = restParamType;
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = arrayExpr;
// Copying starts right after the last explicitly bound member.
BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
startIndexLiteral.value = (long) tupleVarRef.expressions.size();
startIndexLiteral.type = symTable.intType;
BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
// Build the int range `start ..< tup.length()` to drive the copy loop.
BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
getModifiedIntRangeEndExpr(lengthInvocation));
BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
foreach.pos = pos;
foreach.collection = intRangeInvocation;
types.setForeachTypedBindingPatternType(foreach);
// Loop variable `$foreach$i` iterating over the index range.
final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
"$foreach$i", foreach.varType);
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
foreach.isDeclaredWithVar = true;
BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
// Loop body: `rest[rest.length()] = tup[$foreach$i]` — indexing at the
// current length appends to the rest array.
BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
createLengthInvocation(pos, restParam));
indexAccessExpr.type = restParamType.eType;
createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
foreach.body = foreachBody;
blockStmt.addStatement(foreach);
}
}
/**
 * Builds an invocation of the lang-lib {@code length()} method on the given
 * collection expression, resolved against the collection's static type.
 */
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    BInvokableSymbol lengthSymbol = (BInvokableSymbol) symResolver
            .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME));
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthSymbol,
            Lists.of(collection), symResolver);
    invocation.argExprs = invocation.requiredArgs;
    invocation.type = lengthSymbol.type.getReturnType();
    return invocation;
}
/**
 * This method iterates through each member of the tupleVarRef and creates the relevant var ref assignment statements.
 * This method checks the node kind of each member and calls the related var ref creation method.
*
* Example:
 * ((a, b), c) = tuple
*
* (a, b) is again a tuple, so it is a recursive var ref creation.
*
* c is a simple var, so a simple var def will be created.
*
*/
// Walks the members of a tuple var ref and emits one assignment per member,
// dispatching on the member's node kind (simple refs, nested tuple/record/error
// refs). Nested refs recurse with an index access expression chained onto the
// parent so that e.g. `((a, b), c)` reads from tuple[0][0], tuple[0][1], tuple[1].
private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangExpression> expressions = parentTupleVariable.expressions;
for (int index = 0; index < expressions.size(); index++) {
BLangExpression expression = expressions.get(index);
// Leaf references (simple var, field access, index access, xml attribute
// access) get a direct `ref = tuple[index]` assignment.
if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index);
createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
tupleVarSymbol, parentIndexAccessExpr);
continue;
}
// Nested tuple ref: recurse with `tuple[index]` as the new parent access.
if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
continue;
}
// Nested record ref: recurse as above (map-typed access) and also register
// a type definition for the record's anonymous type.
if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangRecordVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
TypeDefBuilderHelper.addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol,
TypeDefBuilderHelper.createRecordTypeNode(
(BRecordType) recordVarRef.type,
env.enclPkg.packageID, symTable, recordVarRef.pos),
env);
continue;
}
// Nested error ref: recurse with an error-typed index access.
if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
}
}
}
/**
 * This method creates an assignment statement and assigns an array access expression based on the given indexExpr.
*
*/
private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt,
                                              BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
                                              BLangIndexBasedAccess parentArrayAccessExpr) {
    // The wildcard binding `_` produces no assignment.
    if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName) == Names.IGNORE) {
        return;
    }
    // RHS: tupleVar[indexExpr] (possibly chained on a parent access), converted
    // to the reference's static type if needed.
    BLangExpression rhs = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
    rhs = addConversionExprIfRequired(rhs, simpleVarRef.type);

    final BLangAssignment assignment =
            ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
    assignment.varRef = simpleVarRef;
    assignment.expr = rhs;
}
/**
 * Builds {@code tupleVar[indexExpr]} (optionally chained on {@code parentExpr})
 * and, for value types, wraps the access in an explicit conversion so the
 * {@code any}-typed element is unboxed to {@code varType}.
 */
private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr,
                                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) {
    BLangIndexBasedAccess access = ASTBuilderUtil.createIndexBasesAccessExpr(varPos,
            symTable.anyType, tupleVarSymbol, indexExpr);
    access.originalType = varType;
    if (parentExpr != null) {
        access.expr = parentExpr;
    }
    // Reference types can use the access directly; value types need a cast.
    if (!types.isValueType(varType)) {
        return access;
    }
    BLangTypeConversionExpr cast = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    cast.expr = access;
    cast.type = varType;
    return cast;
}
@Override
public void visit(BLangRecordDestructure recordDestructure) {
    // Desugar `{a, b} = expr` into a block that stores the RHS in a temporary
    // map<any> variable and then assigns each field out of it.
    final BLangBlockStmt block = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
    final String tempName = "$map$0";
    final BType mapAnyType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    final BVarSymbol tempSymbol = new BVarSymbol(0, names.fromString(tempName),
            this.env.scope.owner.pkgID, mapAnyType, this.env.scope.owner);
    final BLangSimpleVariable tempVar = ASTBuilderUtil.createVariable(recordDestructure.pos, tempName,
            mapAnyType, null, tempSymbol);
    tempVar.expr = recordDestructure.expr;

    final BLangSimpleVariableDef tempVarDef =
            ASTBuilderUtil.createVariableDefStmt(recordDestructure.pos, block);
    tempVarDef.var = tempVar;

    createVarRefAssignmentStmts(recordDestructure.varRef, block, tempVar.symbol, null);
    result = rewrite(block, env);
}
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    // Desugar `error(reason, ...detail) = expr` into a block that stores the RHS
    // in a temporary error variable and then assigns reason/detail from it.
    final BLangBlockStmt block = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
    final String tempName = "$error$";
    final BVarSymbol tempSymbol = new BVarSymbol(0, names.fromString(tempName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable tempVar = ASTBuilderUtil.createVariable(errorDestructure.pos, tempName,
            symTable.errorType, null, tempSymbol);
    tempVar.expr = errorDestructure.expr;

    final BLangSimpleVariableDef tempVarDef =
            ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos, block);
    tempVarDef.var = tempVar;

    createVarRefAssignmentStmts(errorDestructure.varRef, block, tempVar.symbol, null);
    result = rewrite(block, env);
}
// Emits one assignment per field of a record var ref, dispatching on the field
// reference's node kind; nested record/tuple/error refs recurse with a
// string-keyed index access chained onto the parent. Afterwards, if the record
// ref has a `...rest` binding, the remaining entries are filtered into it.
private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
BLangExpression variableReference = varRefKeyValue.variableReference;
// Fields are addressed by name: record[fieldName].
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
varRefKeyValue.variableName.getValue());
// Leaf references get a direct assignment from record[fieldName].
if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
indexExpr, recordVarSymbol, parentIndexAccessExpr);
continue;
}
// Nested record ref: recurse with record[fieldName] as the new parent.
if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
// Nested tuple ref: recurse with a tuple-typed access.
if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
symTable.tupleType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
// Nested error ref: recurse with an error-typed access.
if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos,
symTable.errorType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol,
arrayAccessExpr);
}
}
// Handle `...rest`: filter out the explicitly bound field names from the
// source map and assign what remains to the rest variable.
if (parentRecordVarRef.restParam != null) {
DiagnosticPos pos = parentBlockStmt.pos;
BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
BLangSimpleVarRef variableReference;
if (parentIndexAccessExpr != null) {
// Nested destructure: materialize the parent access into a temp map first.
BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
restParamType, this.env.scope.owner));
mapVariable.expr = parentIndexAccessExpr;
BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
variableDef.var = mapVariable;
variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
} else {
// Top-level destructure: reuse the temp map defined as the block's first stmt.
variableReference = ASTBuilderUtil.createVariableRef(pos,
((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
}
BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
.map(field -> field.variableName.value)
.collect(Collectors.toList());
BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
keysToRemove, restParamType, parentBlockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = varRef;
}
}
// Emits the assignments for an error destructure `error(reason, d1 = x, ...rest) = e`:
// the reason via the reason() builtin, each named detail entry via a map access on
// a temporary detail variable, and the rest binding via a filtered detail map.
private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
// Assign the reason unless it is bound to `_`.
if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
BLangAssignment reasonAssignment = ASTBuilderUtil
.createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type);
reasonAssignment.varRef = parentErrorVarRef.reason;
}
// Nothing else to do when there are no detail bindings and the rest is ignored.
if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
return;
}
// Capture the error's detail map into a fresh temp var ($error$detail$N).
BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
errorVarySymbol,
parentIndexAccessExpr);
BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++,
symTable.detailType, errorDetailBuiltinFunction,
parentErrorVarRef.pos);
detailTempVarDef.type = symTable.detailType;
parentBlockStmt.addStatement(detailTempVarDef);
this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
// Assign each named detail entry: ref = detail[name]; remember the extracted
// keys so the rest binding can exclude them.
List<String> extractedKeys = new ArrayList<>();
for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
extractedKeys.add(detail.name.value);
BLangVariableReference ref = (BLangVariableReference) detail.expr;
BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
createStringLiteral(detail.name.pos, detail.name.value),
detailTempVarDef.var.symbol, null);
if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
bLangIndexBasedAccess.originalType = symTable.pureType;
}
BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
detailAssignment.varRef = ref;
detailAssignment.expr = detailEntryVar;
}
// `...rest` receives the detail map minus the explicitly extracted keys.
if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
detailTempVarDef.var.symbol);
BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
extractedKeys,
parentErrorVarRef.restVar.type, parentBlockStmt);
BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
parentBlockStmt);
restAssignment.varRef = parentErrorVarRef.restVar;
restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
filteredDetail.symbol);
}
// Record-typed detail types need an initializer function registered.
BErrorType errorType = (BErrorType) parentErrorVarRef.type;
if (errorType.detailType.getKind() == TypeKind.RECORD) {
BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
tsymbol.initializerFunc = createRecordInitFunc();
tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
}
}
/**
 * Reports whether the rest binding of an error var ref is effectively ignored:
 * either absent entirely or explicitly bound to {@code _}.
 */
private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
    if (parentErrorVarRef.restVar == null) {
        return true;
    }
    return parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value.equals(IGNORE.value);
}
@Override
public void visit(BLangAbort abortNode) {
    // `abort` becomes `return -1;` — the transaction status code for abort.
    BLangReturn abortReturn = ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L);
    result = rewrite(abortReturn, env);
}
@Override
public void visit(BLangRetry retryNode) {
    // `retry` becomes `return 1;` — the transaction status code for retry.
    BLangReturn retryReturn = ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L);
    result = rewrite(retryReturn, env);
}
@Override
public void visit(BLangContinue nextNode) {
// `continue` needs no desugaring; pass the node through unchanged.
result = nextNode;
}
@Override
public void visit(BLangBreak breakNode) {
// `break` needs no desugaring; pass the node through unchanged.
result = breakNode;
}
@Override
public void visit(BLangReturn returnNode) {
    // Only the (optional) return value needs rewriting.
    BLangExpression returnedExpr = returnNode.expr;
    if (returnedExpr != null) {
        returnNode.expr = rewriteExpr(returnedExpr);
    }
    result = returnNode;
}
@Override
public void visit(BLangPanic panicNode) {
// Rewrite the panicked value; the panic node itself needs no restructuring.
panicNode.expr = rewriteExpr(panicNode.expr);
result = panicNode;
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
// Delegate to the XMLNS declaration's own rewrite.
xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);
result = xmlnsStmtNode;
}
@Override
public void visit(BLangXMLNS xmlnsNode) {
    // XMLNS declarations owned by an invokable or service become local nodes;
    // everything else becomes a package-level node.
    xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);
    BSymbol owner = xmlnsNode.symbol.owner;
    boolean isLocal = (owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE
            || (owner.tag & SymTag.SERVICE) == SymTag.SERVICE;
    BLangXMLNS generated = isLocal ? new BLangLocalXMLNS() : new BLangPackageXMLNS();
    generated.namespaceURI = xmlnsNode.namespaceURI;
    generated.prefix = xmlnsNode.prefix;
    generated.symbol = xmlnsNode.symbol;
    result = generated;
}
// Desugars compound assignment (`x += e` etc.). Non-index LHS becomes a plain
// assignment with the precomputed modified expression. For an index-based LHS,
// each index expression along the access chain is evaluated exactly once into a
// $tempN$ variable, the access chain is rebuilt from those temps, and a single
// `lhs = lhs <op> e` assignment is emitted — so nested index expressions have
// their side effects run only once.
public void visit(BLangCompoundAssignment compoundAssignment) {
BLangVariableReference varRef = compoundAssignment.varRef;
if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
// Simple LHS: recreate the ref (marking it as an lvalue) and assign.
if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
varRef.lhsVar = true;
}
result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
rewriteExpr(compoundAssignment.modifiedExpr));
return;
}
List<BLangStatement> statements = new ArrayList<>();
List<BLangSimpleVarRef> varRefs = new ArrayList<>();
List<BType> types = new ArrayList<>();
// Walk inward through the index-access chain, hoisting each index expression
// into a temp var def (prepended so evaluation order is outermost-first).
do {
BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$",
((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr,
compoundAssignment.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
tempIndexVarDef.var.symbol);
statements.add(0, tempIndexVarDef);
varRefs.add(0, tempVarRef);
types.add(0, varRef.type);
varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
} while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
// Rebuild the access chain using the hoisted temp index refs.
BLangVariableReference var = varRef;
for (int ref = 0; ref < varRefs.size(); ref++) {
var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
var.type = types.get(ref);
}
var.type = compoundAssignment.varRef.type;
// Emit `rebuiltLhs = rebuiltLhs <op> rhs`.
BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
rhsExpression.type = compoundAssignment.modifiedExpr.type;
BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
rhsExpression);
statements.add(assignStmt);
BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
result = rewrite(bLangBlockStmt, env);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
// Only the wrapped expression needs rewriting.
exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
result = exprStmtNode;
}
@Override
public void visit(BLangIf ifNode) {
// Rewrite condition, then-body, and the (possibly null) else branch in place.
ifNode.expr = rewriteExpr(ifNode.expr);
ifNode.body = rewrite(ifNode.body, env);
ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
result = ifNode;
}
@Override
public void visit(BLangMatch matchStmt) {
    // Desugar `match expr { ... }` into a block:
    //   <genVar> = expr;  <if/else-if chain over the match patterns>
    BLangBlockStmt matchBlock = (BLangBlockStmt) TreeBuilder.createBlockNode();
    matchBlock.pos = matchStmt.pos;

    String exprVarName = GEN_VAR_PREFIX.value;
    BVarSymbol exprVarSymbol = new BVarSymbol(0, names.fromString(exprVarName),
            this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner);
    BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos,
            exprVarName, matchStmt.expr.type, matchStmt.expr, exprVarSymbol);

    matchBlock.stmts.add(ASTBuilderUtil.createVariableDef(matchBlock.pos, matchExprVar));
    matchBlock.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));

    rewrite(matchBlock, this.env);
    result = matchBlock;
}
// Desugars `foreach` by materializing the collection into a `$data$` temp var
// and lowering to an iterator-driven while loop. Built-in collection types use
// the lang-lib iterator; iterable objects use their attached __iterator method.
// NOTE(review): the default branch emits only the data var def — presumably an
// unreachable/type-checked-out case; confirm against the type checker.
@Override
public void visit(BLangForeach foreach) {
BLangBlockStmt blockNode;
BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
foreach.collection.type, this.env.scope.owner);
BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
foreach.collection.type, foreach.collection, dataSymbol);
BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
BVarSymbol collectionSymbol = dataVariable.symbol;
switch (foreach.collection.type.tag) {
case TypeTags.STRING:
case TypeTags.ARRAY:
case TypeTags.TUPLE:
case TypeTags.XML:
case TypeTags.MAP:
case TypeTags.STREAM:
case TypeTags.RECORD:
// Built-in iterable types: resolve the lang-lib iterator function.
BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol);
blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, true);
break;
case TypeTags.OBJECT:
// Iterable objects: use the object's attached iterator method.
iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol);
blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, false);
break;
default:
blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.stmts.add(0, dataVarDef);
break;
}
rewrite(blockNode, this.env);
result = blockNode;
}
/**
 * Lowers a foreach to a while loop: defines the iterator variable, converts the
 * loop, and splices the collection's {@code $data$} definition in front.
 */
private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach,
                                                     BLangSimpleVariableDef dataVariableDefinition,
                                                     BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVariableDef iteratorDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol,
            iteratorInvokableSymbol, isIteratorFuncFromLangLib);
    BLangBlockStmt loweredBlock = desugarForeachToWhile(foreach, iteratorDef);
    loweredBlock.stmts.add(0, dataVariableDefinition);
    return loweredBlock;
}
/**
 * Looks up the attached iterator function on an iterable object's type.
 *
 * @param collectionSymbol symbol of the iterated collection; its type must be an object type
 * @return the attached iterator function's symbol
 * @throws NullPointerException if the object declares no iterator function (the
 *         original code dereferenced a null local with no message; callers are
 *         expected to have verified iterability during type checking)
 */
public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
    for (BAttachedFunction func : typeSymbol.attachedFuncs) {
        if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            // Return directly; the redundant alias local in the original served no purpose.
            return func.symbol;
        }
    }
    // Preserve the original failure mode (NPE) but make the cause diagnosable.
    throw new NullPointerException(
            "no iterator function found on object type: " + collectionSymbol.type);
}
// Resolves the lang-lib iterator function for the collection's static type.
BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
}
// Lowers foreach to the canonical iterator-driven while loop:
//   $result$ = iterator.next();
//   while ($result$ is <resultType>) {
//       var <loopVar> = $result$.value;
//       $result$ = iterator.next();
//       <original body>
//   }
// Statement order inside the body matters: the loop-var def is inserted at 0 and
// the next() re-assignment at 1, ahead of the user's statements.
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner);
// $result$ = iterator.next();  (primed once before the loop)
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
// Loop condition: `$result$ is <resultType>` (false once next() returns nil).
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
// Bind the loop variable from `$result$.value`, stripping nil before the access.
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr,
types.getSafeType(valueAccessExpr.expr.type, true, false));
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
whileNode.body.stmts.add(1, resultAssignment);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(resultVariableDefinition);
blockNode.addStatement(whileNode);
return blockNode;
}
/**
 * Wraps a semantic type in a user-defined type node with empty name identifiers;
 * only the resolved {@code type} field is consumed downstream.
 */
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType typeNode = new BLangUserDefinedType(
            ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
    typeNode.type = type;
    return typeNode;
}
@Override
public void visit(BLangWhile whileNode) {
// Rewrite condition and body in place.
whileNode.expr = rewriteExpr(whileNode.expr);
whileNode.body = rewrite(whileNode.body, env);
result = whileNode;
}
// Desugars `lock { body }` into:
//   <lock>;
//   var $errorResult = trap { body; () };
//   <unlock>;
//   if ($errorResult is error) { panic $errorResult; }
// Trapping the body guarantees the unlock runs even when the body panics; the
// panic is then re-raised after the lock is released. The lock stmt is pushed
// onto enclLocks for the duration of the rewrite and popped afterwards.
@Override
public void visit(BLangLock lockNode) {
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
blockStmt.addStatement(lockStmt);
enclLocks.push(lockStmt);
// Wrap the body in a statement expression producing nil, then trap it so the
// result is error|().
BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
statementExpression.type = symTable.nilType;
BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
trapExpr.type = nillableError;
trapExpr.expr = statementExpression;
BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
nillableError, trapExpr, nillableErrorVarSymbol);
BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
blockStmt.addStatement(simpleVariableDef);
// Unlock unconditionally before inspecting the trapped result.
BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
blockStmt.addStatement(unLockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
// if ($errorResult is error) { panic <error>; }
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = lockNode.pos;
panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
ifBody.addStatement(panicNode);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
isErrorTest.type = symTable.booleanType;
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
blockStmt.addStatement(ifelse);
result = rewrite(blockStmt, env);
enclLocks.pop();
}
@Override
public void visit(BLangLockStmt lockStmt) {
// Already in its lowest form; pass through.
result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
// Already in its lowest form; pass through.
result = unLockStmt;
}
@Override
public void visit(BLangTransaction transactionNode) {
    // Desugars a `transaction { ... }` block into a call to the transaction
    // package's initiator-begin function, passing the transaction body and the
    // onretry/committed/aborted handlers as lambdas that close over `env`.
    DiagnosticPos pos = transactionNode.pos;
    BType trxReturnType = symTable.intType;
    BType otherReturnType = symTable.nilType;
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType);
    DiagnosticPos invPos = transactionNode.pos;
    /* transaction block code will be desugar to function which returns int. Return value determines the status of
       the transaction code.
       ex.
           0 = successful
           1 = retry
          -1 = abort
       Since transaction block code doesn't return anything, we need to add return statement at end of the
       block unless we have abort or retry statement.
    */
    DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
            invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
    BLangStatement statement = null;
    if (!transactionNode.transactionBody.stmts.isEmpty()) {
        statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1);
    }
    // BUG FIX: the second condition previously re-checked NodeKind.ABORT. Per the
    // comment above, a trailing `retry` must also suppress the implicit `return 0`.
    if (statement == null
            || (statement.getKind() != NodeKind.ABORT && statement.getKind() != NodeKind.RETRY)) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L);
        transactionNode.transactionBody.addStatement(returnStmt);
    }
    // Missing handler blocks default to empty blocks so each lambda always exists.
    if (transactionNode.abortedBody == null) {
        transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.committedBody == null) {
        transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.onRetryBody == null) {
        transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.retryCount == null) {
        // Default retry count when the source does not specify one.
        transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L);
    }
    BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$", Collections.emptyList(),
                                                           trxReturnNode, transactionNode.transactionBody.stmts,
                                                           env, transactionNode.transactionBody.scope);
    BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$", Collections.emptyList(),
                                                              otherReturnNode, transactionNode.onRetryBody.stmts,
                                                              env, transactionNode.onRetryBody.scope);
    BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
                                                                Collections.emptyList(), otherReturnNode,
                                                                transactionNode.committedBody.stmts, env,
                                                                transactionNode.committedBody.scope);
    BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$", Collections.emptyList(),
                                                              otherReturnNode, transactionNode.abortedBody.stmts,
                                                              env, transactionNode.abortedBody.scope);
    trxMainFunc.capturedClosureEnv = env.createClone();
    trxOnRetryFunc.capturedClosureEnv = env.createClone();
    trxCommittedFunc.capturedClosureEnv = env.createClone();
    trxAbortedFunc.capturedClosureEnv = env.createClone();
    PackageID packageID = new PackageID(Names.BALLERINA_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY);
    BPackageSymbol transactionPkgSymbol = new BPackageSymbol(packageID, null, 0);
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(transactionPkgSymbol),
                                                                   TRX_INITIATOR_BEGIN_FUNCTION);
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
                                                                   getTransactionBlockId());
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
                                                  trxOnRetryFunc,
                                                  trxCommittedFunc, trxAbortedFunc);
    BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
                                                                              requiredArgs,
                                                                              Collections.emptyList(),
                                                                              symResolver);
    BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
    stmt.expr = trxInvocation;
    result = rewrite(stmt, env);
}
private String getTransactionBlockId() {
    // Unique per-package id of the form <org>$<package>$<index>; bumps the
    // running transaction counter on every call.
    PackageID pkgId = env.enclPkg.packageID;
    return pkgId.orgName + "$" + pkgId.name + "$" + transactionIndex++;
}
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, BLangFunctionBody lambdaBody) {
    // Builds a lambda node backed by a freshly named function that is defined
    // in the enclosing package before its body is attached.
    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction function = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambda.function = function;
    function.requiredParams.addAll(lambdaFunctionVariable);
    function.setReturnTypeNode(returnType);
    function.desugaredReturnType = true;
    defineFunction(function, env.enclPkg);
    // Re-read the params after defineFunction so symbol types reflect the defined function.
    lambdaFunctionVariable = function.requiredParams;
    function.body = lambdaBody;
    function.desugared = false;
    lambda.pos = pos;
    List<BType> paramTypes = new ArrayList<>();
    for (BLangSimpleVariable param : lambdaFunctionVariable) {
        paramTypes.add(param.symbol.type);
    }
    lambda.type = new BInvokableType(paramTypes, function.symbol.type.getReturnType(), null);
    return lambda;
}
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                 SymbolEnv env, Scope trxScope) {
    // Wraps the raw statement list in a block body (rewritten inside its own
    // function-body env), then delegates to the body-based factory above.
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    funcBody.scope = trxScope;
    funcBody.stmts = rewriteStmt(fnBodyStmts, SymbolEnv.createFuncBodyEnv(funcBody, env));
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, funcBody);
}
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 TypeNode returnType) {
    // Creates a bodiless, parameterless lambda shell: the function symbol is
    // defined now; the caller is expected to supply the body later.
    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction function = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambda.function = function;
    function.setReturnTypeNode(returnType);
    function.desugaredReturnType = true;
    defineFunction(function, env.enclPkg);
    function.desugared = false;
    lambda.pos = pos;
    return lambda;
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    // Registers a synthesized function with the target package: enters its
    // symbol and attaches the node as a top-level function of that package.
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, pkgEnv);
    pkgEnv.enclPkg.functions.add(funcNode);
    pkgEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    // fork nodes pass through this visitor unchanged.
    result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
    // Byte-array (blob) literals are expanded into explicit array literals;
    // every other literal passes through untouched.
    BType literalType = literalExpr.type;
    boolean isByteArray = literalType.tag == TypeTags.ARRAY
            && ((BArrayType) literalType).eType.tag == TypeTags.BYTE;
    result = isByteArray ? rewriteBlobLiteral(literalExpr) : literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    // Decodes a base16/base64 blob literal and rebuilds it as an array literal
    // with one byte literal per decoded byte.
    String[] blobParts = getBlobTextValue((String) literalExpr.value);
    byte[] decoded = BASE_64.equals(blobParts[0])
            ? Base64.getDecoder().decode(blobParts[1].getBytes(StandardCharsets.UTF_8))
            : hexStringToByteArray(blobParts[1]);
    BLangArrayLiteral byteArrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    byteArrayLiteral.type = literalExpr.type;
    byteArrayLiteral.pos = literalExpr.pos;
    byteArrayLiteral.exprs = new ArrayList<>();
    for (byte value : decoded) {
        byteArrayLiteral.exprs.add(createByteLiteral(literalExpr.pos, value));
    }
    return byteArrayLiteral;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
    // Splits literal text shaped like `base16 \`ABCD\`` (spaces ignored) into
    // { encoding, payload } using the first and last backticks as delimiters.
    String text = blobLiteralNodeText.replaceAll(" ", "");
    int open = text.indexOf('`');
    int close = text.lastIndexOf('`');
    return new String[]{text.substring(0, open), text.substring(open + 1, close)};
}
private static byte[] hexStringToByteArray(String str) {
    // Decodes a hex string two nibbles at a time. Assumes valid, even-length
    // input (Character.digit yields -1 for non-hex characters, which would
    // corrupt the output rather than throw — same as the original behavior).
    int length = str.length();
    byte[] bytes = new byte[length / 2];
    int out = 0;
    for (int in = 0; in < length; in += 2) {
        int high = Character.digit(str.charAt(in), 16);
        int low = Character.digit(str.charAt(in + 1), 16);
        bytes[out++] = (byte) ((high << 4) + low);
    }
    return bytes;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    // Lowers a list constructor into the concrete literal node its static type
    // calls for: tuple, JSON array, typedesc, or plain array.
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    int typeTag = listConstructor.type.tag;
    if (typeTag == TypeTags.TUPLE) {
        result = rewriteExpr(new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs,
                listConstructor.type));
        return;
    }
    if (typeTag == TypeTags.JSON) {
        result = rewriteExpr(new BLangJSONArrayLiteral(listConstructor.exprs,
                new BArrayType(listConstructor.type)));
        return;
    }
    if (getElementType(listConstructor.type).tag == TypeTags.JSON) {
        result = rewriteExpr(new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type));
        return;
    }
    if (typeTag == TypeTags.TYPEDESC) {
        // The constructor is used as a type descriptor value.
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
        return;
    }
    result = rewriteExpr(new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type));
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    // JSON-typed arrays (directly or by element type) become JSON array
    // literals; everything else passes through.
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    if (arrayLiteral.type.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.type));
    } else if (getElementType(arrayLiteral.type).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.type);
    } else {
        result = arrayLiteral;
    }
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        // The literal stands for a type descriptor; replace with a typedesc node.
        BLangTypedescExpr typedesc = new BLangTypedescExpr();
        typedesc.resolvedType = tupleLiteral.typedescType;
        typedesc.type = symTable.typeDesc;
        result = rewriteExpr(typedesc);
        return;
    }
    // Add implicit casts to `any` for each member, then rewrite the members.
    for (BLangExpression member : tupleLiteral.exprs) {
        BType memberType = member.impConversionExpr == null ? member.type : member.impConversionExpr.type;
        types.setImplicitCastExpr(member, memberType, symTable.anyType);
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression unwraps to its inner expression, unless the
    // group itself denotes a type descriptor.
    if (!groupExpr.isTypedescExpr) {
        result = rewriteExpr(groupExpr.expression);
        return;
    }
    BLangTypedescExpr typedesc = new BLangTypedescExpr();
    typedesc.resolvedType = groupExpr.typedescType;
    typedesc.type = symTable.typeDesc;
    result = rewriteExpr(typedesc);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    // Computed-key fields are moved to the tail (stable sort) before the
    // mapping constructor is rewritten.
    recordLiteral.fields.sort((a, b) -> Boolean.compare(isComputedKey(a), isComputedKey(b)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    // Rewrites a simple variable reference into the concrete var-ref node the
    // backend expects: XML qname, function pointer, type load, local, field,
    // or package-level reference — with constants of simple types inlined.
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // An xmlns-prefix reference becomes a string-typed XML qualified name.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.type = symTable.stringType;
        result = qnameExpr;
        return;
    }
    // No symbol resolved (e.g. '_' in some positions): nothing to rewrite.
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Prefer the original symbol over a derived one when present.
    // NOTE(review): grounded only in the originalSymbol field here — presumably
    // set for cloned/narrowed symbols; confirm against symbol-cloning code.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
        // A function used as a value (function pointer).
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        // A type referenced as a value (but not a constant): load the type.
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        // Owned by a function or let expression: a local variable.
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        // Owned by a structure: a field reference.
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Constants of simple literal types (tag <= BOOLEAN, or NIL) are
        // inlined as literals; other package-level refs stay symbolic.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
    }
    genVarRefExpr.type = varRefExpr.type;
    genVarRefExpr.pos = varRefExpr.pos;
    // LHS references (and '_') take the symbol's own type and skip conversion.
    if ((varRefExpr.lhsVar)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.lhsVar = varRefExpr.lhsVar;
        genVarRefExpr.type = varRefExpr.symbol.type;
        result = genVarRefExpr;
        return;
    }
    genVarRefExpr.lhsVar = varRefExpr.lhsVar;
    // RHS: read using the symbol's type, then convert back to the expected type.
    BType targetType = genVarRefExpr.type;
    genVarRefExpr.type = genVarRefExpr.symbol.type;
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Rewrites `expr.field` into the concrete access node for the static type
    // of `expr`: object/record field or attached method, lax (json/xml/map<xml>)
    // access, map access, or xml access.
    if (safeNavigate(fieldAccessExpr)) {
        // Safe-navigation (?. / error-lifting) chains are desugared separately.
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = fieldAccessExpr.expr.type;
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    // Rewriting may have changed the receiver's type; convert back if so.
    if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value);
    int varRefTypeTag = varRefType.tag;
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Attached method referenced as a value.
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (types.isLax(varRefType)) {
        if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
            if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
                // Lax access on map<xml-like> needs its own error-producing expansion.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            // Other lax access goes through json.
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    targetVarRef.lhsVar = fieldAccessExpr.lhsVar;
    targetVarRef.type = fieldAccessExpr.type;
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    // Desugars lax field access on a map into, roughly:
    //   T|error $mapAccessResult$;
    //   T|() $mapAccess = m["field"];
    //   if ($mapAccess is ()) { $mapAccessResult$ = error("{map}InvalidKey", key = "field"); }
    //   else { $mapAccessResult$ = $mapAccess; }
    //   => $mapAccessResult$
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.type, symTable.errorType);
    DiagnosticPos pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.type = fieldAccessType;
    statementExpression.type = fieldAccessType;
    // The underlying map access yields T|() (nil when the key is absent).
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType);
    mapAccessExpr.type = xmlOrNil;
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;
    // then-branch: construct the {map}InvalidKey error carrying the missing key.
    BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue("error");
    errorInvocation.name = name;
    errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    errorInvocation.symbol = symTable.errorConstructor;
    errorInvocation.type = symTable.errorType;
    ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>();
    errorInvocation.requiredArgs = errorCtorArgs;
    errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    errorCtorArgs.add(message);
    BLangSimpleVariableDef errorDef =
            createVarDef("_$_invalid_key_error", symTable.errorType, errorInvocation, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;
    // else-branch: key present — pass the accessed value through.
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;
    statementExpression.expr = resultRef;
    return statementExpression;
}
private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    // Lax field access on an xml value: the special field "_" maps to the
    // langlib element-name getter; anything else becomes an attribute lookup
    // (with ns-prefixed fields expanded to {uri}local form first).
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsAccess.nsSymbol.namespaceURI, fieldName);
    }
    if ("_".equals(fieldName)) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    ArrayList<BLangExpression> args = new ArrayList<>();
    args.add(createStringLiteral(fieldAccessExpr.field.pos, fieldName));
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    // Materializes "is this an optional (?.) access" as a rewritten boolean literal.
    BLangExpression optionalFlag =
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess());
    return rewrite(optionalFlag, env);
}
private String createExpandedQName(String nsURI, String localName) {
    // Expanded-name form: {namespaceURI}localName.
    return new StringBuilder("{").append(nsURI).append("}").append(localName).toString();
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Rewrites `expr[index]` into the concrete access node for expr's static
    // type: map, mapping (record/object), list, string, or xml.
    if (safeNavigate(indexAccessExpr)) {
        // Safe-navigation chains are desugared separately.
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangVariableReference targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    BType varRefType = indexAccessExpr.expr.type;
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    // Rewriting may have changed the receiver's type; convert back if so.
    if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Mapping types other than plain map: structure field access by index expr.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // Indexing a string: coerce the receiver to string first.
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    targetVarRef.lhsVar = indexAccessExpr.lhsVar;
    targetVarRef.type = indexAccessExpr.type;
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
    // Rewrites a function/method invocation: reorders named/defaultable args,
    // rewrites the argument expressions, and converts method calls on objects
    // and records into attached-function invocations.
    BLangInvocation genIExpr = iExpr;
    if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
        // NOTE(review): no early return here — the rewritten error constructor
        // still flows through the argument processing below; confirm intended.
        result = rewriteErrorConstructor(iExpr);
    }
    reorderArguments(iExpr);
    iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs);
    fixNonRestArgTypeCastInTypeParamInvocation(iExpr);
    iExpr.restArgs = rewriteExprs(iExpr.restArgs);
    annotationDesugar.defineStatementAnnotations(iExpr.annAttachments, iExpr.pos, iExpr.symbol.pkgID,
            iExpr.symbol.owner, env);
    if (iExpr.functionPointerInvocation) {
        visitFunctionPointerInvocation(iExpr);
        return;
    }
    iExpr.expr = rewriteExpr(iExpr.expr);
    result = genIExpr;
    if (iExpr.expr == null) {
        fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
        if (iExpr.exprSymbol == null) {
            return;
        }
        // Attached call with an implicit receiver: materialize the receiver ref.
        iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol);
        iExpr.expr = rewriteExpr(iExpr.expr);
    }
    switch (iExpr.expr.type.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!iExpr.langLibInvocation) {
                // The receiver becomes the first required argument of the
                // attached-function invocation.
                List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs);
                argExprs.add(0, iExpr.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol,
                                iExpr.type, iExpr.expr, iExpr.async);
                attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation;
                attachedFunctionInvocation.name = iExpr.name;
                attachedFunctionInvocation.annAttachments = iExpr.annAttachments;
                result = genIExpr = attachedFunctionInvocation;
            }
            break;
    }
    fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
}
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    // For langlib calls only: coerce each non-receiver required argument
    // (index 0 is the receiver) to its declared parameter type.
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int argIdx = 1; argIdx < args.size(); argIdx++) {
        args.set(argIdx, addConversionExprIfRequired(args.get(argIdx), params.get(argIdx).type));
    }
}
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    // When the call is a langlib call or the callee's return type involves a
    // type parameter, the invocation is reset to the declared return type and
    // wrapped in a conversion back to the originally inferred type.
    if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) {
        BType originalInvType = genIExpr.type;
        genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType;
        BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType);
        // If a conversion node was produced, use it directly.
        if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
            this.result = expr;
            return;
        }
        // Otherwise build an explicit conversion node so the cast is always present.
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = genIExpr;
        conversionExpr.targetType = originalInvType;
        conversionExpr.type = originalInvType;
        conversionExpr.pos = genIExpr.pos;
        this.result = conversionExpr;
    }
}
private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) {
    // Desugars error(reason, k = v, ...): the reason is coerced to string and
    // moved to the front of the arg list's tail; the named args are folded into
    // an immutable detail record appended as the final required argument.
    BLangExpression reasonExpr = iExpr.requiredArgs.get(0);
    // Drop an implicit conversion that would widen the reason away from string.
    if (reasonExpr.impConversionExpr != null &&
            reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) {
        reasonExpr.impConversionExpr = null;
    }
    reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType);
    reasonExpr = rewriteExpr(reasonExpr);
    iExpr.requiredArgs.remove(0);
    iExpr.requiredArgs.add(reasonExpr);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos,
            ((BErrorType) iExpr.symbol.type).detailType);
    List<BLangExpression> namedArgs = iExpr.requiredArgs.stream()
            .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .collect(Collectors.toList());
    if (namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type);
    } else {
        // Each named arg becomes a key-value field of the detail record and is
        // removed from the invocation's required args.
        for (BLangExpression arg : namedArgs) {
            BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            if (recordLiteral.type.tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type);
            }
            recordLiteral.fields.add(member);
            iExpr.requiredArgs.remove(arg);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), ((BErrorType) iExpr.symbol.type).detailType);
    }
    iExpr.requiredArgs.add(errorDetail);
    return iExpr;
}
public void visit(BLangTypeInit typeInitExpr) {
    // `new` expressions: streams get a stream-construct invocation; object
    // types get the full init-invocation desugaring.
    if (typeInitExpr.type.tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
        return;
    }
    result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
}
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    // Desugars `new T(args)` into a statement expression, roughly:
    //   T $obj$ = <create>;                       // typeInitExpr itself
    //   <ret> $temp$ = $obj$.<generated-init>(args);
    //   T|error $result$ = $temp$ is error ? $temp$ : $obj$;
    //   => $result$
    // When the initializer returns nil, the branch is skipped and $obj$ is
    // yielded directly.
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BType objType = getObjectType(typeInitExpr.type);
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    // Route the init invocation to the generated initializer with $obj$ as receiver.
    typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
        // Initializer cannot fail: invoke it for effect and yield the object.
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitExpr.initInvocation;
        typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.type = objVarRef.symbol.type;
        return stmtExpr;
    }
    // Initializer may return an error: capture its result and branch on it.
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type,
            typeInitExpr.initInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    // then-branch: initializer returned an error — propagate it as the result.
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    // else-branch: initialization succeeded — yield the constructed object.
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = resultVarRef.symbol.type;
    return stmtExpr;
}
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    // new stream<T, E>(iterator) -> lang.internal construct-stream(typedesc<T>, iterator).
    BInvokableSymbol constructStreamSym = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BStreamType streamType = (BStreamType) typeInitExpr.type;
    BType constraintType = streamType.constraint;
    BLangTypedescExpr constraintTypedesc = new BLangTypedescExpr();
    constraintTypedesc.resolvedType = constraintType;
    constraintTypedesc.type = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
    BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0);
    BLangInvocation constructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, constructStreamSym, new ArrayList<>(Lists.of(constraintTypedesc, iteratorObj)),
            symResolver);
    constructInvocation.type = new BStreamType(TypeTags.STREAM, constraintType, streamType.error, null);
    return constructInvocation;
}
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
    // Creates a variable definition of `type` initialized with `expr`, reusing
    // an existing symbol with the same name from the current scope if found.
    BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    // NOTE(review): lookup presumably returns notFoundSymbol (not null) on a
    // miss, making the null check redundant — confirm against SymbolResolver.
    if (objSym == null || objSym == symTable.notFoundSymbol) {
        objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    // The declared variable name is wrapped in '$'s; note that callers passing
    // names like "$obj$" therefore end up with "$$obj$$" in the AST.
    BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr,
            (BVarSymbol) objSym);
    BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos);
    objVarDef.var = objVar;
    objVarDef.type = objVar.type;
    return objVarDef;
}
private BType getObjectType(BType type) {
    // Resolves the object component of `type`: the type itself, or the first
    // object member of a union (e.g. Obj|error when init may fail). Unions with
    // no object member yield noType; anything else is a desugaring bug.
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
BLangErrorType getErrorTypeNode() {
    // Fresh error-type AST node bound to the built-in error type.
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.type = symTable.errorType;
    return errorType;
}
/**
 * Desugars a ternary (conditional) expression into an if-else statement over
 * a temporary result variable, wrapped in a statement expression that yields
 * that variable.
 */
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if () {
     *    $result$ = thenExpr;
     * } else {
     *    $result$ = elseExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    // then branch: $result$ = thenExpr;
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);
    // else branch: $result$ = elseExpr;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);
    // Statement expression { var def; if-else; } -> $result$
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = ternaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression. An alternative wait ({@code wait e1 | e2 | ...})
 * is parsed as a binary expression tree, which is flattened here into an
 * ordered list of alternative expressions; a simple wait keeps a single
 * rewritten expression.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() != NodeKind.BINARY_EXPR) {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    } else {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    }
    result = waitExpr;
}
/**
 * Flattens a wait binary-expression tree into {@code exprs}, left to right.
 *
 * @param binaryExpr root of the alternative-wait expression tree
 * @param exprs      accumulator receiving the rewritten leaf expressions
 * @return the same accumulator, for chaining
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    BLangExpression leftOperand = binaryExpr.lhsExpr;
    BLangExpression rightOperand = binaryExpr.rhsExpr;
    visitBinaryExprOfWait(leftOperand, exprs);
    visitBinaryExprOfWait(rightOperand, exprs);
    return exprs;
}
/**
 * Collects one wait alternative into {@code exprs}, recursing into nested
 * binary expressions so that only leaf expressions are added (rewritten).
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
/**
 * Desugars {@code wait {k1: f1, ...}} by rewriting each key/value entry and
 * replacing the whole expression with a wait-literal node.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr != null) {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        } else {
            // Entry with no value expression — rewrite the key expression instead.
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        }
    });
    BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
    result = rewriteExpr(expr);
}
/**
 * Rewrites the trapped expression and, unless it is nil-typed, inserts a
 * conversion up to the trap expression's own type.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    if (trapExpr.expr.type.tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type);
    }
    result = trapExpr;
}
/**
 * Desugars binary expressions: range operators become internal int-range
 * invocations, logical AND/OR get dedicated short-circuit handling, and
 * mismatched operand types are reconciled by inserting casts (byte vs int,
 * string/xml concatenation, promotion towards decimal and float).
 * Statement order matters: operands are rewritten before type tags are read.
 */
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            // a ..< b : adjust the end expression before building the range.
            binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
        return;
    }
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    // Arithmetic/bitwise operators may need byte operands widened to int
    // (depending on the expression's result type).
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
    // (In)equality between byte and int: cast the byte side up to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    // Same-typed operands need no reconciliation.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        return;
    }
    // string + x: either turn a string operand into an xml text literal (when
    // concatenating with xml) or cast the other operand to the string type.
    if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        return;
    }
    // Numeric promotion: decimal wins over float, float over the rest.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
    }
}
/**
 * Builds an invocation of the internal {@code createIntRange} function, used
 * to desugar the {@code ...} and {@code ..<} range operators.
 *
 * @param pos     diagnostic position for the generated invocation
 * @param lhsExpr range start expression
 * @param rhsExpr range end expression (already adjusted for half-open ranges)
 * @return the int-range construction invocation
 */
private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) {
    BSymbol rangeFuncSym = symTable.langInternalModuleSymbol.scope.lookup(Names.CREATE_INT_RANGE).symbol;
    ArrayList<BLangExpression> rangeArgs = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            pos, (BInvokableSymbol) rangeFuncSym, rangeArgs, symResolver);
    rangeInvocation.type = symTable.intRangeType;
    return rangeInvocation;
}
/**
 * When a byte-typed operand participates in an arithmetic/bitwise binary
 * expression whose result type is int, wraps that operand in a byte-to-int
 * conversion. Does nothing when the parent's type is unknown or when neither
 * operand is a byte.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.parent == null || binaryExpr.parent.type == null) {
        return;
    }
    boolean rhsIsByte = binaryExpr.rhsExpr.type.tag == TypeTags.BYTE;
    boolean lhsIsByte = binaryExpr.lhsExpr.type.tag == TypeTags.BYTE;
    if (!rhsIsByte && !lhsIsByte) {
        return;
    }
    if (binaryExpr.type.tag != TypeTags.INT) {
        return;
    }
    if (rhsIsByte) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsIsByte) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression is a bitwise shift operation
 * ({@code <<}, {@code >>}, or {@code >>>}).
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars the elvis expression {@code lhs ?: rhs} into a match expression
 * that yields the (rewritten) {@code rhs} when {@code lhs} matches nil.
 */
@Override // was missing; every sibling visit(...) override in this class carries it
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
            rewriteExpr(elvisExpr.rhsExpr)));
    matchExpr.type = elvisExpr.type;
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}
/**
 * Desugars unary expressions. Bitwise complement ({@code ~}) is rewritten to
 * an XOR binary expression; every other operator keeps the node with a
 * rewritten operand.
 */
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
        // ~a is desugared to a ^ -1 (a ^ 0xff for byte operands).
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * Desugars a bitwise complement (~) unary expression into a bitwise XOR
 * binary expression, e.g. {@code ~a -> a ^ -1}:
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 * For byte operands the mask is {@code 0xff} and the byte-typed XOR operator
 * is resolved; otherwise the int-typed operator with mask {@code -1} is used.
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final DiagnosticPos pos = unaryExpr.pos;
    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    // Both the all-ones mask and the operator signature depend on whether the
    // operand is a byte or an int.
    boolean isByteOperand = TypeTags.BYTE == unaryExpr.type.tag;
    BType operandType = isByteOperand ? symTable.byteType : symTable.intType;
    long allOnesMask = isByteOperand ? 0xffL : -1L;
    xorExpr.type = operandType;
    xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, operandType, allOnesMask);
    xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
            operandType, operandType);
    result = rewriteExpr(xorExpr);
}
/**
 * Rewrites a type conversion expression (its type node and inner expression).
 */
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // No target type node but annotation attachments present: the conversion
    // node itself is dropped and only the inner expression is kept.
    // NOTE(review): presumably an annotation-only cast — confirm against callers.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
/**
 * Registers the lambda with the enclosing package's lambda list and keeps the
 * node unchanged.
 */
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    result = bLangLambdaFunction;
}
/**
 * Desugars an arrow function ({@code (a, b) => expr}) into a full lambda
 * function: builds a function node whose body is a single return of the arrow
 * body expression, creates and defines its invokable symbol, moves the arrow
 * parameters into the function's scope, and registers the resulting function
 * with the enclosing package.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Build the function node wrapping the arrow body.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type is the static type of the arrow body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.type = bLangArrowFunction.body.expr.type;
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.type = bLangArrowFunction.funcType;
    // Create and define the function symbol in a fresh function env.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
            env.enclEnv.enclVarSym, true);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Re-home each parameter symbol in the new function's scope.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.type;
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.type, null);
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // The lambda captures the current environment as its closure env.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}
/**
 * Attaches the given symbol to the invokable node and gives both the symbol
 * and the invokable env a fresh scope owned by that symbol.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
/** XML qualified names need no further desugaring. */
@Override
public void visit(BLangXMLQName xmlQName) {
    result = xmlQName;
}
/** Rewrites the name and value expressions of an XML attribute. */
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
/**
 * Rewrites an XML element literal's tag names, children, and attributes, and
 * converts each namespace-declaration attribute into an inline XMLNS node
 * (package-level or local depending on the owning scope).
 */
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Collect xmlns declarations among the attributes as inline namespaces.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        BLangXMLNS xmlns;
        // Package-owned scopes get a package-level XMLNS node; others local.
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    result = xmlElementLiteral;
}
/** Concatenates the text fragments of an XML text literal. */
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
/** Concatenates the text fragments of an XML comment literal. */
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
/** Rewrites the target and concatenates the data fragments of an XML PI. */
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
/** Concatenates the text fragments of an XML quoted string. */
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
/** A string template desugars to the concatenation of its parts. */
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/** Clones the sent value (isolation between workers) and rewrites the key. */
@Override
public void visit(BLangWorkerSend workerSendNode) {
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type);
    if (workerSendNode.keyExpr != null) {
        workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
    }
    result = workerSendNode;
}
/** Clones the value sent by a synchronous send. */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
    result = syncSendExpr;
}
/** Rewrites the optional key expression of a worker receive. */
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    if (workerReceiveNode.keyExpr != null) {
        workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
    }
    result = workerReceiveNode;
}
/** Derives the distinct worker identifiers from the cached send statements. */
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
/**
 * Rewrites an XML attribute access expression; marks a QName index as used in
 * XML, then either keeps the node (lvalue / indexed access) or rewrites it
 * once more after flagging it as desugared.
 */
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    // Mark desugared before the possible second rewrite to avoid recursion.
    xmlAttributeAccessExpr.desugared = true;
    if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}
// ---------------------------------------------------------------------------
// The following node kinds are produced by earlier desugaring/rewriting and
// are already in their final form; each visit passes the node through as-is.
// ---------------------------------------------------------------------------
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}
/**
 * Desugars {@code x.<elem>} element access into an invocation of the internal
 * XML getElements function, passing the expanded name filters.
 */
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}
/**
 * Expands XML element name filters into fully-qualified-name string literals.
 * A filter with a known namespace prefix becomes {ns-uri}name; an unprefixed
 * filter uses the default namespace (unless the filter is the wildcard "*").
 *
 * @param filters element filters from an element access / navigation node
 * @return one string-literal argument per filter
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            // No prefix match: qualify with the default namespace, except for
            // the "*" wildcard which stays unqualified.
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}
/**
 * Builds a lang.xml langlib method invocation on the given expression.
 *
 * @param pos          diagnostic position for the generated nodes
 * @param functionName langlib function name to invoke
 * @param invokeOnExpr receiver expression (rewritten here)
 * @param args         additional required arguments (after the receiver)
 * @param restArgs     rest arguments (rewritten here)
 * @return the configured invocation node
 */
private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = invokeOnExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));
    // The receiver doubles as the first required argument of a langlib call.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType();
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Desugars XML step expressions ({@code x/**<e>}, {@code x/*}, {@code x/<e>})
 * into the corresponding internal langlib invocations: descendants selection,
 * children, or index-filtered children (index -1 meaning "all").
 */
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
        result = rewriteExpr(invocationNode);
    } else {
        BLangExpression childIndexExpr;
        // A missing child index is encoded as -1 for the internal function.
        if (xmlNavigation.childIndex == null) {
            childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
        } else {
            childIndexExpr = xmlNavigation.childIndex;
        }
        ArrayList<BLangExpression> args = new ArrayList<>();
        args.add(rewriteExpr(childIndexExpr));
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
        result = rewriteExpr(invocationNode);
    }
}
/** Rewrites the lhs of an is-assignable expression. */
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
/** Function-pointer invocations are already desugared; pass through. */
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    result = fpInvocation;
}
/** Rewrites the type node of a typedesc expression. */
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
/**
 * Normalizes an int range to inclusive bounds (adjusting exclusive start/end
 * expressions first) and rewrites both bound expressions.
 */
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}
/** A rest-args (...) expression desugars to its inner expression. */
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
/** A named-arg wrapper desugars to its (rewritten) value expression. */
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
/**
 * Desugars a match expression into a match statement over a temporary result
 * variable: each pattern clause assigns its (converted) expression to the
 * temporary, and the whole construct becomes a statement expression yielding
 * that temporary. A default pattern is added first so the match is total.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    addMatchExprDefaultCase(bLangMatchExpression);
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            matchTempResultVarName, bLangMatchExpression.type, null,
            new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID,
                    bLangMatchExpression.type, this.env.scope.owner));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;
    BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);
        // Each clause body: $temp_result$ = <converted clause expression>;
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
        patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }
    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
            patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
    statementExpr.type = bLangMatchExpression.type;
    result = rewriteExpr(statementExpr);
}
/** {@code check} desugars via the shared check/checkpanic handler. */
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
/** {@code checkpanic} desugars via the shared handler with panic semantics. */
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Shared desugaring for {@code check}/{@code checkpanic}: introduces a
 * temporary variable, matches the checked expression against a success
 * pattern (assigning the value) and an error pattern (returning or panicking
 * per {@code isCheckPanic}), and yields the temporary via a statement
 * expression.
 *
 * @param checkedExpr  the check/checkpanic expression
 * @param isCheckPanic true for checkpanic (panic instead of returning error)
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos,
            checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0,
                    names.fromString(checkedExprVarName),
                    this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;
    BLangMatchTypedBindingPatternClause patternSuccessCase =
            getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                    checkedExprVar.symbol, null);
    BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
            this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic);
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
            new ArrayList<BLangMatchTypedBindingPatternClause>() {{
                add(patternSuccessCase);
                add(patternErrorCase);
            }});
    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
            new ArrayList<BLangStatement>() {{
                add(checkedExprVarDef);
                add(matchStmt);
            }});
    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.type = checkedExpr.type;
    result = rewriteExpr(statementExpr);
}
/**
 * Desugars a service constructor into an empty type-init expression of the
 * service's type definition, rewriting the service's annotation attachments.
 */
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
/**
 * Rewrites a type-test ({@code is}) expression's operand and type node.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.type)) {
        // NOTE(review): the return value of addConversionExprIfRequired is
        // discarded here, so this call has no effect on the tree — possibly
        // it should be `expr = addConversionExprIfRequired(...)`. Confirm
        // intent before changing.
        addConversionExprIfRequired(expr, symTable.anyType);
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
/**
 * Desugars annotation access ({@code expr.@annot}) into a binary expression
 * with the ANNOT_ACCESS operator, whose rhs is the annotation's string alias.
 */
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.type = annotAccessExpr.type;
    // Synthesize an operator symbol for the ANNOT_ACCESS pseudo-operator.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(binaryExpr.lhsExpr.type,
                    binaryExpr.rhsExpr.type),
                    annotAccessExpr.type, null), null);
    result = rewriteExpr(binaryExpr);
}
/** Rewrites the operand of an is-like expression. */
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
/** Rewrites both parts of a statement expression. */
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
/** Query expressions are desugared by the dedicated query desugar. */
@Override
public void visit(BLangQueryExpr queryExpr) {
    BLangStatementExpression stmtExpr = queryDesugar.desugarQueryExpr(queryExpr, env);
    result = rewrite(stmtExpr, env);
}
/** Query actions are desugared by the dedicated query desugar. */
@Override
public void visit(BLangQueryAction queryAction) {
    BLangStatementExpression stmtExpr = queryDesugar.desugarQueryAction(queryAction, env);
    result = rewrite(stmtExpr, env);
}
/** Rewrites the member expressions of a JSON array literal. */
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
/**
 * Desugars a constant declaration: simple-typed constants (tags up to
 * boolean, plus nil) are replaced by a literal carrying the resolved value;
 * other constants have their expression rewritten. Annotation attachments
 * are rewritten in both cases.
 */
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // A non-nil simple constant must have a resolved value by now.
        if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
/** Ignore expressions need no desugaring. */
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    result = ignoreExpr;
}
/** A constant reference is replaced by a literal with its resolved value. */
@Override
public void visit(BLangConstRef constantRef) {
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value);
}
/**
 * Builds {@code $iterator$ = <collection>.iterator()} as a variable
 * definition, invoking the given iterator function on the collection symbol.
 *
 * @param pos                       diagnostic position for generated nodes
 * @param collectionSymbol          symbol of the iterated collection variable
 * @param iteratorInvokableSymbol   symbol of the iterator function to invoke
 * @param isIteratorFuncFromLangLib whether the iterator is a langlib method
 * @return the {@code $iterator$} variable definition
 */
BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.type = iteratorInvokableSymbol.retType;
    // The receiver is also passed as the sole argument.
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds {@code $result$ = $iterator$.next()} as a variable definition with
 * the (nillable) iterator result type.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
/**
 * Builds {@code $result$ = $iterator$.next();} as an assignment, narrowing
 * the receiver's type to its non-error variant for the call.
 */
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos,
                                          BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Strip the error component from the receiver type before invoking next().
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}
/**
 * Builds an invocation of the iterator object's attached {@code next}
 * function on the given iterator variable.
 */
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    // The receiver is also passed as the sole required argument.
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.type = nextFuncSymbol.retType;
    return nextInvocation;
}
/**
 * Finds the attached function named "next" on the given object type.
 *
 * @param iteratorType object type expected to expose a {@code next} function
 * @return the matching attached function, or {@code null} when none is named "next"
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    return objectSymbol.attachedFuncs.stream()
            .filter(attachedFunc -> "next".equals(attachedFunc.funcName.value))
            .findFirst()
            .orElse(null);
}
/**
 * Builds the field access {@code <result>.value}, typed as the loop variable's type, for
 * extracting the payload out of an iterator's {@code next()} result record.
 *
 * @param pos          position attached to the generated nodes
 * @param varType      type to stamp on the field access (the loop variable's type)
 * @param resultSymbol symbol of the {@code $result$} variable being accessed
 * @return the generated field-based access expression
 */
BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) {
    BLangIdentifier valueField = ASTBuilderUtil.createIdentifier(pos, "value");
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangFieldBasedAccess valueAccess = ASTBuilderUtil.createFieldAccessExpr(resultRef, valueField);
    valueAccess.pos = pos;
    valueAccess.type = varType;
    valueAccess.originalType = varType;
    return valueAccess;
}
/**
 * Converts an arrow function's single-expression body into a block body containing one
 * {@code return <expr>;} statement.
 *
 * @param bLangArrowFunction arrow function whose expression body is wrapped
 * @return the generated block function body
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bLangArrowFunction.body.expr.pos;
    returnStmt.setExpression(bLangArrowFunction.body.expr);
    BlockFunctionBodyNode bodyBlock = TreeBuilder.createBlockFunctionBodyNode();
    bodyBlock.addStatement(returnStmt);
    return bodyBlock;
}
/**
 * Builds an invocation of a function resolved by name from the root scope.
 *
 * <p>NOTE(review): the root-scope lookup may yield a null symbol when {@code functionName}
 * is unknown; callers appear to only pass names of built-in functions — verify at call sites.
 *
 * @param functionName simple name of the function, looked up in {@code symTable.rootScope}
 * @param args         expressions used as the required arguments
 * @param retType      type stamped on the invocation node
 * @return the generated invocation node (no package alias, no position set)
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.type = retType;
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Builds an invocation of a lang-lib method {@code onExpr.functionName(args...)}, resolving the
 * method against {@code onExpr}'s type.
 *
 * @param functionName lang-lib method name (e.g. "clone", "slice", "push")
 * @param onExpr       receiver expression; also prepended as the first required argument
 * @param args         remaining argument expressions
 * @param retType      type to stamp on the invocation; when {@code null}, the resolved
 *                     method symbol's declared return type is used instead
 * @param pos          position attached to the generated nodes
 * @return the generated invocation node, flagged as a lang-lib invocation
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    DiagnosticPos pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName));
    // Lang-lib methods take the receiver as their first required argument.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.type = retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType;
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Creates a fresh, empty array literal typed {@code any[]}.
 *
 * @return the generated array literal node with an empty expression list
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.type = new BArrayType(symTable.anyType);
    arrayLiteral.exprs = new ArrayList<>();
    return arrayLiteral;
}
/**
 * Desugars an invocation made through a function pointer into a
 * {@code BFunctionPointerInvocation} wrapping a rewritten reference to the pointer itself.
 *
 * <p>When the invocation has a receiver ({@code iExpr.expr != null}) the pointer is reached via
 * a field access ({@code expr.field}); otherwise a plain simple var ref is used. The result is
 * stored in {@code this.result} (visitor pattern), not returned.
 *
 * @param iExpr the function-pointer invocation being desugared
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangVariableReference expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.type = iExpr.symbol.type;
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps {@code expr} in a lang-lib {@code clone()} call, converting the result to
 * {@code lhsType} if needed. Value-typed and error-typed expressions are returned unchanged.
 *
 * @param expr    expression to clone
 * @param lhsType target type for the cloned value
 * @return the clone invocation (possibly wrapped in a conversion), or {@code expr} itself
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    // Value types and errors need no clone — return the original expression untouched.
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Wraps {@code expr} in a lang-lib {@code cloneReadOnly()} call, converting the result to
 * {@code lhsType} if needed. Value-typed and error-typed expressions are returned unchanged.
 *
 * @param expr    expression to clone as read-only
 * @param lhsType target type for the cloned value
 * @return the cloneReadOnly invocation (possibly wrapped in a conversion), or {@code expr} itself
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    // Same guard as visitCloneInvocation: value types and errors pass through untouched.
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            expr.type, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Core desugaring entry point: visits {@code node} under the given environment and returns the
 * desugared replacement produced by the visitor via {@code this.result}.
 *
 * <p>The current environment is saved and restored around the visit, so nested rewrites may
 * freely switch environments. Already-desugared nodes are returned as-is (idempotence guard).
 * The returned node is marked {@code desugared} to prevent re-processing.
 *
 * @param node node to desugar; may be {@code null}, in which case {@code null} is returned
 * @param env  symbol environment the node should be desugared under
 * @return the desugared node (may be a different node than the input)
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    // Visitor protocol: the accept() call above must have stored its output in this.result.
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Desugars an expression under the current environment.
 *
 * <p>If the node carries a pending implicit conversion ({@code impConversionExpr}), that
 * conversion wrapper is visited instead of the bare node, and the field is cleared so the
 * conversion is not applied twice. Already-desugared nodes are returned unchanged.
 *
 * @param node expression to desugar; may be {@code null}, in which case {@code null} is returned
 * @return the desugared expression produced by the visitor
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        // Visit the implicit-conversion wrapper instead, and detach it to avoid re-wrapping.
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Desugars a statement, maintaining the parent-linked chain of {@code BLangStatementLink}s
 * around the rewrite so each desugared statement knows its enclosing statement link.
 *
 * <p>A new link is pushed before rewriting and popped after; the resulting statement and the
 * link are wired to each other ({@code link.statement} / {@code stmt.statementLink}).
 *
 * @param statement statement to desugar; may be {@code null}, in which case {@code null} is returned
 * @param env       symbol environment the statement should be desugared under
 * @return the desugared statement
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}
/**
 * Desugars every statement in the list in place.
 *
 * @param nodeList statements to desugar; mutated in place
 * @param env      symbol environment the statements are desugared under
 * @return the same (mutated) list, for call-chaining convenience
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    // Idiomatic in-place transform; equivalent to the manual index loop.
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
/**
 * Desugars every node in the list in place.
 *
 * @param nodeList nodes to desugar; mutated in place
 * @param env      symbol environment the nodes are desugared under
 * @return the same (mutated) list, for call-chaining convenience
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    // Idiomatic in-place transform; equivalent to the manual index loop.
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
/**
 * Desugars every expression in the list in place under the current environment.
 *
 * @param nodeList expressions to desugar; mutated in place
 * @return the same (mutated) list, for call-chaining convenience
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    // Idiomatic in-place transform; equivalent to the manual index loop.
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
/**
 * Creates a string literal node carrying the symbol table's string type.
 *
 * @param pos   position attached to the literal
 * @param value literal string value
 * @return the generated literal node
 */
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
/**
 * Creates an int literal node. Note: no source position is set by this helper.
 *
 * @param value literal long value
 * @return the generated literal node typed as int
 */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.type = symTable.intType;
    intLiteral.value = value;
    return intLiteral;
}
/**
 * Creates a byte literal node; the value is stored as its unsigned int form (0-255).
 *
 * @param pos   position attached to the literal
 * @param value literal byte value
 * @return the generated literal node typed as byte
 */
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral literal = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    literal.pos = pos;
    return literal;
}
/**
 * Wraps {@code expr} in an explicit type-conversion node targeting {@code targetType}.
 *
 * @param expr       expression to convert; its position is reused for the conversion node
 * @param targetType type the conversion yields (stamped as both type and targetType)
 * @return the generated conversion expression
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    castExpr.expr = expr;
    castExpr.pos = expr.pos;
    castExpr.targetType = targetType;
    castExpr.type = targetType;
    return castExpr;
}
/**
 * Unwraps nested array types down to the ultimate (non-array) element type.
 *
 * @param type possibly-array type to unwrap
 * @return {@code type} itself when not an array; otherwise the innermost element type
 */
private BType getElementType(BType type) {
    BType current = type;
    // Iterative form of the original recursion: peel off one array layer per step.
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Appends an implicit {@code return ();} to a function body when the function can complete
 * normally without one: non-native, block-bodied, worker-free functions whose return type is
 * nillable and whose last statement is not already a return.
 *
 * <p>The synthetic return's position is anchored to the invokable's end line so diagnostics
 * and debugging map to the closing of the function.
 *
 * @param invokableNode function/method to patch; mutated in place
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable()
            && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        DiagnosticPos invPos = invokableNode.pos;
        // Anchor the synthetic return at the invokable's ending line.
        DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol,
                invPos.sCol);
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature.
 *
 * <p>Handles three shapes of call, mutating {@code iExpr} in place:
 * <ol>
 *   <li>No spread (`...x`) rest arg: individual rest args are packed into one array literal
 *       matching the rest parameter's array type.</li>
 *   <li>A single spread rest arg that also fills required params: the spread expression is
 *       hoisted into a temp var (so it is evaluated once), required params are filled from it
 *       via {@link #reorderNamedArgs}, and the leftover tail is re-sliced for the rest param.</li>
 *   <li>Individual rest args followed by a trailing spread: packed into an array literal, then
 *       the spread tail is push()-ed onto it inside a statement-expression.</li>
 * </ol>
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Temp-var reference for a spread vararg that also supplies required params; null otherwise.
    BLangExpression varargRef = null;
    BLangBlockStmt blockStmt = null;
    if (!iExpr.restArgs.isEmpty() &&
            restArgs.get(restArgs.size() - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            iExpr.requiredArgs.size() < invokableSymbol.params.size()) {
        // The spread expression will be read more than once (required params + rest slice),
        // so hoist it into a synthetic variable to evaluate it exactly once.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgs.size() - 1)).expr;
        DiagnosticPos varargExpPos = expr.pos;
        BType varargVarType = expr.type;
        String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType,
                ((BLangRestArgsExpression) restArgs.get(restArgs.size() - 1)).expr,
                varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.type = varargVarType;
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Fill every declared param positionally (named args, defaults, or vararg members).
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no trailing spread — pack individual rest args into one typed array literal.
    if (restArgs.isEmpty() || restArgs.get(restArgs.size() - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.type = arrayType;
        iExpr.restArgs = new ArrayList<>();
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: the only rest arg is a spread expression.
    if (restArgs.size() == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // Spread did not contribute to required params — nothing to rewrite.
            return;
        }
        // Attach the temp-var definition block to the first argument so it executes before
        // any argument that reads the temp var.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.type = firstNonRestArg.type;
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            return;
        }
        // The leading members of the vararg filled required params; slice off that prefix
        // so only the tail reaches the rest parameter.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation sliceInvocation =
                createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef,
                        new ArrayList<BLangExpression>() {{
                            add(startIndex);
                        }},
                        varargRef.type, varargRef.pos);
        restArgs.remove(0);
        restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type));
        return;
    }
    // Case 3: individual rest args followed by a trailing spread. Build an array literal of
    // the leading args, then push() the spread tail onto it inside a statement-expression.
    BArrayType type = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.type = type;
    BType elemType = type.eType;
    DiagnosticPos pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgs.size() - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgs.size() - 1);
    String name = DESUGARED_VARARG_KEY + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
            this.env.scope.owner);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, type, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = type;
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, type, pos);
    // push() takes the spread as a rest arg, not a required arg — move it over.
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.type = type;
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Rebuilds {@code iExpr.requiredArgs} so there is exactly one argument per declared parameter,
 * in declaration order. Each slot is filled from, in priority order: the positional arg at that
 * index, a named arg matching the parameter name, a placeholder {@code BLangIgnoreExpr} (when
 * no vararg is in play — presumably signalling "use the default value"; verify downstream), or
 * the next member of the hoisted vararg ({@code varargRef[varargIndex]}).
 *
 * @param iExpr           invocation being rewritten; {@code requiredArgs} is replaced in place
 * @param invokableSymbol resolved callee whose parameter list drives the ordering
 * @param varargRef       reference to the hoisted spread-vararg temp var, or {@code null}
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new HashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    // Tracks how many vararg members have been consumed to fill parameter slots.
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = varargRef.type;
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional argument present at this index.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            // Named argument matching this parameter.
            args.add(namedArgs.get(param.name.value));
        } else if (varargRef == null) {
            // No supplied value and no vararg to draw from: insert a typed placeholder.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.type = param.type;
            args.add(expr);
        } else {
            // Pull the next member out of the hoisted vararg: varargRef[varargIndex].
            BLangIndexBasedAccess memberAccessExpr =
                    (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            memberAccessExpr.pos = varargRef.pos;
            memberAccessExpr.expr = varargRef;
            memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex));
            // Tuple varargs have per-member types; array varargs share one element type.
            memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) :
                    ((BArrayType) varargType).eType;
            varargIndex++;
            args.add(addConversionExprIfRequired(memberAccessExpr, param.type));
        }
    }
    iExpr.requiredArgs = args;
}
/**
 * Builds the error-branch pattern clause used when desugaring {@code check}/{@code checkpanic}:
 * binds the failure to a synthetic {@code $t_failure} var and either returns it or panics.
 *
 * <p>The error is returned (rather than panicked) only when this is a plain {@code check} AND
 * every possible error type is assignable to some member of the enclosing function's return
 * type — i.e. the caller can legally propagate it.
 *
 * @param pos                  position for the generated nodes
 * @param invokableSymbol      symbol of the enclosing invokable (supplies the return type)
 * @param equivalentErrorTypes error types the checked expression may produce
 * @param isCheckPanicExpr     true for {@code checkpanic}, forcing a panic instead of a return
 * @return the generated typed-binding match clause for the error case
 */
private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(
        DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) {
    BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
    // Flatten the return type into its member set (singleton set for non-unions).
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<BType>() {{
                add(enclosingFuncReturnType);
            }};
    // Can every possible error be propagated through the return type?
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos,
            patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0,
                    names.fromString(patternFailureCaseVarName),
                    this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
    BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
            patternFailureCaseVar.symbol);
    BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
    patternBlockFailureCase.pos = pos;
    if (!isCheckPanicExpr && returnOnError) {
        // return $t_failure;
        BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
        returnStmt.pos = pos;
        returnStmt.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(returnStmt);
    } else {
        // panic $t_failure;
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = pos;
        panicNode.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(panicNode);
    }
    return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase);
}
/**
 * Builds the success-branch pattern clause for safe-assignment desugaring: binds the value to a
 * synthetic {@code $t_match} var and assigns it to either the declared variable (var-def form)
 * or the original LHS expression.
 *
 * @param pos       position for the generated nodes
 * @param lhsType   type of the successfully matched value
 * @param isVarDef  true when the target is a newly declared variable ({@code varSymbol} used);
 *                  false when assigning to an existing LHS ({@code lhsExpr} used)
 * @param varSymbol symbol of the declared variable (only read when {@code isVarDef})
 * @param lhsExpr   existing LHS expression (only read when {@code !isVarDef})
 * @return the generated typed-binding match clause for the success case
 */
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType,
        boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
    String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
    BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos,
            patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0,
                    names.fromString(patternSuccessCaseVarName),
                    this.env.scope.owner.pkgID, lhsType, this.env.scope.owner));
    BLangExpression varRefExpr;
    if (isVarDef) {
        varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    } else {
        varRefExpr = lhsExpr;
    }
    BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
            patternSuccessCaseVar.symbol);
    // <target> = $t_match;
    BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos,
            varRefExpr, patternSuccessCaseVarRef, false);
    BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos,
            new ArrayList<BLangStatement>() {{
                add(assignmentStmtSuccessCase);
            }});
    return ASTBuilderUtil.createMatchStatementPattern(pos,
            patternSuccessCaseVar, patternBlockSuccessCase);
}
/**
 * Desugars a whole match statement into a chain of if/else-if statements, one branch per
 * pattern clause, in clause order. A final clause flagged {@code isLastPattern} becomes the
 * terminal {@code else} block instead of another {@code else if}.
 *
 * @param matchStmt    match statement whose clauses are converted
 * @param matchExprVar synthetic variable holding the evaluated match expression
 * @return the head of the generated if/else chain
 */
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
    List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;
    BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
    BLangIf currentIfNode = parentIfNode;
    for (int i = 1; i < patterns.size(); i++) {
        BLangMatchBindingPatternClause patternClause = patterns.get(i);
        if (i == patterns.size() - 1 && patternClause.isLastPattern) {
            // Catch-all pattern: attach its body as the plain else block.
            currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar);
        } else {
            currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
            currentIfNode = (BLangIf) currentIfNode.elseStmt;
        }
    }
    return parentIfNode;
}
/**
 * Generate an if-else statement from the given match statement.
 *
 * <p>For typed patterns the condition is a type test and the body gains the bound variable.
 * For structured patterns (tuple/record/error bindings) the match expression is force-cast to
 * the pattern's synthesized type, the binding-pattern variables are defined, and — when a type
 * guard is present — those definitions are folded into the condition itself via a
 * statement-expression ANDed with the pattern test, so the guard can see the bound names.
 *
 * @param pattern match pattern statement node
 * @param matchExprVar variable node of the match expression
 * @return if else statement node
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
    BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
    if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
    }
    BType expectedType = matchExprVar.type;
    if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
        // Synthesize the precise type implied by the binding pattern (tuple/record/error).
        BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
        expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
    }
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        // Cast the match expression to the pattern's type before destructuring it.
        BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
        BLangStatement varDefStmt;
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }
        if (structuredPattern.typeGuardExpr != null) {
            // Type guard present: evaluate the bindings inside a statement-expression and AND
            // the guard onto the pattern test, so the guard can reference the bound names.
            BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
            blockStmt.addStatement(varDef);
            blockStmt.addStatement(varDefStmt);
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                    structuredPattern.typeGuardExpr);
            stmtExpr.type = symTable.booleanType;
            ifCondition = ASTBuilderUtil
                    .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                            (BOperatorSymbol) symResolver
                                    .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                            symTable.booleanType));
        } else {
            // No guard: prepend the cast and the binding definitions to the clause body.
            structuredPattern.body.stmts.add(0, varDef);
            structuredPattern.body.stmts.add(1, varDefStmt);
        }
    }
    return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}
/**
 * Produces the body block for a typed match pattern clause. For an ignored binding ({@code _})
 * the clause body is used unchanged; otherwise a definition of the pattern variable —
 * initialized from the match expression variable, converted if required — is prepended.
 *
 * @param pattern      typed binding pattern clause (cast internally)
 * @param matchExprVar synthetic variable holding the evaluated match expression
 * @return the clause's (possibly augmented) body block
 */
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                           BLangSimpleVariable matchExprVar) {
    BLangMatchTypedBindingPatternClause typedClause = (BLangMatchTypedBindingPatternClause) pattern;
    if (typedClause.variable.name.value.equals(Names.IGNORE.value)) {
        return typedClause.body;
    }
    BLangSimpleVarRef matchVarRef = ASTBuilderUtil.createVariableRef(typedClause.pos,
            matchExprVar.symbol);
    BLangExpression initExpr = addConversionExprIfRequired(matchVarRef, typedClause.variable.type);
    BLangSimpleVariable boundVar = ASTBuilderUtil.createVariable(typedClause.pos, "",
            typedClause.variable.type, initExpr, typedClause.variable.symbol);
    BLangSimpleVariableDef boundVarDef = ASTBuilderUtil.createVariableDef(boundVar.pos, boundVar);
    typedClause.body.stmts.add(0, boundVarDef);
    return typedClause.body;
}
/**
 * Produces the terminal else-block body for the last (catch-all) match pattern clause. For a
 * structured pattern, the binding-pattern variable is wired to the match expression and its
 * definition statement is prepended to the clause body; other clause kinds use the body as-is.
 *
 * @param pattern      last pattern clause of the match
 * @param matchExprVar synthetic variable holding the evaluated match expression
 * @return the clause's (possibly augmented) body block
 */
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {
    BLangBlockStmt body = pattern.body;
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
        BLangStatement varDefStmt;
        // Choose the definition-statement kind matching the binding pattern's shape.
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }
        structuredPattern.body.stmts.add(0, varDefStmt);
        body = structuredPattern.body;
    }
    return body;
}
/**
 * Wraps {@code expr} in an unchecked conversion to {@code lhsType} when the types differ and
 * no special-case exemption applies; otherwise returns {@code expr} unchanged.
 *
 * <p>Exemptions (returned as-is): unknown target type (NONE); identical types; an implicit
 * conversion already attached by {@code setImplicitCastExpr}; nil-into-json; a nullable RHS
 * assigned to nil; and tuple-into-array. Each mirrors cases the runtime handles without an
 * explicit cast — TODO(review): confirm the tuple/array exemption against the type checker.
 *
 * @param expr    expression to (possibly) convert
 * @param lhsType target type
 * @return {@code expr} itself, or a conversion node with {@code checkTypes = false}
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.type;
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // May attach an implicit cast to the expression as a side effect.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        return expr;
    }
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
    // Fall through: emit an explicit, unchecked conversion node.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.type = lhsType;
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    return conversionExpr;
}
/**
 * Builds the boolean condition that decides whether a match pattern clause applies to the
 * matched value.
 *
 * <p>The pattern's effective type is taken from the clause kind (static literal type,
 * synthesized structured-binding type, or the typed pattern's declared type). Union pattern
 * types produce an OR-chain of per-member tests, folded right-to-left so member order from the
 * union is preserved in the generated expression.
 *
 * @param patternClause clause whose test is being generated
 * @param varSymbol     symbol of the variable holding the matched value
 * @return a boolean expression testing the value against the pattern type(s)
 */
private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                 BVarSymbol varSymbol) {
    BType patternType;
    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            BLangMatchStaticBindingPatternClause staticPattern =
                    (BLangMatchStaticBindingPatternClause) patternClause;
            patternType = staticPattern.literal.type;
            break;
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) patternClause;
            patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
            break;
        default:
            BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
            patternType = simplePattern.variable.type;
            break;
    }
    BLangExpression binaryExpr;
    BType[] memberTypes;
    if (patternType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) patternType;
        memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
    } else {
        memberTypes = new BType[1];
        memberTypes[0] = patternType;
    }
    if (memberTypes.length == 1) {
        binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
    } else {
        // Seed the OR-chain with the first two members, then fold the rest on the left.
        BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                        lhsExpr.type, rhsExpr.type));
        for (int i = 2; i < memberTypes.length; i++) {
            lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
            rhsExpr = binaryExpr;
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.type, rhsExpr.type));
        }
    }
    return binaryExpr;
}
/**
 * Synthesizes the concrete {@code BType} implied by a structured binding pattern, recursing
 * into nested patterns.
 *
 * <p>Tuple patterns yield a {@code BTupleType} (with a rest type when a rest binding exists).
 * Record patterns synthesize an anonymous record type — including a symbol, init function, and
 * a registered type definition (side effect on the package env). Error patterns synthesize an
 * anonymous error type with either the generic detail type (rest-only pattern) or a dedicated
 * detail record, again registering type definitions. Any other pattern falls back to the
 * variable's resolved type.
 *
 * @param bindingPatternVariable binding pattern to derive a type for
 * @return the synthesized (or existing) type of the pattern
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            // The rest binding's type is an array; its element type becomes the tuple rest type.
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }
    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
                        env.enclPkg.symbol.pkgID, null, env.scope.owner);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        List<BField> fields = new ArrayList<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
        // One required field per bound key, typed by its (recursive) value pattern.
        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                    env.enclPkg.symbol.pkgID, fieldType, recordSymbol);
            fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;
        // Rest param type constrains unmatched fields; defaults to anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                recordVarType,
                bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);
        return recordVarType;
    }
    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            // Only a rest-detail binding: the generic detail type suffices.
            detailType = symTable.detailType;
        } else {
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol,
                ((BErrorType) errorVariable.type).reasonType,
                detailType);
        errorTypeSymbol.type = errorType;
        TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);
        return errorType;
    }
    return bindingPatternVariable.type;
}
/**
 * Builds the record-type AST node for an error binding pattern's detail record, producing one
 * field variable per detail entry.
 *
 * <p>Detail entries whose binding pattern has no resolved symbol get a synthetic public symbol
 * named {@code <key>$} typed as {@code pureType}.
 *
 * @param errorVariable error binding pattern supplying the detail entries
 * @param detailType    previously synthesized detail record type
 * @return the record type node describing {@code detailType}
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> fieldList = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
        BVarSymbol symbol = field.valueBindingPattern.symbol;
        if (symbol == null) {
            symbol = new BVarSymbol(
                    Flags.PUBLIC,
                    names.fromString(field.key.value + "$"),
                    this.env.enclPkg.packageID,
                    symTable.pureType,
                    null);
        }
        BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
                field.valueBindingPattern.pos,
                symbol.name.value,
                field.valueBindingPattern.type,
                field.valueBindingPattern.expr,
                symbol);
        fieldList.add(fieldVar);
    }
    return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
/**
 * Synthesizes an anonymous record type for an error binding pattern's detail mapping:
 * {@code $anonErrorType$<n>$detailType}. One field is created per detail entry, typed by its
 * (recursive) value binding pattern; the record is sealed only when there is no rest binding.
 *
 * @param detail     detail entries of the error pattern
 * @param restDetail rest binding, or {@code null}; presence leaves the record open (unsealed)
 * @param errorNo    sequence number used to make the anonymous type name unique
 * @return the synthesized detail record type
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo) {
    BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString("$anonErrorType$" + errorNo + "$detailType"),
            env.enclPkg.symbol.pkgID, null, null);
    detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
    detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
    detailRecordTypeSymbol.scope.define(
            names.fromString(detailRecordTypeSymbol.name.value + "." +
                    detailRecordTypeSymbol.initializerFunc.funcName.value),
            detailRecordTypeSymbol.initializerFunc.symbol);
    BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
    detailRecordType.restFieldType = symTable.anydataType;
    if (restDetail == null) {
        // No rest binding: the pattern enumerates every field, so seal the record.
        detailRecordType.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
        Name fieldName = names.fromIdNode(detailEntry.key);
        BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
        BVarSymbol fieldSym = new BVarSymbol(
                Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol);
        detailRecordType.fields.add(new BField(fieldName, detailEntry.key.pos, fieldSym));
        detailRecordTypeSymbol.scope.define(fieldName, fieldSym);
    }
    return detailRecordType;
}
/**
 * Builds a no-arg, nil-returning init function stub to attach to a generated record
 * type symbol.
 */
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFnType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFnSymbol = Symbols.createFunctionSymbol(Flags.PUBLIC, Names.EMPTY,
            env.enclPkg.symbol.pkgID, initFnType, env.scope.owner, false);
    initFnSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFnSymbol, initFnType);
}
/**
 * Wraps an already-resolved {@code BErrorType} in a fresh error type AST node.
 */
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.type = errorType;
    return node;
}
/**
 * Builds the boolean condition used to select a match pattern clause for the value
 * held in {@code varSymbol}: an equality/type-test for static patterns, an `is like`
 * test for structured patterns, a nil equality check for the nil type, and an
 * `isassignable` check otherwise.
 */
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
BVarSymbol varSymbol, BType patternType) {
DiagnosticPos pos = patternClause.pos;
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
return createBinaryExpression(pos, varRef, pattern.literal);
}
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
}
// Typed binding pattern: nil needs an explicit equality check since `()` has no tag test here.
if (patternType == symTable.nilType) {
BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
symTable.anyType, symTable.nilType));
} else {
return createIsAssignableExpression(pos, varSymbol, patternType);
}
}
/**
 * Builds the boolean condition that checks {@code varRef} against a static match
 * pattern expression. Binary (OR-composed) patterns recurse into both operands; the
 * wildcard `_` becomes an `is any` type test; any other expression becomes an
 * equality check, falling back to anydata equality when no direct operator exists.
 */
private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
BLangExpression expression) {
BLangBinaryExpr binaryExpr;
// Parenthesised patterns: unwrap and recurse.
if (NodeKind.GROUP_EXPR == expression.getKind()) {
return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
}
if (NodeKind.BINARY_EXPR == expression.getKind()) {
// `p1 | p2` style pattern: match if either alternative matches.
binaryExpr = (BLangBinaryExpr) expression;
BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
} else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
// `_` matches anything: emit `varRef is any`.
BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
anyType.type = symTable.anyType;
anyType.typeKind = TypeKind.ANY;
return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
} else {
binaryExpr = ASTBuilderUtil
.createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
// No operator for the exact operand types: fall back to anydata equality.
if (opSymbol == symTable.notFoundSymbol) {
opSymbol = symResolver
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
binaryExpr);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
}
return binaryExpr;
}
/**
 * Creates an `isassignable` check testing whether the value held by
 * {@code varSymbol} is assignable to {@code patternType}.
 */
private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
                                                           BVarSymbol varSymbol,
                                                           BType patternType) {
    BLangSimpleVarRef checkedRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    return ASTBuilderUtil.createIsAssignableExpr(pos, checkedRef, patternType, symTable.booleanType, names);
}
/**
 * Creates an `is like` (shape) test of {@code expr} against {@code type}.
 */
private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
    BType resultType = symTable.booleanType;
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), resultType);
}
/**
 * Turns an initialized simple variable into an equivalent assignment statement
 * (`varRef = variable.expr`) targeting the variable's own symbol.
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;

    BLangSimpleVarRef target = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    target.pos = variable.pos;
    target.variableName = variable.name;
    target.symbol = variable.symbol;
    target.type = variable.type;

    assignment.setVariable(target);
    return assignment;
}
/**
 * Creates a `self.<field> = <initExpr>` assignment inside the given (generated) init
 * function and rewrites it in that function's environment.
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol symbol) {
    BLangSimpleVarRef selfRef = ASTBuilderUtil.createVariableRef(variable.pos, symbol);
    BLangFieldBasedAccess fieldTarget = ASTBuilderUtil.createFieldAccessExpr(selfRef, variable.name);
    fieldTarget.symbol = variable.symbol;
    fieldTarget.type = variable.type;

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;
    assignment.setVariable(fieldTarget);

    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignment, initFuncEnv);
}
/**
 * Adds a synthetic default pattern clause to a match expression covering every member
 * type of the matched expression that no existing pattern accepts, keeping the match
 * expression exhaustive after desugaring. Does nothing when all types are matched.
 */
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes;
List<BType> unmatchedTypes = new ArrayList<>();
// Collect the candidate types of the matched expression.
if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
exprTypes = new ArrayList<>(unionType.getMemberTypes());
} else {
exprTypes = Lists.of(bLangMatchExpression.type);
}
// A type is unmatched when no pattern variable can accept it.
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
if (this.types.isAssignable(type, pattern.variable.type)) {
assignable = true;
break;
}
}
if (!assignable) {
unmatchedTypes.add(type);
}
}
if (unmatchedTypes.isEmpty()) {
return;
}
// The default clause's variable type is the union of all unmatched types.
BType defaultPatternType;
if (unmatchedTypes.size() == 1) {
defaultPatternType = unmatchedTypes.get(0);
} else {
defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
}
String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName),
this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner));
// Default clause simply yields the matched value itself.
BLangMatchExprPatternClause defaultPattern =
(BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
defaultPattern.variable = patternMatchCaseVar;
defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
defaultPattern.pos = bLangMatchExpression.pos;
bLangMatchExpression.patternClauses.add(defaultPattern);
}
/**
 * Returns true when this access expression, or any access expression in its receiver
 * chain, uses safe navigation and therefore must be desugared into match statements.
 * LHS expressions and receiver-less accesses are never treated as safe navigation.
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.lhsVar || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    NodeKind childKind = accessExpr.expr.getKind();
    boolean chained = childKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || childKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    return chained && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Desugars a safe-navigation access chain (e.g. `a?.b?.c`) into a block statement
 * expression: a temp result variable definition plus the nested match statements
 * built by {@code handleSafeNavigation}. Resets the shared desugaring state before
 * returning.
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
BType originalExprType = accessExpr.type;
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);
// The outermost match statement ends up at the bottom of the stack.
BLangMatch matcEXpr = this.matchStmtStack.firstElement();
BLangBlockStmt blockStmt =
ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
stmtExpression.type = originalExprType;
// Reset the per-expression state used while building nested safe-navigation matches.
this.matchStmtStack = new Stack<>();
this.accessExprStack = new Stack<>();
this.successPattern = null;
this.safeNavigationAssignment = null;
return stmtExpression;
}
/**
 * Recursively desugars one step of a safe-navigation chain into a match statement
 * with nil and/or error pattern clauses plus a success clause, chaining each step
 * into the success clause of its parent (see the inline example below). Steps that
 * are not safe navigation only have their type restored.
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
if (accessExpr.expr == null) {
return;
}
// Desugar the receiver chain first (inside-out recursion).
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
kind == NodeKind.INVOCATION) {
handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
}
if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
// Plain (non-safe) step: restore the pre-lifting type. XML access can still
// produce an error, so it keeps the error member in its type.
BType originalType = accessExpr.originalType;
if (TypeTags.isXMLTypeTag(originalType.tag)) {
accessExpr.type = BUnionType.create(null, originalType, symTable.errorType);
} else {
accessExpr.type = originalType;
}
if (this.safeNavigationAssignment != null) {
this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
}
return;
}
/*
* If the field access is a safe navigation, create a match expression.
* Then chain the current expression as the success-pattern of the parent
* match expr, if available.
* eg:
* x but { <--- parent match expr
* error e => e,
* T t => t.y but { <--- current expr
* error e => e,
* R r => r.z
* }
* }
*/
BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
// `?.` adds a nil clause; error lifting adds an error clause; both may apply.
if (accessExpr.nilSafeNavigation) {
matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
matchStmt.type = type;
}
if (accessExpr.errorSafeNavigation) {
matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
matchStmt.type = type;
matchStmt.pos = accessExpr.pos;
}
BLangMatchTypedBindingPatternClause successPattern =
getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
matchStmt.patternClauses.add(successPattern);
this.matchStmtStack.push(matchStmt);
// Nest this match inside the parent step's success clause, if one exists.
if (this.successPattern != null) {
this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
}
this.successPattern = successPattern;
}
/**
 * Builds the `error e => $temp$ = e` pattern clause used when desugaring
 * error-lifting safe navigation.
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    String patternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol patternSym = new BVarSymbol(0, names.fromString(patternVarName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(expr.pos, patternVarName,
            symTable.errorType, null, patternSym);

    // Body: assign the bound error to the overall temp result variable.
    BLangSimpleVarRef boundError = ASTBuilderUtil.createVariableRef(expr.pos, patternVar.symbol);
    BLangVariableReference tempTarget = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignToTemp =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, tempTarget, boundError, false);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignToTemp));

    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, patternVar, clauseBody);
}
/**
 * Builds a `() => <expr>` match-expression pattern clause whose binding variable is
 * the ignored identifier `_`.
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                       BLangExpression expr) {
    String ignoredVarName = IGNORE.toString();
    BVarSymbol ignoredSym = new BVarSymbol(0, names.fromString(ignoredVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable ignoredVar = ASTBuilderUtil.createVariable(pos, ignoredVarName, symTable.nilType,
            null, ignoredSym);

    BLangMatchExprPatternClause nilClause =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nilClause.variable = ignoredVar;
    nilClause.expr = expr;
    nilClause.pos = pos;
    return nilClause;
}
/**
 * Builds the `() n => $temp$ = n` pattern clause used when desugaring nil-lifting
 * (`?.`) safe navigation.
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String patternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol patternSym = new BVarSymbol(0, names.fromString(patternVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable patternVar = ASTBuilderUtil.createVariable(expr.pos, patternVarName,
            symTable.nilType, null, patternSym);

    // Body: propagate the nil value into the overall temp result variable.
    BLangSimpleVarRef boundNil = ASTBuilderUtil.createVariableRef(expr.pos, patternVar.symbol);
    BLangVariableReference tempTarget = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignToTemp =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, tempTarget, boundNil, false);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignToTemp));

    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, patternVar, clauseBody);
}
/**
 * Builds the success pattern clause `T t => $temp$ = t.field` for one safe-navigation
 * step: binds the nil/error-lifted receiver to a fresh variable, retargets the access
 * expression at it (mutating {@code accessExpr} in place), and assigns the converted
 * result to the temp result variable.
 *
 * @param accessExpr    the access expression whose receiver is being matched
 * @param tempResultVar variable accumulating the overall navigation result
 * @param liftError     whether the error type is also lifted from the receiver type
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BLangAccessExpression accessExpr,
BLangSimpleVariable tempResultVar, boolean liftError) {
BType type = types.getSafeType(accessExpr.expr.type, true, liftError);
String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
BVarSymbol successPatternSymbol;
// Function-typed receivers need an invokable symbol so calls on them still resolve.
if (type.tag == TypeTags.INVOKABLE) {
successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, type, this.env.scope.owner);
} else {
successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, type, this.env.scope.owner);
}
BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
type, null, successPatternSymbol);
// Point the access at the matched (safe) value and clear the safe-nav flags,
// since the match clauses now handle nil/error.
accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
accessExpr.errorSafeNavigation = false;
accessExpr.nilSafeNavigation = false;
if (TypeTags.isXMLTypeTag(accessExpr.expr.type.tag)) {
accessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType, symTable.nilType);
} else {
accessExpr.type = accessExpr.originalType;
}
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
BLangMatchTypedBindingPatternClause successPattern =
ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
// Remember the assignment so an enclosing non-safe step can rewrite its RHS.
this.safeNavigationAssignment = assignmentStmt;
return successPattern;
}
/**
 * Returns true when any receiver along an LHS field/index access chain is nullable,
 * which means the assignment needs LHS safe-navigation desugaring.
 */
private boolean safeNavigateLHS(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BLangExpression receiver = ((BLangAccessExpression) expr).expr;
    return receiver.type.isNullable() || safeNavigateLHS(receiver);
}
/**
 * Desugars an assignment whose LHS chain contains nullable accesses into a block:
 * nil-guard statements for each step (built via {@code createLHSSafeNavigation})
 * followed by the final assignment against a cloned LHS.
 *
 * @param accessExpr     the LHS access expression
 * @param rhsExpr        the value being assigned
 * @param safeAssignment unused in this method; kept for caller signature compatibility
 * @return a block statement performing the guarded assignment
 */
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
boolean safeAssignment) {
this.accessExprStack = new Stack<>();
List<BLangStatement> stmts = new ArrayList<>();
createLHSSafeNavigation(stmts, accessExpr.expr);
BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
cloneExpression(accessExpr), rhsExpr);
stmts.add(assignment);
return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}
/**
 * Recursively emits guard statements for each step of a nullable LHS access chain:
 * invocations are hoisted into intermediate variables, and every nullable step gets
 * an `if (step is ()) { ... }` that either initializes an empty mapping (for
 * defaultable mapping types) or panics with a null-reference error. The processed
 * expression for each step is left on {@code accessExprStack} for the caller.
 */
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
NodeKind kind = expr.getKind();
boolean root = false;
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
kind == NodeKind.INVOCATION) {
// Guard the receiver chain first, then rewire this step's receiver to the
// processed expression left on the stack.
BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
createLHSSafeNavigation(stmts, accessExpr.expr);
accessExpr.expr = accessExprStack.pop();
} else {
root = true;
}
// Hoist invocations into an intermediate variable so they are evaluated once.
if (expr.getKind() == NodeKind.INVOCATION) {
BLangInvocation invocation = (BLangInvocation) expr;
BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value
+ "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner);
BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
intermediateVariable);
stmts.add(intermediateVariableDefinition);
expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
}
if (expr.type.isNullable()) {
BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
isNillTest.type = symTable.booleanType;
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);
// Work on a clone with nil lifted from its type for the remaining statements.
expr = cloneExpression(expr);
expr.type = types.getSafeType(expr.type, true, false);
if (isDefaultableMappingType(expr.type) && !root) {
// Non-root mapping step: default a nil value to an empty mapping.
BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
jsonLiteral.type = expr.type;
jsonLiteral.pos = expr.pos;
BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
expr, jsonLiteral);
thenStmt.addStatement(assignment);
} else {
// Otherwise a nil step is a hard failure: panic with a null-reference error.
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
literal.type = symTable.stringType;
BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
errorCtorInvocation.pos = expr.pos;
errorCtorInvocation.argExprs.add(literal);
errorCtorInvocation.requiredArgs.add(literal);
errorCtorInvocation.type = symTable.errorType;
errorCtorInvocation.symbol = symTable.errorConstructor;
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.expr = errorCtorInvocation;
panicNode.pos = expr.pos;
thenStmt.addStatement(panicNode);
}
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
stmts.add(ifelse);
}
accessExprStack.push(expr);
}
/**
 * Builds a fresh AST value-type node representing the nil type `()`.
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilNode.type = symTable.nilType;
    nilNode.typeKind = TypeKind.NIL;
    return nilNode;
}
/**
 * Shallow-clones a variable reference or a field/index/invocation access expression
 * for reuse in desugared code; any other node kind is a programming error.
 */
private BLangVariableReference cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR
            || kind == NodeKind.INVOCATION) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Shallow-clones a field/index access expression: the receiver chain is cloned
 * recursively with nil lifted from its type, the safe-navigation flags are cleared
 * (the caller has already desugared them), and the clone carries the original
 * (pre-lifting) type. Receiver-less expressions are returned as-is.
 *
 * @param originalAccessExpr the access expression to clone
 * @return the cloned access expression
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    BLangVariableReference varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression(originalAccessExpr.expr);
    }
    varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false);

    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        case INVOCATION:
            // FIX: the previous code assigned null here and then dereferenced it
            // below, guaranteeing a NullPointerException. Fail fast with a
            // descriptive error instead; invocation cloning is not supported.
            throw new IllegalStateException("cloning invocation expressions is not supported");
        default:
            throw new IllegalStateException();
    }
    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.lhsVar = originalAccessExpr.lhsVar;
    accessExpr.symbol = originalAccessExpr.symbol;
    // Safe navigation has already been desugared away on the clone.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    accessExpr.type = originalAccessExpr.originalType;
    return accessExpr;
}
/**
 * Returns the expression `expr + 1`, used to turn an exclusive int-range start bound
 * into an inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
            symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOp);
}
/**
 * Returns the expression `expr - 1`, used to turn an exclusive int-range end bound
 * into an inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
            symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOp);
}
/**
 * Creates a boolean literal AST node holding the given value.
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral boolLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    boolLiteral.type = symTable.booleanType;
    boolLiteral.value = value;
    return boolLiteral;
}
/**
 * Returns true when the (nil-lifted) type is a mapping type with a natural empty
 * default value: json, map, or record.
 */
private boolean isDefaultableMappingType(BType type) {
    int liftedTag = types.getSafeType(type, true, false).tag;
    return liftedTag == TypeTags.JSON || liftedTag == TypeTags.MAP || liftedTag == TypeTags.RECORD;
}
/**
 * Generates the `$init$` function for an object type, registers it on the object's
 * type symbol and type node, forces a nil return type, and returns the rewritten
 * function.
 */
private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) {
    BLangFunction generatedInit = TypeDefBuilderHelper.createInitFunctionForStructureType(
            structureTypeNode, env, Names.GENERATED_INIT_SUFFIX, names, symTable);
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) structureTypeNode.type.tsymbol;
    objectSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX,
            generatedInit.symbol, (BInvokableType) generatedInit.type);
    structureTypeNode.generatedInitFunction = generatedInit;
    generatedInit.returnTypeNode.type = symTable.nilType;
    return rewrite(generatedInit, env);
}
/**
 * Desugars a short-circuiting logical binary expression (`&&` / `||`) into an
 * if-else over a generated result variable, preserving short-circuit evaluation
 * (the RHS is only placed in the branch where it must be evaluated).
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
/*
* Desugar (lhsExpr && rhsExpr) to following if-else:
*
* logical AND:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = rhsExpr;
* } else {
* $result$ = false;
* }
*
* logical OR:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = true;
* } else {
* $result$ = rhsExpr;
* }
*
*/
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
// Then-branch: RHS for AND (must still be evaluated), constant true for OR.
BLangExpression thenResult;
if (binaryExpr.opKind == OperatorKind.AND) {
thenResult = binaryExpr.rhsExpr;
} else {
thenResult = getBooleanLiteral(true);
}
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
thenBody.addStatement(thenAssignment);
// Else-branch: constant false for AND, RHS for OR.
BLangExpression elseResult;
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
if (binaryExpr.opKind == OperatorKind.AND) {
elseResult = getBooleanLiteral(false);
} else {
elseResult = binaryExpr.rhsExpr;
}
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
elseBody.addStatement(elseAssignment);
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.type = binaryExpr.type;
result = rewriteExpr(stmtExpr);
}
/**
 * Splits the package init function into several smaller functions.
 *
 * @param packageNode package node
 * @param env symbol environment
 * @return initial init function but trimmed in size
 */
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
    int methodSize = INIT_METHOD_SPLIT_SIZE;
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body;
    // Small init bodies (or non-JVM targets) need no splitting.
    if (funcBody.stmts.size() < methodSize || !isJvmTarget) {
        return packageNode.initFunction;
    }
    BLangFunction initFunction = packageNode.initFunction;

    List<BLangFunction> generatedFunctions = new ArrayList<>();
    List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts);
    funcBody.stmts.clear();
    BLangFunction newFunc = initFunction;
    BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body;

    // Phase 1: leading statements up to the first variable definition are split
    // purely by statement count.
    int varDefIndex = 0;
    for (int i = 0; i < stmts.size(); i++) {
        if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) {
            break;
        }
        varDefIndex++;
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }

    // Phase 2: from the first variable definition onward, only split at service
    // constructor assignments (keeping each chunk of related statements together),
    // and stop splitting once a listener assignment is reached.
    List<BLangStatement> chunkStmts = new ArrayList<>();
    for (int i = varDefIndex; i < stmts.size(); i++) {
        BLangStatement stmt = stmts.get(i);
        chunkStmts.add(stmt);
        varDefIndex++;
        if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
                (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) {
            // NOTE: the size condition used to be re-checked here with a nested if,
            // but it is already guaranteed by the enclosing condition; the duplicate
            // check was removed (no behavior change).
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
            newFuncBody.stmts.addAll(chunkStmts);
            chunkStmts.clear();
        } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
                Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
                        Flags.LISTENER)
        ) {
            break;
        }
    }
    newFuncBody.stmts.addAll(chunkStmts);

    // Phase 3: remaining statements (after a listener assignment broke phase 2)
    // are again split purely by count.
    for (int i = varDefIndex; i < stmts.size(); i++) {
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }
    generatedFunctions.add(newFunc);

    // Chain the generated functions: each one `check`-invokes its successor, and
    // every function except the original init (index 0) is added to the package.
    for (int j = 0; j < generatedFunctions.size() - 1; j++) {
        BLangFunction thisFunction = generatedFunctions.get(j);
        BLangCheckedExpr checkedExpr =
                ASTBuilderUtil.createCheckExpr(initFunction.pos,
                        createInvocationNode(generatedFunctions.get(j + 1).name.value,
                                new ArrayList<>(), symTable.errorOrNilType),
                        symTable.nilType);
        checkedExpr.equivalentErrorTypeList.add(symTable.errorType);
        BLangExpressionStmt expressionStmt = ASTBuilderUtil
                .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body);
        expressionStmt.expr = checkedExpr;
        expressionStmt.expr.pos = initFunction.pos;
        if (j > 0) {
            thisFunction = rewrite(thisFunction, env);
            packageNode.functions.add(thisFunction);
            packageNode.topLevelNodes.add(thisFunction);
        }
    }
    // The last generated function has no successor to call but must still be
    // rewritten and registered.
    if (generatedFunctions.size() > 1) {
        BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
        lastFunc = rewrite(lastFunc, env);
        packageNode.functions.add(lastFunc);
        packageNode.topLevelNodes.add(lastFunc);
    }
    return generatedFunctions.get(0);
}
/**
 * Creates an intermediate package init function.
 *
 * @param pkgNode package node
 * @param env symbol environment of package
 * @return the newly created intermediate init function
 */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();
    // Each generated function gets a unique name via the running init-func counter.
    Name funcName = new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++);
    BLangFunction initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(
            pkgNode.pos, alias, funcName, symTable);
    createInvokableSymbol(initFunction, env);
    return initFunction;
}
/**
 * Returns the declared type of the invokable symbol's rest parameter, or null when
 * there is no symbol or no rest parameter.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    if (invokableSymbol == null || invokableSymbol.restParam == null) {
        return null;
    }
    return invokableSymbol.restParam.type;
}
/**
 * Returns the declared type of the function's rest parameter, or null when there is
 * no function or no rest parameter.
 */
private BType getRestType(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.type;
}
/**
 * Returns the symbol of the function's rest parameter, or null when there is no
 * function or no rest parameter.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.symbol;
}
/**
 * Returns true when the record field is a key-value field whose key is a computed
 * (bracketed) expression.
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Desugars a mapping constructor `{k1: v1, ...}` into a block statement expression:
 * a variable definition for an empty map/record literal followed by one member-store
 * assignment per field; spread fields become a foreach over the source's entries.
 *
 * @param mappingConstructorExpr the mapping constructor to desugar
 * @return a statement expression evaluating to the constructed mapping value
 */
private BLangStatementExpression rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.type;
    DiagnosticPos pos = mappingConstructorExpr.pos;

    // Start from an empty struct/map literal bound to a generated variable.
    BLangRecordLiteral recordLiteral = type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type) :
            new BLangMapLiteral(pos, type);
    String name = DESUGARED_MAPPING_CONSTR_KEY + this.annonVarCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
            this.env.scope.owner);
    BLangSimpleVariable var = createVariable(pos, name, type, recordLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = type;

    BLangBlockStmt blockStmt = createBlockStmt(pos);
    blockStmt.stmts.add(varDef);

    BLangSimpleVarRef mappingVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression keyExpr = key.expr;
            // Computed keys are used as-is; identifier keys become string literals;
            // any other key is already a literal. (A stray `;;` was removed here.)
            BLangExpression indexExpr = key.computedKey ? keyExpr :
                    keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
                            createStringLiteral(pos, ((BLangSimpleVarRef) keyExpr).variableName.value) :
                            ((BLangLiteral) keyExpr);
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef, indexExpr, keyValueField.valueExpr);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field `x` is sugar for `x: x`.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef,
                    createStringLiteral(pos, varRefField.variableName.value),
                    varRefField);
        } else {
            // Spread field `...expr`: iterate over the source's entries, copying
            // each [key, value] pair into the mapping under construction.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
            foreach.pos = pos;
            foreach.collection = generateMapEntriesInvocation(spreadOpField.expr, spreadOpField.expr.type);
            types.setForeachTypedBindingPatternType(foreach);
            BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType);
            foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                    this.env.scope.owner.pkgID, foreachVariable.type,
                    this.env.scope.owner);
            BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
            foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
            foreach.isDeclaredWithVar = true;
            BLangBlockStmt foreachBodyBlock = ASTBuilderUtil.createBlockStmt(pos);

            // The loop variable is a [key, value] tuple: index 0 is the key, 1 the value.
            BTupleType foreachVarRefType = (BTupleType) foreachVarRef.type;
            BLangIndexBasedAccess indexExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            indexExpr.pos = pos;
            indexExpr.expr = foreachVarRef;
            indexExpr.indexExpr = rewriteExpr(createIntLiteral(0));
            indexExpr.type = foreachVarRefType.tupleTypes.get(0);
            BLangIndexBasedAccess valueExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            valueExpr.pos = pos;
            valueExpr.expr = foreachVarRef;
            valueExpr.indexExpr = rewriteExpr(createIntLiteral(1));
            valueExpr.type = foreachVarRefType.tupleTypes.get(1);
            addMemberStoreForKeyValuePair(pos, foreachBodyBlock, mappingVarRef, indexExpr, valueExpr);
            foreach.body = foreachBodyBlock;
            blockStmt.addStatement(foreach);
        }
    }

    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, mappingVarRef);
    stmtExpression.type = type;
    return stmtExpression;
}
/**
 * Appends an assignment statement to {@code blockStmt} that stores {@code value} into
 * {@code mappingVarRef[indexExpr]} — i.e. a single member store for one key/value pair.
 */
private void addMemberStoreForKeyValuePair(DiagnosticPos pos, BLangBlockStmt blockStmt,
                                           BLangExpression mappingVarRef, BLangExpression indexExpr,
                                           BLangExpression value) {
    // Create the assignment inside the target block first, then fill in both sides.
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
    assignment.expr = rewriteExpr(value);

    BLangIndexBasedAccess memberAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    memberAccess.pos = pos;
    memberAccess.expr = mappingVarRef;
    memberAccess.indexExpr = rewriteExpr(indexExpr);
    memberAccess.type = value.type;
    assignment.varRef = memberAccess;
}
/**
 * Extracts the literal-keyed key/value pairs from an already-desugared mapping constructor.
 * The first statement of the desugared block is the temp-map definition and is skipped;
 * entries whose key is not a literal (computed keys) are ignored.
 */
private Map<String, BLangExpression> getKeyValuePairs(BLangStatementExpression desugaredMappingConst) {
    Map<String, BLangExpression> pairs = new HashMap<>();
    boolean skippedTempVarDef = false;
    for (BLangStatement statement : ((BLangBlockStmt) desugaredMappingConst.stmt).stmts) {
        if (!skippedTempVarDef) {
            // Statement 0 defines the temporary map variable; it carries no key/value pair.
            skippedTempVarDef = true;
            continue;
        }
        BLangAssignment assignment = (BLangAssignment) statement;
        BLangExpression keyExpr = ((BLangIndexBasedAccess) assignment.varRef).indexExpr;
        if (keyExpr.getKind() == NodeKind.LITERAL) {
            pairs.put((String) ((BLangLiteral) keyExpr).value, assignment.expr);
        }
    }
    return pairs;
}
} | List<BLangExpression> exprs = new ArrayList<>(); | public void visit(BLangFunction funcNode) {
SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
if (!funcNode.interfaceFunction) {
addReturnIfNotPresent(funcNode);
}
funcNode.originalFuncSymbol = funcNode.symbol;
funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol);
funcNode.requiredParams = rewrite(funcNode.requiredParams, funcEnv);
funcNode.restParam = rewrite(funcNode.restParam, funcEnv);
funcNode.workers = rewrite(funcNode.workers, funcEnv);
if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {
funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);
}
List<BLangAnnotationAttachment> participantAnnotation
= funcNode.annAttachments.stream()
.filter(a -> Transactions.isTransactionsAnnotation(a.pkgAlias.value,
a.annotationName.value))
.collect(Collectors.toList());
funcNode.body = rewrite(funcNode.body, funcEnv);
funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
if (funcNode.returnTypeNode != null) {
funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
}
if (participantAnnotation.isEmpty()) {
result = funcNode;
return;
}
result = desugarParticipantFunction(funcNode, participantAnnotation);
}
/**
 * Rewraps a @Participant-annotated function so that its original body runs inside the
 * transaction runtime: the body is moved into a wrapper lambda, and the function is
 * replaced with a call to the transaction module's participant begin-function, passing
 * the transaction block id, the wrapped body, and onCommit/onAbort callback lambdas
 * (taken from the annotation if provided, otherwise no-op defaults).
 *
 * @param funcNode              the participant function being desugared (mutated in place)
 * @param participantAnnotation the @Participant annotation attachments (first one is used)
 * @return the same {@code funcNode}, with its body replaced by the participant invocation
 */
private BLangFunction desugarParticipantFunction(BLangFunction funcNode,
                                                 List<BLangAnnotationAttachment> participantAnnotation) {
    BLangAnnotationAttachment annotation = participantAnnotation.get(0);
    BLangBlockFunctionBody onCommitBody = null;
    BLangBlockFunctionBody onAbortBody = null;
    // All parameters (and the receiver) become closure variables of the wrapper lambda.
    funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true);
    if (funcNode.receiver != null) {
        funcNode.receiver.symbol.closure = true;
    }
    // The participant lambda returns error|any.
    BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType);
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$",
                                                          ASTBuilderUtil.createTypeNode(symTable.nilType));
    BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$",
                                                         ASTBuilderUtil.createTypeNode(symTable.nilType));
    // Both callbacks take a single string parameter: the transaction id.
    BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                            new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                                           symTable.stringType, commitFunc.function.symbol));
    BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                            new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                                           symTable.stringType, abortFunc.function.symbol));
    BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol);
    BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol);
    // User-supplied callbacks from the annotation record, keyed by field name.
    for (Map.Entry<String, BLangExpression> entry :
            getKeyValuePairs((BLangStatementExpression) annotation.expr).entrySet()) {
        switch (entry.getKey()) {
            case Transactions.TRX_ONCOMMIT_FUNC:
                BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
                BLangInvocation onCommit = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef),
                                                    Collections.emptyList(), symResolver);
                BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit);
                onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onCommitStmt));
                break;
            case Transactions.TRX_ONABORT_FUNC:
                BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
                BLangInvocation onAbort = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef),
                                                    Collections.emptyList(), symResolver);
                BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort);
                onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onAbortStmt));
                break;
        }
    }
    // No user callback supplied: default to a body that just returns ().
    if (onCommitBody == null) {
        onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    if (onAbortBody == null) {
        onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    // Wire the callback bodies/params/types into the lambda symbols.
    commitFunc.function.body = onCommitBody;
    commitFunc.function.requiredParams.add(onCommitTrxVar);
    commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type),
                                         commitFunc.function.symbol.type.getReturnType(), null);
    commitFunc.function.symbol.type = commitFunc.type;
    commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol);
    abortFunc.function.body = onAbortBody;
    abortFunc.function.requiredParams.add(onAbortTrxVar);
    abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type),
                                        abortFunc.function.symbol.type.getReturnType(), null);
    abortFunc.function.symbol.type = abortFunc.type;
    abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol);
    // Locate the transaction module among the package imports.
    // NOTE(review): findAny().get() throws NoSuchElementException if the transaction module
    // is not imported — presumably guaranteed by earlier compiler phases; confirm.
    BSymbol trxModSym = env.enclPkg.imports
            .stream()
            .filter(importPackage -> importPackage.symbol.
                    pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value
                                                    + Names.TRANSACTION_PACKAGE.value))
            .findAny().get().symbol;
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(trxModSym),
                                                                   getParticipantFunctionName(funcNode));
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType,
                                                                   getTransactionBlockId());
    // Wrapper lambda carries the original function body and return type.
    BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$",
                                                                  Collections.emptyList(),
                                                                  funcNode.returnTypeNode,
                                                                  funcNode.body);
    for (BLangSimpleVariable var : funcNode.requiredParams) {
        trxMainWrapperFunc.function.closureVarSymbols.add(new ClosureVarSymbol(var.symbol, var.pos));
    }
    // Participant lambda: defines the wrapper, invokes it, returns its (converted) result.
    BLangBlockFunctionBody trxMainBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
    BLangLambdaFunction trxMainFunc
            = createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(),
                                   trxReturnNode, trxMainBody);
    trxMainWrapperFunc.capturedClosureEnv = trxMainFunc.function.clonedEnv;
    commitFunc.capturedClosureEnv = env.createClone();
    abortFunc.capturedClosureEnv = env.createClone();
    BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID,
                                           trxMainWrapperFunc.type, trxMainFunc.function.symbol);
    BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1",
                                                                       trxMainWrapperFunc.type, trxMainWrapperFunc,
                                                                       wrapperSym);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody);
    variableDef.var = wrapperFuncVar;
    BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos,
                                                                               wrapperFuncVar.symbol), env);
    BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef,
                                                                       wrapperFuncVar.symbol,
                                                                       trxMainWrapperFunc.function.symbol.retType);
    BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (wrapperInvocation, trxReturnNode.type));
    trxMainWrapperFunc.function.receiver = funcNode.receiver;
    trxMainFunc.function.receiver = funcNode.receiver;
    trxMainBody.stmts.add(wrapperReturn);
    rewrite(trxMainFunc.function, env);
    // Replace the function body with: return <participant-begin>(blockId, main, commit, abort);
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc);
    BLangInvocation participantInvocation
            = ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs,
                                                        Collections.emptyList(), symResolver);
    participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType;
    BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (participantInvocation, funcNode.symbol.retType));
    funcNode.body = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(rewrite(stmt, env)));
    return funcNode;
}
/**
 * Selects the transaction participant begin-function: the remote variant for resource
 * functions, the local variant for everything else.
 */
private Name getParticipantFunctionName(BLangFunction function) {
    boolean isResource = Symbols.isFlagOn(function.symbol.flags, Flags.RESOURCE);
    return isResource ? TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION : TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION;
}
@Override
public void visit(BLangResource resourceNode) {
    // Intentionally empty: no desugaring is performed for resource nodes here.
}
/** Desugars an annotation definition by rewriting each of its attachments in the current env. */
public void visit(BLangAnnotation annotationNode) {
    for (BLangAnnotationAttachment attachment : annotationNode.annAttachments) {
        rewrite(attachment, env);
    }
}
/** Desugars an annotation attachment: only its value expression needs rewriting. */
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    BLangExpression rewrittenValue = rewrite(annAttachmentNode.expr, env);
    annAttachmentNode.expr = rewrittenValue;
    result = annAttachmentNode;
}
@Override
public void visit(BLangSimpleVariable varNode) {
    // Variables owned by neither an invokable nor a let expression (presumably
    // module-level variables — TODO confirm) have their initializer dropped here;
    // it is handled elsewhere.
    boolean ownedByInvokable = (varNode.symbol.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE;
    boolean ownedByLet = (varNode.symbol.owner.tag & SymTag.LET) == SymTag.LET;
    if (!ownedByInvokable && !ownedByLet) {
        varNode.expr = null;
        result = varNode;
        return;
    }
    if (varNode.typeNode != null && varNode.typeNode.getKind() != null) {
        varNode.typeNode = rewrite(varNode.typeNode, env);
    }
    // Rewrite the initializer and insert a conversion to the declared type if needed.
    BLangExpression initExpr = rewriteExpr(varNode.expr);
    if (initExpr != null) {
        initExpr = addConversionExprIfRequired(initExpr, varNode.type);
    }
    varNode.expr = initExpr;
    for (BLangAnnotationAttachment attachment : varNode.annAttachments) {
        rewrite(attachment, env);
    }
    result = varNode;
}
@Override
public void visit(BLangLetExpression letExpression) {
    // A let expression is desugared into a block of let-variable definitions followed
    // by a temp variable holding the final expression, wrapped as a statement expression.
    SymbolEnv prevEnv = this.env;
    // Rewrite the let-variable definitions under the let expression's own environment.
    this.env = letExpression.env;
    BLangExpression expr = letExpression.expr;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
        // Destructuring definitions desugar to a whole block of statements; flatten it.
        if (node.getKind() == NodeKind.BLOCK) {
            blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
        } else {
            blockStmt.addStatement((BLangSimpleVariableDef) node);
        }
    }
    BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
                                                     expr.type, expr, expr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.type = expr.type;
    result = rewrite(stmtExpr, env);
    // Restore the enclosing environment.
    this.env = prevEnv;
}
@Override
public void visit(BLangTupleVariable varNode) {
    // Tuple binding patterns are desugared at their definition site
    // (see visit(BLangTupleVariableDef)); the variable node itself passes through.
    result = varNode;
}
@Override
public void visit(BLangRecordVariable varNode) {
    // Record binding patterns are desugared at their definition site
    // (see visit(BLangRecordVariableDef)); the variable node itself passes through.
    result = varNode;
}
@Override
public void visit(BLangErrorVariable varNode) {
    // Error binding patterns are desugared at their definition site
    // (see visit(BLangErrorVariableDef)); the variable node itself passes through.
    result = varNode;
}
@Override
public void visit(BLangBlockStmt block) {
    // Rewrite the contained statements under a fresh block-scope environment.
    SymbolEnv blockScopeEnv = SymbolEnv.createBlockEnv(block, env);
    block.stmts = rewriteStmt(block.stmts, blockScopeEnv);
    result = block;
}
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    // Desugar the contained variable; the definition node itself is reused.
    varDefNode.var = rewrite(varDefNode.var, env);
    result = varDefNode;
}
@Override
public void visit(BLangTupleVariableDef varDefNode) {
    // Desugar `[a, b, ...rest] = expr` into: any[] tuple = expr; followed by one
    // member-extraction statement per binding, plus rest-field handling.
    varDefNode.var = rewrite(varDefNode.var, env);
    BLangTupleVariable tupleVariable = varDefNode.var;
    final BLangBlockStmt block = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType anyArrayType = new BArrayType(symTable.anyType);
    String varName = "tuple";
    BVarSymbol tupleSymbol = new BVarSymbol(0, names.fromString(varName), this.env.scope.owner.pkgID,
                                            anyArrayType, this.env.scope.owner);
    final BLangSimpleVariable tupleVar = ASTBuilderUtil.createVariable(varDefNode.pos, varName, anyArrayType,
                                                                       null, tupleSymbol);
    tupleVar.expr = tupleVariable.expr;
    final BLangSimpleVariableDef tupleVarDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, block);
    tupleVarDef.var = tupleVar;
    // Named members first, then the rest binding.
    createVarDefStmts(tupleVariable, block, tupleVar.symbol, null);
    createRestFieldVarDefStmts(tupleVariable, block, tupleVar.symbol);
    result = rewrite(block, env);
}
/**
 * Generates statements that populate the rest binding (`...rest`) of a tuple binding
 * pattern: a desugared foreach copies the trailing members of the source tuple
 * (indices from the named-member count up to its length) into a fresh array.
 * No-op when the pattern has no rest variable.
 */
private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
                                        BVarSymbol tupleVarSymbol) {
    final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
    boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
    DiagnosticPos pos = blockStmt.pos;
    if (arrayVar != null) {
        // rest = [] — the rest array starts out empty.
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = arrayVar.type;
        arrayVar.expr = arrayExpr;
        BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
        arrayVarDef.var = arrayVar;
        BLangExpression tupleExpr = parentTupleVariable.expr;
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
        // First index not covered by a named member binding.
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) (isTupleType ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                : parentTupleVariable.memberVariables.size());
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        // foreach $foreach$i in startIndex...length { rest[rest.length()] = tuple[$foreach$i]; }
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // Indexing the rest array by its own current length appends to it.
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
                createLengthInvocation(pos, arrayVarRef));
        indexAccessExpr.type = (isTupleType ? ((BTupleType) parentTupleVariable.type).restType : symTable.anyType);
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}
@Override
public void visit(BLangRecordVariableDef varDefNode) {
    // Desugar `{f1: a, ...} = expr` into: map<any> $map$0 = expr; followed by one
    // field-extraction statement per binding.
    BLangRecordVariable recordVar = varDefNode.var;
    final BLangBlockStmt block = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType mapAnyType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    BVarSymbol mapSymbol = new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                                          mapAnyType, this.env.scope.owner);
    final BLangSimpleVariable mapVar = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", mapAnyType,
                                                                     null, mapSymbol);
    mapVar.expr = varDefNode.var.expr;
    final BLangSimpleVariableDef mapVarDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, block);
    mapVarDef.var = mapVar;
    createVarDefStmts(recordVar, block, mapVar.symbol, null);
    result = rewrite(block, env);
}
@Override
public void visit(BLangErrorVariableDef varDefNode) {
    // Desugar `error(reason, ...detail) = expr` into: error $error$ = expr; followed by
    // reason/detail extraction statements.
    BLangErrorVariable errorVariable = varDefNode.errorVariable;
    final BLangBlockStmt block = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BVarSymbol errorSymbol = new BVarSymbol(0, names.fromString("$error$"),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(varDefNode.pos, errorSymbol.name.value,
            symTable.errorType, null, errorSymbol);
    errorVar.expr = errorVariable.expr;
    final BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, block);
    errorVarDef.var = errorVar;
    createVarDefStmts(errorVariable, block, errorVar.symbol, null);
    result = rewrite(block, env);
}
/**
* This method iterate through each member of the tupleVar and create the relevant var def statements. This method
* does the check for node kind of each member and call the related var def creation method.
*
* Example:
* ((string, float), int) ((a, b), c) = (tuple)
*
* (a, b) is again a tuple, so it is a recursive var def creation.
*
* c is a simple var, so a simple var def will be created.
*
*/
private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
    for (int index = 0; index < memberVars.size(); index++) {
        BLangVariable variable = memberVars.get(index);
        // Each member is extracted by its position in the source tuple.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
        // Simple binding: a single var def statement suffices.
        if (NodeKind.VARIABLE == variable.getKind()) {
            createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol,
                    parentIndexAccessExpr);
            continue;
        }
        // Nested tuple binding: recurse with an index access into this member.
        if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested record binding: recurse with a map-typed index access.
        if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested error binding: element type comes from the array element type when available.
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            BType accessedElemType = symTable.errorType;
            if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                accessedElemType = arrayType.eType;
            }
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
        }
    }
}
/**
* Overloaded method to handle record variables.
* This method iterate through each member of the recordVar and create the relevant var def statements. This method
* does the check for node kind of each member and call the related var def creation method.
*
* Example:
* type Foo record {
* string name;
* (int, string) age;
* Address address;
* };
*
* Foo {name: a, age: (b, c), address: d} = {record literal}
*
* a is a simple var, so a simple var def will be created.
*
* (b, c) is a tuple, so it is a recursive var def creation.
*
* d is a record, so it is a recursive var def creation.
*
*/
private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
    for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
        BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
        // Each field is extracted by its key name from the source map/record.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                recordFieldKeyValue.key.value);
        // Simple binding: a single var def statement suffices.
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
            createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        // Nested tuple binding: recurse with an index access into this field.
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                    parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested record binding: recurse with a map-typed index access.
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested error binding.
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
        }
    }
    // Rest binding (`...rest`): filter out the already-bound keys and assign the remainder.
    if (parentRecordVariable.restParam != null) {
        DiagnosticPos pos = parentBlockStmt.pos;
        BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
        BLangSimpleVarRef variableReference;
        if (parentIndexAccessExpr != null) {
            // Nested pattern: materialize the accessed sub-map into a temp variable first.
            BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1",
                    parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"),
                            this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner));
            mapVariable.expr = parentIndexAccessExpr;
            BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDef.var = mapVariable;
            variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
        } else {
            // Top-level pattern: statement 0 of the block is the source-map definition.
            variableReference = ASTBuilderUtil.createVariableRef(pos,
                    ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
        }
        List<String> keysToRemove = parentRecordVariable.variableList.stream()
                .map(var -> var.getKey().getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                keysToRemove, restParamType, parentBlockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
        BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
        BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos,
                parentBlockStmt);
        restParamVarDef.var = restParam;
        restParamVarDef.var.type = restParamType;
        restParam.expr = varRef;
    }
}
/**
* This method will create the relevant var def statements for reason and details of the error variable.
* The var def statements are created by creating the reason() and detail() builtin methods.
*/
private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
    BVarSymbol convertedErrorVarSymbol;
    if (parentIndexBasedAccess != null) {
        // Nested pattern: materialize the accessed member into an error-typed temp var,
        // temporarily widening the access type to `any` so the conversion applies.
        BType prevType = parentIndexBasedAccess.type;
        parentIndexBasedAccess.type = symTable.anyType;
        BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++,
                symTable.errorType,
                addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                parentErrorVariable.pos);
        parentIndexBasedAccess.type = prevType;
        parentBlockStmt.addStatement(errorVarDef);
        convertedErrorVarSymbol = errorVarDef.var.symbol;
    } else {
        convertedErrorVarSymbol = errorVariableSymbol;
    }
    // reason binding: extract via the reason() builtin; `_` (IGNORE) drops the binding.
    parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
            parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
    if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
        parentErrorVariable.reason = null;
    } else {
        BLangSimpleVariableDef reasonVariableDef =
                ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
        reasonVariableDef.var = parentErrorVariable.reason;
    }
    // Nothing more to do when there are no detail bindings at all.
    if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
            && parentErrorVariable.restDetail == null) {
        return;
    }
    BType detailMapType;
    BType detailType = ((BErrorType) parentErrorVariable.type).detailType;
    if (detailType.tag == TypeTags.MAP) {
        detailMapType = detailType;
    } else {
        detailMapType = symTable.detailType;
    }
    // detail bindings: fetch the detail map once into a temp var, then bind each entry.
    parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
            parentErrorVariable.pos,
            convertedErrorVarSymbol, null);
    BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
            parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
    detailTempVarDef.type = parentErrorVariable.detailExpr.type;
    parentBlockStmt.addStatement(detailTempVarDef);
    this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
        BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
        createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
    }
    // ...rest detail binding (unless ignored): filter out already-bound keys.
    if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
        DiagnosticPos pos = parentErrorVariable.restDetail.pos;
        BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                pos, detailTempVarDef.var.symbol);
        List<String> keysToRemove = parentErrorVariable.detail.stream()
                .map(detail -> detail.key.getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove,
                parentErrorVariable.restDetail.type, parentBlockStmt);
        BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
        variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                parentErrorVariable.restDetail.name.value,
                filteredDetail.type,
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                parentErrorVariable.restDetail.symbol);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
        parentBlockStmt.addStatement(assignmentStmt);
    }
    rewrite(parentBlockStmt, env);
}
/**
 * Defines a temp variable {@code $cast$temp$} of {@code targetType} initialized from the
 * given symbol, inserting a conversion expression for non-record target types.
 */
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarSymbol, DiagnosticPos pos,
                                                     BType targetType) {
    BVarSymbol castTempSymbol = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
            this.env.enclPkg.packageID, targetType, this.env.scope.owner);
    BLangSimpleVarRef sourceRef = ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
    // Records are used as-is; any other target may need an explicit conversion.
    BLangExpression initExpr = targetType.tag == TypeTags.RECORD
            ? sourceRef
            : addConversionExprIfRequired(sourceRef, targetType);
    BLangSimpleVariable castTempVar = ASTBuilderUtil.createVariable(pos, castTempSymbol.name.value,
            targetType, initExpr, castTempSymbol);
    return ASTBuilderUtil.createVariableDef(pos, castTempVar);
}
/**
 * Builds the statement sequence that computes a "rest" map: starting from {@code mapVarRef},
 * it takes entries(), filters out {@code keysToRemove}, maps the entries back to their
 * values, reconstructs a map of {@code targetType}, and returns the variable holding the
 * final result. All intermediate variables are appended to {@code parentBlockStmt}.
 */
private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                               List<String> keysToRemove, BType targetType,
                                               BLangBlockStmt parentBlockStmt) {
    BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
    // Unique suffix so repeated rest-filters in one scope do not collide.
    int restNum = annonVarCount++;
    String name = "$map$ref$" + restNum;
    BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
    // entries(): map<[string, V]>
    BLangInvocation entriesInvocation = generateMapEntriesInvocation(
            ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type);
    String entriesVarName = "$map$ref$entries$" + restNum;
    BType entriesType = new BMapType(TypeTags.MAP,
            new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
    BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType, parentBlockStmt,
            addConversionExprIfRequired(entriesInvocation, entriesType),
            entriesVarName);
    // filter(): drop the entries whose keys were already bound.
    BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
    BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
    String filteredEntriesName = "$filtered$detail$entries" + restNum;
    BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation,
            filteredEntriesName);
    // map(): project each remaining [key, value] entry back to its value.
    String filteredVarName = "$detail$filtered" + restNum;
    BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
    BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
    BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt,
            mapInvocation,
            filteredVarName);
    // constructFrom(): produce a value of the requested target type.
    String filteredRestVarName = "$restVar$" + restNum;
    BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol);
    return defVariable(pos, targetType, parentBlockStmt,
            addConversionExprIfRequired(constructed, targetType),
            filteredRestVarName);
}
/**
 * Builds a lang-lib {@code entries()} invocation on {@code expr}; the invocation's type is
 * the return type of the resolved lang-lib method.
 */
private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) {
    BLangInvocation invocationNode = createInvocationNode("entries", new ArrayList<>(), type);
    invocationNode.expr = expr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries"));
    // The receiver expression doubles as the required argument of the lang-lib call.
    invocationNode.requiredArgs = Lists.of(expr);
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds a lang-lib {@code map()} invocation on {@code filteredVar} with
 * {@code backToMapLambda} as the mapping function argument.
 */
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
                                                 BLangLambdaFunction backToMapLambda) {
    BLangInvocation invocationNode = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
    invocationNode.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
    invocationNode.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
    invocationNode.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
    invocationNode.type = invocationNode.symbol.type.getReturnType();
    // NOTE(review): appending to the result of Lists.of assumes that utility returns a
    // mutable list — confirm against the project's Lists implementation.
    invocationNode.requiredArgs.add(backToMapLambda);
    return invocationNode;
}
// Generates `function ([string, any] $lambdaArg$0) returns any { any val = $lambdaArg$0[1]; return val; }`,
// i.e. a lambda that projects a [key, value] entry tuple to its value. Used together with map()
// to turn a filtered entries-map back into a plain map.
private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
// Single [string, any] tuple parameter holding one map entry.
BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
getStringAnyTupleType(), this.env.scope.owner);
BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
null, keyValSymbol);
function.requiredParams.add(inputParameter);
// Return type is `any`.
BLangValueType anyType = new BLangValueType();
anyType.typeKind = TypeKind.ANY;
anyType.type = symTable.anyType;
function.returnTypeNode = anyType;
BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
function.body = functionBlock;
// val = $lambdaArg$0[1] — index 1 selects the entry's value component.
BLangIndexBasedAccess indexBasesAccessExpr =
ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol,
ASTBuilderUtil
.createLiteral(pos, symTable.intType, (long) 1));
BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
indexBasesAccessExpr, pos);
functionBlock.addStatement(tupSecondElem);
// return val;
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);
// Create the invokable symbol for the generated function and mirror its signature.
BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
functionSymbol.retType = function.returnTypeNode.type;
functionSymbol.params = function.requiredParams.stream()
.map(param -> param.symbol)
.collect(Collectors.toList());
functionSymbol.scope = env.scope;
functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
symTable.anyType, null);
function.symbol = functionSymbol;
// Desugar the generated function itself, then register it at package level.
rewrite(function, env);
env.enclPkg.addFunction(function);
return createLambdaFunction(function, functionSymbol);
}
/**
 * Builds a lang-lib {@code filter()} invocation on the entries-map variable, using the given
 * predicate lambda.
 *
 * @param pos                 position to attach to generated nodes
 * @param entriesInvocationVar variable holding the entries map
 * @param filter              predicate lambda deciding which entries to keep
 * @return the constructed invocation node
 */
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos,
                                                    BLangSimpleVariable entriesInvocationVar,
                                                    BLangLambdaFunction filter) {
    BLangInvocation filterCall = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type);
    filterCall.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter"));
    filterCall.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
    filterCall.type = filterCall.symbol.type.getReturnType();
    // Receiver reference first, then the predicate lambda.
    filterCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol));
    filterCall.requiredArgs.add(filter);
    return filterCall;
}
// Defines a new variable `name` of `varType` initialized with `expression`, appends its
// definition statement to `parentBlockStmt`, and registers the symbol in the current scope.
// Returns the created variable so callers can reference its symbol.
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
BLangExpression expression, String name) {
Name varName = names.fromString(name);
BLangSimpleVariable detailMap = ASTBuilderUtil.createVariable(pos, name, varType, expression,
new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner));
BLangSimpleVariableDef constructedMap = ASTBuilderUtil.createVariableDef(pos, detailMap);
constructedMap.type = varType;
parentBlockStmt.addStatement(constructedMap);
// Make the new symbol resolvable for subsequently generated statements.
env.scope.define(varName, detailMap.symbol);
return detailMap;
}
// Creates a definition statement for the binding pattern of one error-detail entry and appends
// it to `parentBlockStmt`. Dispatches on the pattern kind: simple variable, record pattern, or
// tuple pattern.
private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
BLangErrorVariable.BLangErrorDetailEntry detailEntry,
BLangExpression detailEntryVar) {
if (detailEntry.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
// Simple variable: `T name = <detail access expr>;`
BLangSimpleVariableDef errorDetailVar = createVarDef(
((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
detailEntry.valueBindingPattern.type,
detailEntryVar,
detailEntry.valueBindingPattern.pos);
parentBlockStmt.addStatement(errorDetailVar);
} else if (detailEntry.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
// Record pattern: the detail access expression becomes the record var's initializer.
BLangRecordVariableDef recordVariableDef = ASTBuilderUtil.createRecordVariableDef(
detailEntry.valueBindingPattern.pos,
(BLangRecordVariable) detailEntry.valueBindingPattern);
recordVariableDef.var.expr = detailEntryVar;
recordVariableDef.type = symTable.recordType;
parentBlockStmt.addStatement(recordVariableDef);
} else if (detailEntry.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
// NOTE(review): unlike the record branch, `detailEntryVar` is never attached to the
// tuple variable def here — confirm the tuple var's initializer is set elsewhere.
BLangTupleVariableDef tupleVariableDef = ASTBuilderUtil.createTupleVariableDef(
detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern);
parentBlockStmt.addStatement(tupleVariableDef);
}
}
// Builds the access expression `tempDetailVar["<key>"]` for one error-detail entry. If the
// result is an index-based access, its original type is widened to the pure type so the
// generated access stays valid over the detail map.
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
BVarSymbol tempDetailVarSymbol) {
BLangExpression detailEntryVar = createIndexBasedAccessExpr(
detailEntry.valueBindingPattern.type,
detailEntry.valueBindingPattern.pos,
createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
tempDetailVarSymbol, null);
if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
bLangIndexBasedAccess.originalType = symTable.pureType;
}
return detailEntryVar;
}
// Folds the parts of a string-template into a left-associated chain of `+` binary expressions.
// Non-string/non-xml parts are first wrapped in a toString() invocation. The result type of
// each concatenation is xml if either operand is xml, otherwise string.
// NOTE: returns null when `exprs` is empty — callers are expected to pass a non-empty list.
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
BLangExpression concatExpr = null;
BLangExpression currentExpr;
for (BLangExpression expr : exprs) {
currentExpr = expr;
if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) {
// Coerce non-string, non-xml parts via lang.value:toString().
currentExpr = getToStringInvocationOnExpr(expr);
}
if (concatExpr == null) {
// First part seeds the chain.
concatExpr = currentExpr;
continue;
}
BType binaryExprType =
TypeTags.isXMLTypeTag(concatExpr.type.tag) || TypeTags.isXMLTypeTag(currentExpr.type.tag)
? symTable.xmlType
: symTable.stringType;
concatExpr =
ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr,
binaryExprType, OperatorKind.ADD, null);
}
return concatExpr;
}
/**
 * Wraps the given expression in a {@code lang.value:toString()} invocation, converting the
 * argument to the function's parameter type first if required.
 *
 * @param expression expression to stringify
 * @return invocation node calling {@code toString} on the (possibly converted) expression
 */
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
            .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
    // Plain ArrayList instead of double-brace initialization: the anonymous subclass idiom
    // allocates an extra class instance capturing `this` on every call.
    List<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
    return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
            symResolver);
}
/**
 * Builds an invocation of the error-detail builtin on either the parent index access (when the
 * error value is nested) or a direct reference to the error variable.
 */
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr;
    if (parentIndexBasedAccess != null) {
        onExpr = parentIndexBasedAccess;
    } else {
        onExpr = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    }
    return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, onExpr, new ArrayList<>(), null, pos);
}
/**
 * Builds an invocation of the error-reason builtin on either the parent index access (when the
 * error value is nested) or a direct reference to the error variable.
 */
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
                                                           BVarSymbol errorVarSymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression onExpr;
    if (parentIndexBasedAccess != null) {
        onExpr = parentIndexBasedAccess;
    } else {
        onExpr = ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
    }
    return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, onExpr, new ArrayList<>(), reasonType, pos);
}
// Builds `<targetType>.constructFrom(source)`, i.e. an invocation of the constructFrom lang-lib
// method on a typedesc of `targetType` with `source` as the value argument. The invocation's
// static type is `targetType|error`, matching constructFrom's contract.
private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos,
BType targetType,
BVarSymbol source) {
BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
BLangInvocation invocationNode = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType);
// The receiver is a typedesc expression resolving to the target type.
BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = targetType;
typedescExpr.type = typedescType;
invocationNode.expr = typedescExpr;
invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM));
invocationNode.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source));
invocationNode.type = BUnionType.create(null, targetType, symTable.errorType);
return invocationNode;
}
// Generates a boolean lambda `function ([string, any] $lambdaArg$0) returns boolean` that
// returns false for any entry whose key is in `toRemoveList` and true otherwise. Used as the
// filter() predicate when computing a rest-binding's remaining fields.
private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
getStringAnyTupleType(), this.env.scope.owner);
BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
// key = $lambdaArg$0[0] — index 0 selects the entry's key component.
BLangIndexBasedAccess indexBasesAccessExpr =
ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol, ASTBuilderUtil
.createLiteral(pos, symTable.intType, (long) 0));
BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
indexBasesAccessExpr, pos);
functionBlock.addStatement(tupFirstElem);
// One `if (key == <name>) { return false; }` per field to exclude.
for (String toRemoveItem : toRemoveList) {
createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
}
// Fall-through `return true;` plus the function's symbol and registration.
BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
return createLambdaFunction(function, functionSymbol);
}
// Convenience overload: collects the record pattern's bound field names and delegates to the
// List<String>-based filter-lambda generator.
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable, DiagnosticPos pos) {
List<String> fieldNamesToRemove = recordVariable.variableList.stream()
.map(var -> var.getKey().getValue())
.collect(Collectors.toList());
return createFuncToFilterOutRestParam(fieldNamesToRemove, pos);
}
// Appends `if ((<param> == "<key>")) { return false; }` to the given function block. Used by
// the generated rest-filter lambda to drop entries whose key matches an already-bound field.
private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockFunctionBody blockStmt,
String key) {
BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
// The key is typed `any` in the tuple; convert to string before comparing.
BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
ifStmt.body = ifBlock;
BLangGroupExpr groupExpr = new BLangGroupExpr();
groupExpr.type = symTable.booleanType;
BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
symTable.booleanType, OperatorKind.EQUAL, null);
// Resolve the `==` operator symbol for string operands.
binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
groupExpr.expression = binaryExpr;
ifStmt.expr = groupExpr;
}
/**
 * Wraps an already-built function and its symbol in a lambda-function node.
 *
 * @param function       the generated function
 * @param functionSymbol the function's invokable symbol; its type becomes the lambda's type
 * @return the lambda-function wrapper node
 */
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambda.type = functionSymbol.type;
    lambda.function = function;
    return lambda;
}
// Appends the fall-through `return true;` to the generated filter lambda, creates and wires the
// function's invokable symbol ([string, any] -> boolean), desugars the function, and registers
// it in the package. Returns the created symbol.
private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
BLangBlockFunctionBody functionBlock) {
BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
// Create the function symbol
BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
new Name(function.name.value),
env.enclPkg.packageID, function.type,
env.enclEnv.enclVarSym, true);
functionSymbol.retType = function.returnTypeNode.type;
functionSymbol.params = function.requiredParams.stream()
.map(param -> param.symbol)
.collect(Collectors.toList());
functionSymbol.scope = env.scope;
functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
getRestType(functionSymbol), symTable.booleanType, null);
function.symbol = functionSymbol;
// Desugar the generated function itself, then register it at package level.
rewrite(function, env);
env.enclPkg.addFunction(function);
return functionSymbol;
}
// Sets up the skeleton of a generated predicate function: adds the [string, any] tuple input
// parameter, sets the boolean return type, and attaches an empty body block which is returned
// for the caller to populate.
private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
BVarSymbol keyValSymbol) {
BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
null, keyValSymbol);
function.requiredParams.add(inputParameter);
BLangValueType booleanTypeKind = new BLangValueType();
booleanTypeKind.typeKind = TypeKind.BOOLEAN;
booleanTypeKind.type = symTable.booleanType;
function.returnTypeNode = booleanTypeKind;
BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
function.body = functionBlock;
return functionBlock;
}
/**
 * Returns a fresh {@code [string, any]} tuple type, used as the parameter type of the
 * generated map-entry filter/projection lambdas.
 *
 * @return a new tuple type with member types {@code string} and {@code any}
 */
private BTupleType getStringAnyTupleType() {
    // Plain list construction instead of double-brace initialization: the anonymous-subclass
    // idiom allocates an extra class instance capturing `this` on every call.
    ArrayList<BType> typeList = new ArrayList<>(2);
    typeList.add(symTable.stringType);
    typeList.add(symTable.anyType);
    return new BTupleType(typeList);
}
/**
 * This method creates a simple variable def and assigns an array access expression based on the given indexExpr.
 *
 * case 1: when there is no parent array access expression, but with the indexExpr : 1
 * string s = x[1];
 *
 * case 2: when there is a parent array expression : x[2] and indexExpr : 3
 * string s = x[2][3];
 *
 * case 3: when there is no parent array access expression, but with the indexExpr : name
 * string s = x[name];
 *
 * case 4: when there is a parent map expression : x[name] and indexExpr : fName
 * string s = x[name][fName];
 *
 * case 5: when there is a parent map expression : x[name] and indexExpr : 1
 * string s = x[name][1];
 */
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
BLangIndexBasedAccess parentArrayAccessExpr) {
Name varName = names.fromIdNode(simpleVariable.name);
// `_` bindings capture nothing; no definition is emitted.
if (varName == Names.IGNORE) {
return;
}
final BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos,
parentBlockStmt);
simpleVariableDef.var = simpleVariable;
simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}
@Override
public void visit(BLangAssignment assignNode) {
    // Safe-navigation on the LHS (e.g. `a?.b = x`) desugars into its own statement tree.
    if (safeNavigateLHS(assignNode.varRef)) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
        accessExpr.leafNode = true;
        result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
        result = rewrite(result, env);
        return;
    }
    assignNode.varRef = rewriteExpr(assignNode.varRef);
    // Rewrite the RHS exactly once, then add a conversion to the LHS type if required.
    // (Previously the RHS was rewritten twice: once standalone and again inside the
    // conversion call, re-desugaring an already-desugared expression.)
    assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
    result = assignNode;
}
// Desugars tuple destructuring `(a, b, ...rest) = expr` into a block: first a temp `any[]`
// variable holding the RHS, then one assignment per member, then (via
// createRestFieldAssignmentStmt) the rest-binding foreach if present.
@Override
public void visit(BLangTupleDestructure tupleDestructure) {
final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
// Temp holder typed any[] so members of any static type can be read back by index.
BType runTimeType = new BArrayType(symTable.anyType);
String name = "tuple";
final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null,
new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
this.env.scope.owner));
tuple.expr = tupleDestructure.expr;
final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos,
blockStmt);
variableDef.var = tuple;
createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null);
createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol);
result = rewrite(blockStmt, env);
}
// If the tuple var-ref has a rest binding (`...rest`), appends statements that (1) assign a new
// empty array to `rest`, then (2) run a foreach over the index range
// [number-of-bound-members, length(tupleExpr)) copying each remaining member into `rest`.
private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt,
BVarSymbol tupleVarSymbol) {
BLangTupleVarRef tupleVarRef = tupleDestructure.varRef;
DiagnosticPos pos = blockStmt.pos;
if (tupleVarRef.restParam != null) {
BLangExpression tupleExpr = tupleDestructure.expr;
// rest = [];
BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam;
BArrayType restParamType = (BArrayType) restParam.type;
BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
arrayExpr.type = restParamType;
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = arrayExpr;
// Iteration starts after the explicitly bound members.
BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
startIndexLiteral.value = (long) tupleVarRef.expressions.size();
startIndexLiteral.type = symTable.intType;
BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
getModifiedIntRangeEndExpr(lengthInvocation));
// foreach $foreach$i in start...end { rest[rest.length()] = tuple[$foreach$i]; }
BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
foreach.pos = pos;
foreach.collection = intRangeInvocation;
types.setForeachTypedBindingPatternType(foreach);
final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
"$foreach$i", foreach.varType);
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
foreach.isDeclaredWithVar = true;
BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
// Appending via rest[rest.length()] grows the rest array by one each iteration.
BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
createLengthInvocation(pos, restParam));
indexAccessExpr.type = restParamType.eType;
createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
foreach.body = foreachBody;
blockStmt.addStatement(foreach);
}
}
/**
 * Builds a lang-lib {@code length()} invocation on the given collection expression.
 *
 * @param pos        position to attach to the generated node
 * @param collection expression whose length is queried
 * @return the constructed invocation node
 */
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    BInvokableSymbol lengthSymbol = (BInvokableSymbol) symResolver
            .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME));
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthSymbol,
            Lists.of(collection), symResolver);
    invocation.type = lengthSymbol.type.getReturnType();
    invocation.argExprs = invocation.requiredArgs;
    return invocation;
}
/**
 * Iterates through each member of the tupleVarRef and creates the relevant var-ref assignment
 * statements. Dispatches on the node kind of each member and calls the related var-ref creation
 * method.
 *
 * Example:
 * ((a, b), c)) = (tuple)
 *
 * (a, b) is again a tuple, so it is a recursive var ref creation.
 *
 * c is a simple var, so a simple var def will be created.
 *
 */
private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangExpression> expressions = parentTupleVariable.expressions;
for (int index = 0; index < expressions.size(); index++) {
BLangExpression expression = expressions.get(index);
if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
// Leaf reference: assign tuple[index] directly.
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index);
createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
tupleVarSymbol, parentIndexAccessExpr);
continue;
}
if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
// Nested tuple: recurse with tuple[index] as the new parent access expression.
BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
continue;
}
if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
// Nested record: recurse, then register a type definition for the record's type.
BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangRecordVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
TypeDefBuilderHelper.addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol,
TypeDefBuilderHelper.createRecordTypeNode(
(BRecordType) recordVarRef.type,
env.enclPkg.packageID, symTable, recordVarRef.pos),
env);
continue;
}
if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
// Nested error pattern: recurse with tuple[index] as the parent access expression.
BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
}
}
}
/**
 * Creates an assignment statement whose RHS is an index-based access expression built from the
 * given indexExpr (and optional parent access expression), converted to the var-ref's type if
 * required. `_` references are skipped.
 */
private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt,
BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
BLangIndexBasedAccess parentArrayAccessExpr) {
if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName);
// `_` bindings capture nothing; no assignment is emitted.
if (varName == Names.IGNORE) {
return;
}
}
BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos,
indexExpr, tupleVarSymbol, parentArrayAccessExpr);
assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type);
final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos,
parentBlockStmt);
assignmentStmt.varRef = simpleVarRef;
assignmentStmt.expr = assignmentExpr;
}
/**
 * Builds an index-based access over the temp holder symbol (optionally chained onto a parent
 * access expression). For value types the `any`-typed access is wrapped in an explicit type
 * conversion down to {@code varType}; reference types are returned as-is.
 */
private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr,
                                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) {
    BLangIndexBasedAccess accessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(varPos,
            symTable.anyType, tupleVarSymbol, indexExpr);
    accessExpr.originalType = varType;
    if (parentExpr != null) {
        accessExpr.expr = parentExpr;
    }
    if (!types.isValueType(varType)) {
        return accessExpr;
    }
    // Value types need an explicit narrowing conversion from the `any`-typed access.
    BLangTypeConversionExpr cast = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    cast.expr = accessExpr;
    cast.type = varType;
    return cast;
}
// Desugars record destructuring `{a, b, ...rest} = expr` into a block: a temp `map<any>`
// variable holding the RHS, followed by one assignment per mapped field (and the rest-field
// handling inside createVarRefAssignmentStmts).
@Override
public void visit(BLangRecordDestructure recordDestructure) {
final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
String name = "$map$0";
final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(recordDestructure.pos, name, runTimeType,
null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
runTimeType, this.env.scope.owner));
mapVariable.expr = recordDestructure.expr;
final BLangSimpleVariableDef variableDef = ASTBuilderUtil.
createVariableDefStmt(recordDestructure.pos, blockStmt);
variableDef.var = mapVariable;
createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null);
result = rewrite(blockStmt, env);
}
/**
 * Desugars error destructuring {@code error(reason, ...detail) = expr} into a block: a temp
 * error-typed variable holding the RHS, followed by the reason/detail assignment statements.
 */
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
    String name = "$error$";
    BVarSymbol errorSym = new BVarSymbol(0, names.fromString(name),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, name,
            symTable.errorType, null, errorSym);
    errorVar.expr = errorDestructure.expr;
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos, blockStmt);
    variableDef.var = errorVar;
    createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null);
    result = rewrite(blockStmt, env);
}
// Creates one assignment statement per field of the record var-ref (recursing into nested
// record/tuple/error patterns), then handles the rest binding by filtering the already-bound
// keys out of the source map and assigning the remainder.
private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
BLangExpression variableReference = varRefKeyValue.variableReference;
// Fields are accessed by their string key.
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
varRefKeyValue.variableName.getValue());
if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
// Leaf reference: assign map["<field>"] directly.
createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
indexExpr, recordVarSymbol, parentIndexAccessExpr);
continue;
}
if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
// Nested record: recurse with map["<field>"] as the new parent access expression.
BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
// Nested tuple: recurse with map["<field>"] as the new parent access expression.
BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
symTable.tupleType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
// Nested error pattern: recurse with map["<field>"] as the parent access expression.
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos,
symTable.errorType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol,
arrayAccessExpr);
}
}
if (parentRecordVarRef.restParam != null) {
DiagnosticPos pos = parentBlockStmt.pos;
BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
BLangSimpleVarRef variableReference;
if (parentIndexAccessExpr != null) {
// Nested pattern: materialize the parent access into a temp map so it can be filtered.
BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
restParamType, this.env.scope.owner));
mapVariable.expr = parentIndexAccessExpr;
BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
variableDef.var = mapVariable;
variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
} else {
// Top-level pattern: reuse the temp map defined as the block's first statement.
variableReference = ASTBuilderUtil.createVariableRef(pos,
((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
}
BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
// rest = <source map filtered to exclude every explicitly bound field>.
List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
.map(field -> field.variableName.value)
.collect(Collectors.toList());
BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
keysToRemove, restParamType, parentBlockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = varRef;
}
}
// Creates the assignment statements for an error var-ref: the reason assignment (unless bound
// to `_`), a temp variable holding the error's detail map, one assignment per named detail
// entry, and — when a non-ignored rest binding exists — an assignment of the detail map
// filtered to exclude the extracted keys.
private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
// reason = <error reason builtin>;
BLangAssignment reasonAssignment = ASTBuilderUtil
.createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type);
reasonAssignment.varRef = parentErrorVarRef.reason;
}
// Nothing else to do when there are no detail bindings and the rest binding is ignored.
if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
return;
}
// $error$detail$N = <error detail builtin>;
BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
errorVarySymbol,
parentIndexAccessExpr);
BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++,
symTable.detailType, errorDetailBuiltinFunction,
parentErrorVarRef.pos);
detailTempVarDef.type = symTable.detailType;
parentBlockStmt.addStatement(detailTempVarDef);
this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
// One assignment per named detail entry; keys are collected for the rest filter below.
List<String> extractedKeys = new ArrayList<>();
for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
extractedKeys.add(detail.name.value);
BLangVariableReference ref = (BLangVariableReference) detail.expr;
BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
createStringLiteral(detail.name.pos, detail.name.value),
detailTempVarDef.var.symbol, null);
if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
bLangIndexBasedAccess.originalType = symTable.pureType;
}
BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
detailAssignment.varRef = ref;
detailAssignment.expr = detailEntryVar;
}
if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
// rest = <detail map filtered to exclude the extracted keys>;
BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
detailTempVarDef.var.symbol);
BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
extractedKeys,
parentErrorVarRef.restVar.type, parentBlockStmt);
BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
parentBlockStmt);
restAssignment.varRef = parentErrorVarRef.restVar;
restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
filteredDetail.symbol);
}
// Record detail types need an initializer function registered on their symbol.
BErrorType errorType = (BErrorType) parentErrorVarRef.type;
if (errorType.detailType.getKind() == TypeKind.RECORD) {
BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
tsymbol.initializerFunc = createRecordInitFunc();
tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
}
}
/**
 * Returns true when the error var-ref's rest binding is absent or is the ignore variable
 * {@code _}, i.e. when no rest-detail assignment needs to be generated.
 */
private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
    if (parentErrorVarRef.restVar == null) {
        return true;
    }
    return parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && IGNORE.value.equals(((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value);
}
// Desugars `abort` into `return -1;` (the transaction block's status code for abort).
@Override
public void visit(BLangAbort abortNode) {
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L);
result = rewrite(returnStmt, env);
}
// Desugars `retry` into `return 1;` (the transaction block's status code for retry).
@Override
public void visit(BLangRetry retryNode) {
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L);
result = rewrite(returnStmt, env);
}
@Override
public void visit(BLangContinue nextNode) {
    // Nothing to desugar for `continue`; pass the node through unchanged.
    result = nextNode;
}
@Override
public void visit(BLangBreak breakNode) {
    // Nothing to desugar for `break`; pass the node through unchanged.
    result = breakNode;
}
@Override
public void visit(BLangReturn returnNode) {
    // Desugar the returned expression when present; a bare `return` has none.
    BLangExpression returnExpr = returnNode.expr;
    if (returnExpr != null) {
        returnNode.expr = rewriteExpr(returnExpr);
    }
    result = returnNode;
}
@Override
public void visit(BLangPanic panicNode) {
    // Desugar the panicked expression in place; the panic node itself is kept.
    panicNode.expr = rewriteExpr(panicNode.expr);
    result = panicNode;
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    // Rewrite the wrapped xmlns declaration; the statement node is kept.
    xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);
    result = xmlnsStmtNode;
}
@Override
public void visit(BLangXMLNS xmlnsNode) {
    // Desugar the namespace URI expression first.
    xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);
    // Declarations owned by a function or a service become local XMLNS nodes;
    // everything else becomes a package-level XMLNS node.
    BSymbol owner = xmlnsNode.symbol.owner;
    boolean isLocal = (owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE
            || (owner.tag & SymTag.SERVICE) == SymTag.SERVICE;
    BLangXMLNS generatedXMLNSNode = isLocal ? new BLangLocalXMLNS() : new BLangPackageXMLNS();
    generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI;
    generatedXMLNSNode.prefix = xmlnsNode.prefix;
    generatedXMLNSNode.symbol = xmlnsNode.symbol;
    result = generatedXMLNSNode;
}
// Desugars `lhs op= rhs` into an equivalent plain assignment `lhs = lhs op rhs`.
public void visit(BLangCompoundAssignment compoundAssignment) {
    BLangVariableReference varRef = compoundAssignment.varRef;
    if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        // Non-indexed LHS: `modifiedExpr` already holds the combined
        // `lhs op rhs` expression, so a plain assignment suffices.
        if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
            varRef.lhsVar = true;
        }
        result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
                rewriteExpr(compoundAssignment.modifiedExpr));
        return;
    }
    // Indexed LHS, possibly nested (e.g. a[i][j] op= x): capture every index
    // expression once in a temp variable, then rebuild the access chain from
    // those temps so each index is evaluated exactly once.
    List<BLangStatement> statements = new ArrayList<>();
    List<BLangSimpleVarRef> varRefs = new ArrayList<>();
    List<BType> types = new ArrayList<>();
    // Walk from the outermost access inward; inserting at index 0 keeps the
    // lists ordered from the base access outward.
    do {
        BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$",
                ((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr,
                compoundAssignment.pos);
        BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
                tempIndexVarDef.var.symbol);
        statements.add(0, tempIndexVarDef);
        varRefs.add(0, tempVarRef);
        types.add(0, varRef.type);
        varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
    } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
    // Rebuild the full index chain using the captured temp index values.
    BLangVariableReference var = varRef;
    for (int ref = 0; ref < varRefs.size(); ref++) {
        var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
        var.type = types.get(ref);
    }
    var.type = compoundAssignment.varRef.type;
    // Append `<chain> = <chain> op <rhs>` after the temp definitions.
    BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
            compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
    rhsExpression.type = compoundAssignment.modifiedExpr.type;
    BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
            rhsExpression);
    statements.add(assignStmt);
    BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
    result = rewrite(bLangBlockStmt, env);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
    // Desugar the wrapped expression; the statement node itself is kept.
    exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
    result = exprStmtNode;
}
@Override
public void visit(BLangIf ifNode) {
    // Desugar condition, then-body and else-branch (may be null) in place.
    ifNode.expr = rewriteExpr(ifNode.expr);
    ifNode.body = rewrite(ifNode.body, env);
    ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
    result = ifNode;
}
@Override
public void visit(BLangMatch matchStmt) {
    // Desugars `match` into a block: the matched expression is captured once
    // in a generated variable, then the clauses become an if-else chain.
    BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    matchBlockStmt.pos = matchStmt.pos;
    // Capture the match expression so each pattern test reads the same value
    // without re-evaluating it.
    String matchExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos,
            matchExprVarName, matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0,
                    names.fromString(matchExprVarName),
                    this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner));
    BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
    matchBlockStmt.stmts.add(matchExprVarDef);
    // Lower the pattern clauses into an if-else chain over the captured value.
    matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
    rewrite(matchBlockStmt, this.env);
    result = matchBlockStmt;
}
@Override
public void visit(BLangForeach foreach) {
    // Desugars `foreach` into a while loop over the collection's iterator.
    // The collection expression is captured once in `$data$`.
    BLangBlockStmt blockNode;
    BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
            foreach.collection.type, this.env.scope.owner);
    BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
            foreach.collection.type, foreach.collection, dataSymbol);
    BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
    BVarSymbol collectionSymbol = dataVariable.symbol;
    switch (foreach.collection.type.tag) {
        // Built-in iterable types obtain their iterator from the lang library.
        case TypeTags.STRING:
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.XML:
        case TypeTags.MAP:
        case TypeTags.STREAM:
        case TypeTags.RECORD:
            BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, true);
            break;
        // Iterable objects use their own attached iterator function.
        case TypeTags.OBJECT:
            iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, false);
            break;
        default:
            // Other collection types: only the data definition is emitted here.
            blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
            blockNode.stmts.add(0, dataVarDef);
            break;
    }
    rewrite(blockNode, this.env);
    result = blockNode;
}
private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach,
                                                     BLangSimpleVariableDef dataVariableDefinition,
                                                     BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    // Define the iterator over the captured collection symbol, lower the
    // foreach into a while loop driven by that iterator, and make sure the
    // `$data$` definition runs before the iterator definition uses it.
    BLangSimpleVariableDef iteratorDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol,
            iteratorInvokableSymbol, isIteratorFuncFromLangLib);
    BLangBlockStmt loopBlock = desugarForeachToWhile(foreach, iteratorDef);
    loopBlock.stmts.add(0, dataVariableDefinition);
    return loopBlock;
}
public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    // Scan the object's attached functions for its iterator function.
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
    BAttachedFunction iteratorFunc = null;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            iteratorFunc = attachedFunc;
            break;
        }
    }
    // NOTE(review): assumes the function always exists on iterable objects;
    // if it is missing this dereference fails with an NPE — confirm the type
    // checker guarantees its presence.
    return iteratorFunc.symbol;
}
// Resolves the lang-lib `iterator` method for the collection's type.
BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
}
// Lowers a foreach into:
//   <varDef>                          // $iterator$ definition (passed in)
//   <resultType>? $result$ = $iterator$.next();
//   while ($result$ is <resultType>) {
//       var <loopVar> = $result$.value;
//       $result$ = $iterator$.next();
//       ...original foreach body...
//   }
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner);
    // Prime `$result$` with the first next() call before entering the loop.
    BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
            foreach.nillableResultType, iteratorSymbol, resultSymbol);
    // Loop condition: the nillable next() result still matches the non-nil
    // result type, i.e. the iterator is not yet exhausted.
    BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
    BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
            .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = foreach.pos;
    whileNode.expr = typeTestExpr;
    whileNode.body = foreach.body;
    BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
    VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
    // Bind the user's loop variable to `$result$.value`, converting the
    // receiver to its non-nil type first.
    BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
    valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr,
            types.getSafeType(valueAccessExpr.expr.type, true, false));
    variableDefinitionNode.getVariable()
            .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
    // Order matters: loop-var binding first, then advance the iterator, then
    // the original body.
    whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
    whileNode.body.stmts.add(1, resultAssignment);
    BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
    blockNode.addStatement(varDef);
    blockNode.addStatement(resultVariableDefinition);
    blockNode.addStatement(whileNode);
    return blockNode;
}
private BLangType getUserDefineTypeNode(BType type) {
    // Wrap an already-resolved type in a user-defined type node with empty
    // package alias and type name; only the `type` field carries information.
    BLangIdentifier emptyPkgAlias = ASTBuilderUtil.createIdentifier(null, "");
    BLangIdentifier emptyTypeName = ASTBuilderUtil.createIdentifier(null, "");
    BLangUserDefinedType typeNode = new BLangUserDefinedType(emptyPkgAlias, emptyTypeName);
    typeNode.type = type;
    return typeNode;
}
@Override
public void visit(BLangWhile whileNode) {
    // Desugar the condition and body in place.
    whileNode.expr = rewriteExpr(whileNode.expr);
    whileNode.body = rewrite(whileNode.body, env);
    result = whileNode;
}
@Override
public void visit(BLangLock lockNode) {
    // Desugars a lock statement into:
    //   lock;
    //   (error|()) $errorResult = trap <body as statement-expression>;
    //   unlock;
    //   if ($errorResult is error) { panic $errorResult; }
    // so the lock is always released before a trapped panic is re-thrown.
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    enclLocks.push(lockStmt);
    // Wrap the body in a nil-valued statement expression and trap it so a
    // panic inside the body cannot skip the unlock below.
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.type = symTable.nilType;
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.type = nillableError;
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
            this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
            nillableError, trapExpr, nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    blockStmt.addStatement(unLockStmt);
    // After unlocking, re-panic if the trapped result was an error.
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    // Popped only after the rewrite above (pushed before it).
    enclLocks.pop();
}
@Override
public void visit(BLangLockStmt lockStmt) {
    // Already in desugared form; pass through unchanged.
    result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    // Already in desugared form; pass through unchanged.
    result = unLockStmt;
}
@Override
public void visit(BLangTransaction transactionNode) {
    // Desugars a transaction statement into four generated lambdas (main body,
    // on-retry, committed, aborted) passed to the transaction initiator
    // function, replacing the whole statement with that invocation.
    DiagnosticPos pos = transactionNode.pos;
    BType trxReturnType = symTable.intType;
    BType otherReturnType = symTable.nilType;
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType);
    DiagnosticPos invPos = transactionNode.pos;
    /* transaction block code will be desugar to function which returns int. Return value determines the status of
       the transaction code.
       ex.
           0 = successful
           1 = retry
          -1 = abort
       Since transaction block code doesn't return anything, we need to add return statement at end of the
       block unless we have abort or retry statement.
    */
    DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
            invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
    BLangStatement statement = null;
    if (!transactionNode.transactionBody.stmts.isEmpty()) {
        statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1);
    }
    // Bug fix: the condition previously tested NodeKind.ABORT twice, so a body
    // ending in `retry` still got a `return 0` appended. Per the comment
    // above, skip the implicit success return for both abort and retry.
    if (statement == null
            || (statement.getKind() != NodeKind.ABORT && statement.getKind() != NodeKind.RETRY)) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L);
        transactionNode.transactionBody.addStatement(returnStmt);
    }
    // Missing handler blocks become empty blocks; default retry count is 3.
    if (transactionNode.abortedBody == null) {
        transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.committedBody == null) {
        transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.onRetryBody == null) {
        transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.retryCount == null) {
        transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L);
    }
    // Lift each block into its own lambda; the main body returns the status
    // int, the handler bodies return nil.
    BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$", Collections.emptyList(),
            trxReturnNode, transactionNode.transactionBody.stmts,
            env, transactionNode.transactionBody.scope);
    BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$", Collections.emptyList(),
            otherReturnNode, transactionNode.onRetryBody.stmts,
            env, transactionNode.onRetryBody.scope);
    BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
            Collections.emptyList(), otherReturnNode,
            transactionNode.committedBody.stmts, env,
            transactionNode.committedBody.scope);
    BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$", Collections.emptyList(),
            otherReturnNode, transactionNode.abortedBody.stmts,
            env, transactionNode.abortedBody.scope);
    trxMainFunc.capturedClosureEnv = env.createClone();
    trxOnRetryFunc.capturedClosureEnv = env.createClone();
    trxCommittedFunc.capturedClosureEnv = env.createClone();
    trxAbortedFunc.capturedClosureEnv = env.createClone();
    // Build the call to the transaction initiator in ballerina/transaction.
    PackageID packageID = new PackageID(Names.BALLERINA_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY);
    BPackageSymbol transactionPkgSymbol = new BPackageSymbol(packageID, null, 0);
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(transactionPkgSymbol),
                    TRX_INITIATOR_BEGIN_FUNCTION);
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
            getTransactionBlockId());
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
            trxOnRetryFunc,
            trxCommittedFunc, trxAbortedFunc);
    BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
            requiredArgs,
            Collections.emptyList(),
            symResolver);
    BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
    stmt.expr = trxInvocation;
    result = rewrite(stmt, env);
}
private String getTransactionBlockId() {
    // Unique id per transaction block: "<org>$<pkg>$<running index>".
    PackageID enclPkgId = env.enclPkg.packageID;
    return enclPkgId.orgName + "$" + enclPkgId.name + "$" + transactionIndex++;
}
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, BLangFunctionBody lambdaBody) {
    // Create the backing function with a unique, prefix-based name.
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction function = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambdaFunction.function = function;
    function.requiredParams.addAll(lambdaFunctionVariable);
    function.setReturnTypeNode(returnType);
    function.desugaredReturnType = true;
    // Defining the function populates its symbol, used for the type below.
    defineFunction(function, env.enclPkg);
    lambdaFunctionVariable = function.requiredParams;
    function.body = lambdaBody;
    function.desugared = false;
    lambdaFunction.pos = pos;
    // The lambda's invokable type mirrors the function's params and return.
    List<BType> paramTypes = new ArrayList<>();
    for (BLangSimpleVariable param : lambdaFunctionVariable) {
        paramTypes.add(param.symbol.type);
    }
    lambdaFunction.type = new BInvokableType(paramTypes, function.symbol.type.getReturnType(), null);
    return lambdaFunction;
}
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                 SymbolEnv env, Scope trxScope) {
    // Wrap the statements in a block body bound to the transaction scope,
    // desugar them in a fresh function-body environment, then build the lambda.
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    funcBody.scope = trxScope;
    funcBody.stmts = rewriteStmt(fnBodyStmts, SymbolEnv.createFuncBodyEnv(funcBody, env));
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, funcBody);
}
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 TypeNode returnType) {
    // Build a lambda shell with a uniquely named backing function; no body or
    // invokable type is attached here.
    BLangFunction function = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    function.setReturnTypeNode(returnType);
    function.desugaredReturnType = true;
    defineFunction(function, env.enclPkg);
    function.desugared = false;
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = function;
    lambdaFunction.pos = pos;
    return lambdaFunction;
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    // Enter the function's symbol into the target package's environment and
    // register the node at package level alongside the other functions.
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, pkgEnv);
    pkgEnv.enclPkg.functions.add(funcNode);
    pkgEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    // No desugaring here; fork-join passes through unchanged.
    result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
    // A byte-array literal is rewritten into an array literal of byte values;
    // every other literal passes through unchanged.
    boolean isByteArrayLiteral = literalExpr.type.tag == TypeTags.ARRAY
            && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE;
    if (isByteArrayLiteral) {
        result = rewriteBlobLiteral(literalExpr);
        return;
    }
    result = literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    // Split the literal text into its encoding tag and backtick-delimited
    // payload, then decode the payload accordingly.
    String[] blobParts = getBlobTextValue((String) literalExpr.value);
    byte[] decoded = BASE_64.equals(blobParts[0])
            ? Base64.getDecoder().decode(blobParts[1].getBytes(StandardCharsets.UTF_8))
            : hexStringToByteArray(blobParts[1]);
    // Build an equivalent array literal with one byte literal per value.
    BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteralNode.type = literalExpr.type;
    arrayLiteralNode.pos = literalExpr.pos;
    arrayLiteralNode.exprs = new ArrayList<>();
    for (byte value : decoded) {
        arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, value));
    }
    return arrayLiteralNode;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
    // Strip all spaces, then split at the backticks: element [0] is the
    // encoding tag before the first backtick, element [1] is the payload
    // between the first and last backticks.
    String compactText = blobLiteralNodeText.replaceAll(" ", "");
    int openTick = compactText.indexOf('`');
    int closeTick = compactText.lastIndexOf('`');
    return new String[]{compactText.substring(0, openTick), compactText.substring(openTick + 1, closeTick)};
}
private static byte[] hexStringToByteArray(String str) {
    // Pack two hex digits per byte; assumes a valid, even-length hex string
    // (an odd length fails on the charAt(i + 1) access, as before).
    int length = str.length();
    byte[] bytes = new byte[length / 2];
    for (int i = 0; i < length; i += 2) {
        int highNibble = Character.digit(str.charAt(i), 16);
        int lowNibble = Character.digit(str.charAt(i + 1), 16);
        bytes[i / 2] = (byte) ((highNibble << 4) + lowNibble);
    }
    return bytes;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    // Desugar members first, then re-emit the constructor as the concrete
    // literal node matching its type. Check order is significant: the JSON
    // type check must precede the JSON element-type check.
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    int typeTag = listConstructor.type.tag;
    if (typeTag == TypeTags.TUPLE) {
        result = rewriteExpr(
                new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type));
        return;
    }
    if (typeTag == TypeTags.JSON) {
        result = rewriteExpr(
                new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type)));
        return;
    }
    if (getElementType(listConstructor.type).tag == TypeTags.JSON) {
        result = rewriteExpr(new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type));
        return;
    }
    if (typeTag == TypeTags.TYPEDESC) {
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
        return;
    }
    result = rewriteExpr(new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type));
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    // Desugar members first; JSON-typed (or JSON-element) arrays get a
    // dedicated JSON array literal node.
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType literalType = arrayLiteral.type;
    if (literalType.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(literalType));
    } else if (getElementType(literalType).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, literalType);
    } else {
        result = arrayLiteral;
    }
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    // A tuple literal standing for a typedesc value desugars to a typedesc
    // expression instead of a constructed tuple.
    if (tupleLiteral.isTypedescExpr) {
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
        return;
    }
    // Give every member an implicit cast to `any`, based on its effective
    // (possibly already implicitly converted) type.
    for (BLangExpression memberExpr : tupleLiteral.exprs) {
        BType expType = memberExpr.impConversionExpr == null
                ? memberExpr.type
                : memberExpr.impConversionExpr.type;
        types.setImplicitCastExpr(memberExpr, expType, symTable.anyType);
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression is transparent unless it denotes a typedesc,
    // in which case it becomes a typedesc expression.
    if (!groupExpr.isTypedescExpr) {
        result = rewriteExpr(groupExpr.expression);
        return;
    }
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = groupExpr.typedescType;
    typedescExpr.type = symTable.typeDesc;
    result = rewriteExpr(typedescExpr);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    // Stable-sort computed-key fields to the end (statically-keyed fields
    // first), then desugar the mapping constructor as a whole.
    List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
    fields.sort((left, right) -> Boolean.compare(isComputedKey(left), isComputedKey(right)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // An XML namespace prefix used as a value desugars to a string-typed QName.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.type = symTable.stringType;
        result = qnameExpr;
        return;
    }
    // No symbol resolved: nothing to desugar.
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Swap a variable symbol for its original when one is recorded.
    // NOTE(review): grounded only in the `originalSymbol` field; presumably a
    // clone created during type narrowing - confirm against the narrowing code.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    // Select the concrete var-ref node from the symbol kind and its owner.
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Constants with simple literal types are inlined as literals here.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
    }
    genVarRefExpr.type = varRefExpr.type;
    genVarRefExpr.pos = varRefExpr.pos;
    // LHS refs and `_` keep the symbol's declared type; no conversion added.
    if ((varRefExpr.lhsVar)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.lhsVar = varRefExpr.lhsVar;
        genVarRefExpr.type = varRefExpr.symbol.type;
        result = genVarRefExpr;
        return;
    }
    genVarRefExpr.lhsVar = varRefExpr.lhsVar;
    // RHS refs read with the declared type, then convert to the expected type.
    BType targetType = genVarRefExpr.type;
    genVarRefExpr.type = genVarRefExpr.symbol.type;
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Safe-navigation accesses (a?.b chains) take a separate rewrite path.
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    // Otherwise replace the generic field access with a node specialized for
    // the receiver type (object, record, lax/JSON, map, or XML).
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = fieldAccessExpr.expr.type;
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value);
    int varRefTypeTag = varRefType.tag;
    // NOTE(review): for unions, only the first member type is inspected here -
    // presumably the union members are homogeneous at this point; confirm.
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        // Attached invokable fields become function refs; plain fields become
        // struct field accesses.
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (types.isLax(varRefType)) {
        // Lax typed receivers: XML goes to attribute/element-name access,
        // maps of XML get a dedicated rewrite, everything else is JSON access.
        if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
            if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    targetVarRef.lhsVar = fieldAccessExpr.lhsVar;
    targetVarRef.type = fieldAccessExpr.type;
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
// Rewrites a lax access `m.key` on a map into a statement expression:
//   (T|error) $mapAccessResult$;
//   (T|())    $mapAccess = m["key"];
//   if ($mapAccess is ()) { $mapAccessResult$ = error("{map}InvalidKey", key = "key"); }
//   else                  { $mapAccessResult$ = $mapAccess; }
//   => $mapAccessResult$
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.type, symTable.errorType);
    DiagnosticPos pos = fieldAccessExpr.pos;
    // Holds the final (value|error) result of the whole expression.
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.type = fieldAccessType;
    statementExpression.type = fieldAccessType;
    // Perform the raw map access; its result is nillable (key may be absent).
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType);
    mapAccessExpr.type = xmlOrNil;
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);
    // Branch on whether the access produced nil (missing key).
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;
    // Missing key: construct the `{ballerina/lang.map}InvalidKey` error with
    // the failing key as a named arg.
    BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue("error");
    errorInvocation.name = name;
    errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    errorInvocation.symbol = symTable.errorConstructor;
    errorInvocation.type = symTable.errorType;
    ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>();
    errorInvocation.requiredArgs = errorCtorArgs;
    errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    errorCtorArgs.add(message);
    BLangSimpleVariableDef errorDef =
            createVarDef("_$_invalid_key_error", symTable.errorType, errorInvocation, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;
    // Present key: forward the accessed value into the result variable.
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;
    statementExpression.expr = resultRef;
    return statementExpression;
}
private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    String fieldName = fieldAccessExpr.field.value;
    // A ns-prefixed field access expands the name to "{namespaceURI}field".
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsAccess.nsSymbol.namespaceURI, fieldName);
    }
    // `x._` reads the element name rather than an attribute.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    // Otherwise call the internal attribute getter with the (expanded) name
    // and a flag indicating whether this was an optional access.
    ArrayList<BLangExpression> args = new ArrayList<>();
    args.add(createStringLiteral(fieldAccessExpr.field.pos, fieldName));
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
// Materializes the optional-access flag of a field access as a desugared
// boolean literal expression.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    return rewrite(
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env);
}
private String createExpandedQName(String nsURI, String localName) {
    // Expanded QName form: "{namespaceURI}localName".
    return new StringBuilder("{").append(nsURI).append('}').append(localName).toString();
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Safe-navigation accesses take a separate rewrite path.
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    // Otherwise replace the generic index access with a node specialized for
    // the receiver type (map, mapping, list, string, or XML).
    BLangVariableReference targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    BType varRefType = indexAccessExpr.expr.type;
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like (mapping) receivers keep the resolved field symbol.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    targetVarRef.lhsVar = indexAccessExpr.lhsVar;
    targetVarRef.type = indexAccessExpr.type;
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
    // Desugars an invocation: rewrites arguments, handles error constructors
    // and function-pointer calls, and converts object/record method calls
    // into attached-function invocations with the receiver as first argument.
    BLangInvocation genIExpr = iExpr;
    if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
        // NOTE(review): processing deliberately continues after this rewrite;
        // the mutated iExpr flows through the remaining steps.
        result = rewriteErrorConstructor(iExpr);
    }
    // Named/defaultable arguments are normalized to positional form first.
    reorderArguments(iExpr);
    iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs);
    fixNonRestArgTypeCastInTypeParamInvocation(iExpr);
    iExpr.restArgs = rewriteExprs(iExpr.restArgs);
    annotationDesugar.defineStatementAnnotations(iExpr.annAttachments, iExpr.pos, iExpr.symbol.pkgID,
            iExpr.symbol.owner, env);
    if (iExpr.functionPointerInvocation) {
        visitFunctionPointerInvocation(iExpr);
        return;
    }
    iExpr.expr = rewriteExpr(iExpr.expr);
    result = genIExpr;
    if (iExpr.expr == null) {
        fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
        if (iExpr.exprSymbol == null) {
            return;
        }
        // Implicit receiver (e.g. a method called on `self`): materialize it.
        iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol);
        iExpr.expr = rewriteExpr(iExpr.expr);
    }
    switch (iExpr.expr.type.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!iExpr.langLibInvocation) {
                // Attached functions receive the receiver as argument 0.
                List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs);
                argExprs.add(0, iExpr.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol,
                                iExpr.type, iExpr.expr, iExpr.async);
                attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation;
                attachedFunctionInvocation.name = iExpr.name;
                attachedFunctionInvocation.annAttachments = iExpr.annAttachments;
                result = genIExpr = attachedFunctionInvocation;
            }
            break;
    }
    fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
}
// For langlib invocations, casts each non-receiver required argument to the
// corresponding declared parameter type.
// NOTE(review): the loop starts at index 1 — presumably index 0 is the langlib
// receiver argument, which keeps its own type; confirm against reorderArguments.
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    for (int i = 1; i < requiredArgs.size(); i++) {
        requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type));
    }
}
// For langlib calls or invokables whose return type contains a type parameter,
// re-types the invocation to the symbol's declared return type and wraps it in
// a conversion back to the originally inferred type, so the runtime value is
// narrowed correctly at the call site.
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) {
        BType originalInvType = genIExpr.type;
        genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType;
        BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType);
        // If a conversion was actually inserted, that wrapper is the result.
        if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
            this.result = expr;
            return;
        }
        // Otherwise force an explicit conversion node to restore the original type.
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = genIExpr;
        conversionExpr.targetType = originalInvType;
        conversionExpr.type = originalInvType;
        conversionExpr.pos = genIExpr.pos;
        this.result = conversionExpr;
    }
}
// Normalizes an `error(reason, ...details)` constructor invocation:
// the reason argument is cast to string and moved to the end of requiredArgs,
// and all named detail arguments are folded into a single (readonly-cloned)
// record literal appended as the final argument.
private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) {
    BLangExpression reasonExpr = iExpr.requiredArgs.get(0);
    // Drop a stale implicit conversion so the string cast below takes effect.
    if (reasonExpr.impConversionExpr != null &&
            reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) {
        reasonExpr.impConversionExpr = null;
    }
    reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType);
    reasonExpr = rewriteExpr(reasonExpr);
    iExpr.requiredArgs.remove(0);
    iExpr.requiredArgs.add(reasonExpr);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos,
            ((BErrorType) iExpr.symbol.type).detailType);
    // Snapshot the named args first: they are removed from requiredArgs below.
    List<BLangExpression> namedArgs = iExpr.requiredArgs.stream()
            .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .collect(Collectors.toList());
    if (namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type);
    } else {
        for (BLangExpression arg : namedArgs) {
            BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            // Closed detail records take `any`-typed values; otherwise keep the arg's own type.
            if (recordLiteral.type.tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type);
            }
            recordLiteral.fields.add(member);
            iExpr.requiredArgs.remove(arg);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), ((BErrorType) iExpr.symbol.type).detailType);
    }
    iExpr.requiredArgs.add(errorDetail);
    return iExpr;
}
/**
 * Desugars a {@code new ...} expression. Stream construction has a dedicated
 * internal-call form; every other type init goes through object-init desugaring.
 */
public void visit(BLangTypeInit typeInitExpr) {
    boolean isStreamInit = typeInitExpr.type.tag == TypeTags.STREAM;
    if (isStreamInit) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
        return;
    }
    result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
}
// Desugars `new T(args)` into a statement expression:
//   T $obj$ = <typeInitExpr>;            // allocation
//   var $temp$ = $obj$.init(args);       // only when init can return error
//   T|error $result$;
//   if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
//   => $result$
// When init returns nil, the init call is emitted as a plain statement and
// $obj$ itself is the value of the statement expression.
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BType objType = getObjectType(typeInitExpr.type);
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    // Wire the generated initializer so it is invoked on the fresh object.
    typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    // Fast path: init() cannot fail, so no error branching is needed.
    if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitExpr.initInvocation;
        typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.type = objVarRef.symbol.type;
        return stmtExpr;
    }
    // init() may return an error: capture its result, then branch on `is error`.
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type,
            typeInitExpr.initInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    // then-branch: $result$ = $temp$ (the error)
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    // else-branch: $result$ = $obj$ (the successfully initialized object)
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = resultVarRef.symbol.type;
    return stmtExpr;
}
// Desugars `new stream<T, E>(iterator)` into a call to the internal
// constructStream(typedesc-of-T, iterator) helper, typed as stream<T, E>.
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BType targetType = ((BStreamType) typeInitExpr.type).constraint;
    BType errorType = ((BStreamType) typeInitExpr.type).error;
    // First argument: a typedesc value carrying the stream's constraint type.
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    // Second argument: the iterator object supplied by the user.
    BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0);
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, new ArrayList<>(Lists.of(typedescExpr, iteratorObj)),
            symResolver);
    streamConstructInvocation.type = new BStreamType(TypeTags.STREAM, targetType, errorType, null);
    return streamConstructInvocation;
}
// Creates a simple variable definition `type name = expr` at the given position,
// reusing an existing symbol from the main symbol space when one is found.
// NOTE(review): the symbol is looked up/created under `name` but the variable
// node itself is named "$" + name + "$" — callers appear to pass already-mangled
// names like "$obj$", which would double-wrap; confirm this is intentional.
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
    BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    if (objSym == null || objSym == symTable.notFoundSymbol) {
        objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr,
            (BVarSymbol) objSym);
    BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos);
    objVarDef.var = objVar;
    objVarDef.type = objVar.type;
    return objVarDef;
}
/**
 * Extracts the object type from an init-expression type: returns the type
 * itself if it is already an object, or the first object member of a union
 * (e.g. {@code Obj|error} produced by a failable init). Throws when neither
 * case applies, since that indicates a broken object-init context.
 */
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
/** Creates a fresh AST type node representing the built-in {@code error} type (used in `is error` tests). */
BLangErrorType getErrorTypeNode() {
    BLangErrorType node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.type = symTable.errorType;
    return node;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    // Desugar `cond ? thenExpr : elseExpr` into a statement expression:
    //   T $ternary_result$;
    //   if (cond) { $ternary_result$ = thenExpr; } else { $ternary_result$ = elseExpr; }
    //   => $ternary_result$
    DiagnosticPos pos = ternaryExpr.pos;
    BLangSimpleVariableDef resultDef = createVarDef("$ternary_result$", ternaryExpr.type, null, pos);
    BLangBlockStmt thenBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangBlockStmt elseBlock = ASTBuilderUtil.createBlockStmt(pos);
    // then: $ternary_result$ = thenExpr
    BLangSimpleVarRef thenRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    thenBlock.addStatement(ASTBuilderUtil.createAssignmentStmt(pos, thenRef, ternaryExpr.thenExpr));
    // else: $ternary_result$ = elseExpr
    BLangSimpleVarRef elseRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    elseBlock.addStatement(ASTBuilderUtil.createAssignmentStmt(pos, elseRef, ternaryExpr.elseExpr));
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(pos, ternaryExpr.expr, thenBlock, elseBlock);
    BLangBlockStmt wrapperBlock = ASTBuilderUtil.createBlockStmt(pos, Lists.of(resultDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(wrapperBlock, resultRef);
    stmtExpr.type = ternaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    // `wait a|b|c` arrives as a binary-expression tree; flatten it into the
    // list of futures being waited on. A single future becomes a one-item list.
    BLangExpression waitedOn = waitExpr.getExpression();
    if (waitedOn.getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedOn, new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedOn));
    }
    result = waitExpr;
}
// In-order traversal of a `wait a|b|...` alternative tree: collects the
// rewritten leaf expressions into `exprs`, left subtree before right.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}
// Recurses into nested alternatives; non-binary leaves are rewritten and accumulated.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() == NodeKind.BINARY_EXPR) {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
    } else {
        expr = rewriteExpr(expr);
        exprs.add(expr);
    }
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // `wait {a: f1, b: f2}` — rewrite each pair's future expression. When no
    // explicit value is given, the key expression doubles as the future.
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr != null) {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        } else {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        }
    });
    // Replace the node with its desugared wait-literal form.
    BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
    result = rewriteExpr(expr);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    // Rewrite the trapped expression; a non-nil result is widened to the trap
    // expression's own type (typically `T|error`).
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    boolean isNilTyped = trapExpr.expr.type.tag == TypeTags.NIL;
    if (!isNilTyped) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type);
    }
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Desugars binary expressions: range operators become int-range calls,
    // logical AND/OR get short-circuit handling, and mismatched operand types
    // are reconciled with explicit casts (byte↔int, string/xml concat,
    // decimal/float widening). Order of the checks below is significant.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            // a ..< b is modelled as a ... (b - 1).
            binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
        return;
    }
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    // Arithmetic/bitwise operators with a byte operand may need int widening.
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
    // Equality between byte and int compares as int on both sides.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
                                                     binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
                                                     binaryExpr.opKind == OperatorKind.REF_EQUAL ||
                                                     binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    // Same operand types: nothing left to reconcile.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        return;
    }
    // string + x: either lift the string side to xml text, or cast x to string.
    if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        return;
    }
    // Numeric widening: decimal dominates float, float dominates int/byte.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
    }
}
/** Replaces a range operator with a call to the internal {@code createIntRange(lhs, rhs)} helper. */
private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) {
    BInvokableSymbol rangeCtorSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    BLangInvocation rangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(pos, rangeCtorSymbol,
            new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
    rangeInvocation.type = symTable.intRangeType;
    return rangeInvocation;
}
/**
 * When a byte operand takes part in an int-typed arithmetic/bitwise binary
 * expression, inserts an explicit byte-to-int conversion on that operand.
 * No-op when there is no expected type, no byte operand, or the expected
 * result type is not int.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    boolean rhsIsByte = binaryExpr.rhsExpr.type.tag == TypeTags.BYTE;
    boolean lhsIsByte = binaryExpr.lhsExpr.type.tag == TypeTags.BYTE;
    if (!rhsIsByte && !lhsIsByte) {
        return;
    }
    if (binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsIsByte) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsIsByte) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Returns true when the given binary expression uses a shift operator
 * ({@code <<}, {@code >>} or {@code >>>}). For such expressions both operands
 * are subsequently converted to 'int', e.g.:
 * <pre>
 * byte a = 12; byte b = 34; int i = 234;
 * int i1 = a &gt;&gt; b;   // true
 * int i2 = a &lt;&lt; i;   // true
 * </pre>
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars {@code lhs ?: rhs} into a match expression over {@code lhs} with a
 * null-pattern clause yielding the (rewritten) {@code rhs}.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    BLangExpression defaultExpr = rewriteExpr(elvisExpr.rhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos, defaultExpr));
    matchExpr.type = elvisExpr.type;
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // `~x` has no direct runtime operator; it is rewritten to an XOR binary
    // expression. Every other unary operator just needs its operand rewritten.
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * Desugars a bitwise complement (~) unary expression into a bitwise XOR
 * against an all-ones mask of the operand's width:
 * <pre>
 * ~a (byte) -&gt; a ^ 0xff
 * ~a (int)  -&gt; a ^ -1
 * </pre>
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final DiagnosticPos pos = unaryExpr.pos;
    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    boolean isByte = TypeTags.BYTE == unaryExpr.type.tag;
    if (isByte) {
        xorExpr.type = symTable.byteType;
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        xorExpr.type = symTable.intType;
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(xorExpr);
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // An annotated conversion without a target type node is a no-op wrapper
    // (annotations only); unwrap to the inner expression.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda at package level so later phases can find it;
    // the node itself needs no further desugaring here.
    env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    result = bLangLambdaFunction;
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Desugars an arrow function `(a, b) => expr` into a full lambda: builds a
    // BLangFunction with the arrow's params and an inferred return type, creates
    // and wires its symbol/scope, then registers the function in the package.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type is the static type of the arrow body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.type = bLangArrowFunction.body.expr.type;
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.type = bLangArrowFunction.funcType;
    // Create the invokable symbol and a function env, then define the params in it.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
            env.enclEnv.enclVarSym, true);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // peek() intentionally mutates each param symbol while collecting them.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.type;
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.type, null);
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Capture the current env so closures resolve correctly when rewritten.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}
// Attaches the symbol to the invokable node and gives both the symbol and the
// enclosing function env a fresh scope owned by that symbol.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // QNames need no desugaring; pass through unchanged.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Both the attribute name and its value may contain template expressions.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    // Rewrites the element's tag names, children, and attributes, then lifts
    // xmlns-declaring attributes into inline namespace declarations.
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-level elements get package xmlns nodes; others local ones.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single string-concatenation expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Same folding for comment content fragments.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Rewrite the PI target, then fold its data fragments into one expression.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Quoted attribute values fold the same way as text literals.
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // `string `...${e}...`` becomes a chain of string concatenations.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // The sent value is cloned so the receiving worker gets an isolated copy.
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type);
    if (workerSendNode.keyExpr != null) {
        workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
    }
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Synchronous send also clones the payload before handing it over.
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Only the optional channel key needs rewriting on the receive side.
    if (workerReceiveNode.keyExpr != null) {
        workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
    }
    result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Flush targets are the distinct workers of the cached send statements.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    // Rewrites `x@[...]` / `x@` access. A QName index is flagged as XML usage
    // so later phases resolve it against in-scope namespaces.
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    xmlAttributeAccessExpr.desugared = true;
    // Lvalue or indexed access stays as-is; a bare `x@` read is rewritten again
    // now that the node is marked desugared.
    if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}
// The following visit methods handle nodes that are already in their final
// desugared form; each simply passes the node through unchanged.
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // `x.<name>` becomes a langlib getElements(x, ...expandedFilterNames) call.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}
// Converts xml element filters into string-literal arguments holding expanded
// names ("{uri}local"). A filter whose prefix resolves to a namespace symbol is
// expanded with that namespace; otherwise the in-scope default namespace is
// applied — except for the `*` wildcard, which stays as-is.
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}
// Builds an invocation of an xml langlib method: the receiver is rewritten and
// passed both as the invocation's `expr` and as the first required argument
// (langlib convention), followed by `args`; `restArgs` become rest arguments.
private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = invokeOnExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType();
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    // Desugars xml navigation (`x/**/<e>`, `x/*`, `x/<e>[i]`) into the
    // corresponding internal langlib calls.
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
        result = rewriteExpr(invocationNode);
    } else {
        // Filtered-children access: -1 signals "no index" to the helper.
        BLangExpression childIndexExpr;
        if (xmlNavigation.childIndex == null) {
            childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
        } else {
            childIndexExpr = xmlNavigation.childIndex;
        }
        ArrayList<BLangExpression> args = new ArrayList<>();
        args.add(rewriteExpr(childIndexExpr));
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
        result = rewriteExpr(invocationNode);
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the LHS expression needs desugaring; the target type is carried on the node.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already a desugared function-pointer invocation; pass through unchanged.
    result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    // Desugar the referenced type node; the typedesc expression itself is kept.
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    BLangExpression start = intRangeExpression.startExpr;
    BLangExpression end = intRangeExpression.endExpr;

    // An exclusive bound is first converted into its inclusive equivalent,
    // then both bound expressions are desugared.
    if (!intRangeExpression.includeStart) {
        start = getModifiedIntRangeStartExpr(start);
    }
    if (!intRangeExpression.includeEnd) {
        end = getModifiedIntRangeEndExpr(end);
    }

    intRangeExpression.startExpr = rewriteExpr(start);
    intRangeExpression.endExpr = rewriteExpr(end);
    result = intRangeExpression;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // A rest-args (`...x`) expression desugars to just its underlying expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named argument desugars to its value expression; the name itself is
    // handled during argument reordering (see reorderNamedArgs).
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    addMatchExprDefaultCase(bLangMatchExpression);

    // Temp variable each pattern clause assigns its (converted) result into.
    String tempVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            tempVarName, bLangMatchExpression.type, null,
            new BVarSymbol(0, names.fromString(tempVarName), this.env.scope.owner.pkgID,
                    bLangMatchExpression.type, this.env.scope.owner));
    BLangSimpleVariableDef tempVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempVar);
    tempVarDef.desugared = true;

    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempVarDef));

    // Turn every expression-level clause into a statement-level clause whose
    // body assigns the clause's (type-converted) expression to the temp var.
    List<BLangMatchTypedBindingPatternClause> clauses = new ArrayList<>();
    for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
        pattern.expr = rewriteExpr(pattern.expr);
        BLangVariableReference tempRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempVar.symbol);
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempRef.type);
        BLangAssignment assignment =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignment));
        clauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }

    blockStmt.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos,
            bLangMatchExpression.expr, clauses));

    // The whole match expression becomes `{ <match stmt>; } -> tempVar`.
    BLangVariableReference resultRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempVar.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultRef);
    stmtExpr.type = bLangMatchExpression.type;
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // `check` expression: on error, may return from the enclosing function
    // (see visitCheckAndCheckPanicExpr / getSafeAssignErrorPattern).
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic` expression: the error branch always panics.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Desugars {@code check}/{@code checkpanic} into a temp variable plus a match
 * statement with a success pattern (assigns into the temp var) and an error
 * pattern (returns or panics), wrapped in a statement expression yielding the
 * temp variable.
 *
 * @param checkedExpr  the check / checkpanic expression being desugared
 * @param isCheckPanic true for {@code checkpanic}, whose error branch always panics
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos,
            checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0,
                    names.fromString(checkedExprVarName),
                    this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;

    BLangMatchTypedBindingPatternClause patternSuccessCase =
            getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                    checkedExprVar.symbol, null);
    BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
            this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic);

    // Plain lists instead of double-brace initialization: `new ArrayList<>() {{ ... }}`
    // creates an anonymous subclass per use-site that captures `this` for no benefit.
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
    patternClauses.add(patternSuccessCase);
    patternClauses.add(patternErrorCase);
    BLangMatch matchStmt =
            ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr, patternClauses);

    List<BLangStatement> generatedStmts = new ArrayList<>();
    generatedStmts.add(checkedExprVarDef);
    generatedStmts.add(matchStmt);
    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos, generatedStmts);

    // The whole check expression evaluates to the temp variable's value.
    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);

    BLangStatementExpression statementExpr = createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.type = checkedExpr.type;
    result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor desugars to a type-init of the service's generated
    // type definition; annotation attachments are desugared as a side effect.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.type)) {
        // BUG FIX: the result of addConversionExprIfRequired was previously
        // discarded, making this call a no-op. Assign it back so value-typed
        // operands are wrapped (boxed to `any`) before the type test.
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Annotation access desugars into a binary expression using the
    // ANNOT_ACCESS operator; the RHS is the annotation's alias as a string.
    BLangLiteral annotAlias = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());

    BLangBinaryExpr accessExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    accessExpr.pos = annotAccessExpr.pos;
    accessExpr.opKind = OperatorKind.ANNOT_ACCESS;
    accessExpr.lhsExpr = annotAccessExpr.expr;
    accessExpr.rhsExpr = annotAlias;
    accessExpr.type = annotAccessExpr.type;
    accessExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(accessExpr.lhsExpr.type, accessExpr.rhsExpr.type),
                    annotAccessExpr.type, null), null);
    result = rewriteExpr(accessExpr);
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    // Only the checked expression needs desugaring.
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    // Desugar the result expression first, then the backing statement.
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Query expressions are lowered by the dedicated query desugar, then the
    // resulting statement expression is run through this visitor.
    BLangStatementExpression stmtExpr = queryDesugar.desugarQueryExpr(queryExpr, env);
    result = rewrite(stmtExpr, env);
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Query actions are lowered by the dedicated query desugar, then the
    // resulting statement expression is run through this visitor.
    BLangStatementExpression stmtExpr = queryDesugar.desugarQueryAction(queryAction, env);
    result = rewrite(stmtExpr, env);
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    // Desugar each member expression; the literal node itself is unchanged.
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    int literalTypeTag = constSymbol.literalType.tag;

    // Simple-typed constants (int..boolean, nil) are replaced with a literal
    // built from the symbol's resolved value; other constants keep their
    // original expression, desugared.
    boolean isSimpleLiteral = literalTypeTag <= TypeTags.BOOLEAN || literalTypeTag == TypeTags.NIL;
    if (isSimpleLiteral) {
        // A non-nil simple constant must carry a resolved value by this phase.
        if (literalTypeTag != TypeTags.NIL && constSymbol.value.value == null) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Placeholder expression; nothing to desugar.
    result = ignoreExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    // A constant reference is replaced by a literal of its resolved value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value);
}
/**
 * Builds the {@code $iterator$ = <collection>.iterator()} variable definition
 * used by desugared iteration.
 */
BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef collectionRef = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);

    // Invocation of the iterator function, with the collection as receiver
    // and (per convention) also as the single argument.
    BLangInvocation iteratorCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorCall.pos = pos;
    iteratorCall.expr = collectionRef;
    iteratorCall.symbol = iteratorInvokableSymbol;
    iteratorCall.type = iteratorInvokableSymbol.retType;
    iteratorCall.argExprs = Lists.of(collectionRef);
    iteratorCall.requiredArgs = iteratorCall.argExprs;
    iteratorCall.langLibInvocation = isIteratorFuncFromLangLib;

    BVarSymbol iteratorVarSymbol = new BVarSymbol(0, names.fromString("$iterator$"),
            this.env.scope.owner.pkgID, iteratorInvokableSymbol.retType, this.env.scope.owner);
    BLangSimpleVariable iteratorVar = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorCall, iteratorVarSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVar);
}
/**
 * Builds the {@code $result$ = $iterator$.next()} variable definition used by
 * desugared iteration.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangSimpleVariable resultVar = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, createIteratorNextInvocation(pos, iteratorSymbol), resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVar);
}
// Builds the `$result$ = $iterator$.next()` assignment used by subsequent
// loop iterations.
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos,
                                          BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Narrow the iterator expression's type via getSafeType(.., true, false).
    // NOTE(review): presumably this lifts nil from the type — confirm against
    // Types.getSafeType's parameter semantics.
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}
/**
 * Builds the {@code <iterator>.next()} invocation used to advance a desugared loop.
 */
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
    // Resolve the `next` function attached to the iterator object's type.
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;

    BLangInvocation nextCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextCall.pos = pos;
    nextCall.name = ASTBuilderUtil.createIdentifier(pos, "next");
    nextCall.expr = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // The iterator is also passed as the first required argument.
    nextCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextCall.argExprs = nextCall.requiredArgs;
    nextCall.symbol = nextFuncSymbol;
    nextCall.type = nextFuncSymbol.retType;
    return nextCall;
}
/**
 * Finds the attached function named "next" on the given iterator object type,
 * or returns null when none exists.
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    return objectSymbol.attachedFuncs.stream()
            .filter(attachedFunc -> attachedFunc.funcName.value.equals("next"))
            .findFirst()
            .orElse(null);
}
/**
 * Builds the {@code $result$.value} field access, typed as the loop variable's type.
 */
BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangFieldBasedAccess valueAccess = ASTBuilderUtil.createFieldAccessExpr(resultRef,
            ASTBuilderUtil.createIdentifier(pos, "value"));
    valueAccess.pos = pos;
    valueAccess.type = varType;
    valueAccess.originalType = valueAccess.type;
    return valueAccess;
}
/**
 * Wraps an arrow function's single expression into a block body containing
 * {@code return <expr>;}.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BLangExpression bodyExpr = bLangArrowFunction.body.expr;

    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bodyExpr.pos;
    returnStmt.setExpression(bodyExpr);

    BlockFunctionBodyNode bodyBlock = TreeBuilder.createBlockFunctionBodyNode();
    bodyBlock.addStatement(returnStmt);
    return bodyBlock;
}
/**
 * Builds an invocation of a function resolved from the root scope by name.
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangIdentifier funcName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcName.setLiteral(false);
    funcName.setValue(functionName);

    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.name = funcName;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Resolve from the root (built-in) scope.
    invocation.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocation.type = retType;
    invocation.requiredArgs = args;
    return invocation;
}
/**
 * Builds a lang-lib invocation resolved against the receiver expression's type.
 * The receiver becomes the first required argument, followed by {@code args}.
 *
 * @param retType explicit return type, or null to use the resolved symbol's return type
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    DiagnosticPos pos) {
    BLangIdentifier funcName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcName.setLiteral(false);
    funcName.setValue(functionName);
    funcName.pos = pos;

    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = pos;
    invocation.name = funcName;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.expr = onExpr;
    invocation.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName));

    // The receiver is always the first required argument of a lang-lib call.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>(args.size() + 1);
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocation.requiredArgs = requiredArgs;

    if (retType != null) {
        invocation.type = retType;
    } else {
        invocation.type = ((BInvokableSymbol) invocation.symbol).retType;
    }
    invocation.langLibInvocation = true;
    return invocation;
}
// Creates an empty array literal node typed `any[]`.
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.type = new BArrayType(symTable.anyType);
    return expr;
}
/**
 * Desugars an invocation through a function pointer: builds a reference to the
 * pointer (simple var ref, or a field access when there is a receiver), then
 * wraps the call in a {@code BFunctionPointerInvocation}.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangVariableReference pointerRef;
    if (iExpr.expr == null) {
        pointerRef = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldAccess = new BLangFieldBasedAccess();
        fieldAccess.expr = iExpr.expr;
        fieldAccess.field = iExpr.name;
        pointerRef = fieldAccess;
    }
    pointerRef.symbol = iExpr.symbol;
    pointerRef.type = iExpr.symbol.type;

    result = new BFunctionPointerInvocation(iExpr, rewriteExpr(pointerRef));
}
/**
 * Wraps the expression in a {@code clone()} lang-lib call, converted to the
 * LHS type. Value-typed and error-typed expressions are returned unchanged.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    // No clone call is generated for value types or errors.
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall =
            createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Wraps the expression in a {@code cloneReadOnly()} lang-lib call, converted to
 * the LHS type. Value-typed and error-typed expressions are returned unchanged.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    // No cloneReadOnly call is generated for value types or errors.
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall =
            createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), expr.type, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
// Dispatches a node through this visitor under the given env and returns the
// desugared replacement that the visit method left in `this.result`.
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    // Nodes are desugared at most once.
    if (node.desugared) {
        return node;
    }

    // Save/restore the current env around the dispatch. Note: not restored if
    // a visit method throws — acceptable since desugaring aborts on exceptions.
    SymbolEnv previousEnv = this.env;
    this.env = env;

    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;

    this.env = previousEnv;
    return (E) resultNode;
}
// Expression counterpart of rewrite(): dispatches through the visitor (under
// the current env) and returns the desugared node from `this.result`.
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }

    BLangExpression expr = node;
    // If an implicit conversion wrapper is attached, desugar the wrapper
    // instead, and detach it so the conversion is not applied twice.
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }

    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
// Statement counterpart of rewrite(): additionally threads a parent/child
// statement-link chain so nested desugaring can find its enclosing statement.
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    // Push a new link for this statement.
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    // Attach the link to the desugared statement and pop back to the parent.
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}
// Desugars every statement of the list in place and returns the same list.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
// Desugars every node of the list in place and returns the same list.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
// Desugars every expression of the list in place and returns the same list.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
// Creates a string literal node at the given position.
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
    stringLit.pos = pos;
    return stringLit;
}
// Creates an int literal node. Note: no position is set by this helper.
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.type = symTable.intType;
    return literal;
}
// Creates a byte literal node; the byte is widened to its unsigned int value.
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}
// Wraps `expr` in a type-conversion node casting it to `targetType`.
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.type = targetType;
    conversionExpr.targetType = targetType;
    return conversionExpr;
}
/**
 * Unwraps nested array types down to the ultimate (non-array) element type.
 * Non-array types are returned as-is.
 */
private BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
// Appends an implicit `return ();` to a function that may fall off the end of
// its body: block-bodied, non-native, no workers, nilable return type, and the
// last statement is not already a return.
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    // Native functions and non-block (e.g. expression) bodies get no synthetic return.
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }

    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable()
            && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        DiagnosticPos invPos = invokableNode.pos;
        // Position the synthetic return at the function's closing line.
        DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol,
                invPos.sCol);
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature,
 * resolving named args, packing positional rest args into an array, and
 * handling spread (`...x`) expressions.
 *
 * @param iExpr Function invocation expression whose arguments are reordered
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;

    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }

    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;

    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();

    // When a spread vararg needs to fill required params, it is evaluated once
    // into a synthetic variable so individual members can be read from it.
    BLangExpression varargRef = null;
    BLangBlockStmt blockStmt = null;

    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // The last rest arg is a spread expression and some required params are
        // still unfilled: capture the spread source in a temp variable.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        DiagnosticPos varargExpPos = expr.pos;
        BType varargVarType = expr.type;
        String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);

        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);

        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.type = varargVarType;

        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }

    if (!invokableSymbol.params.isEmpty()) {
        // Put named args in declared-parameter order and fill unsupplied
        // params (from the vararg temp, if present).
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }

    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        // No spread expression: pack the individual rest args into an array
        // literal for the rest param, if the function has one.
        if (invokableSymbol.restParam == null) {
            return;
        }

        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();

        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;

        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }

        arrayLiteral.exprs = exprs;
        arrayLiteral.type = arrayType;

        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }

        iExpr.restArgs.add(arrayLiteral);
        return;
    }

    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        // Only a spread expression was supplied.
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // No members were pulled out of the vararg; nothing more to do.
            return;
        }

        // Members of the vararg were consumed for required params: evaluate the
        // temp var definition before the first argument via a statement expression.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.type = firstNonRestArg.type;
        iExpr.requiredArgs.add(0, stmtExpression);

        if (invokableSymbol.restParam == null) {
            return;
        }

        // The remainder of the vararg (past the consumed members) becomes the
        // rest argument via a slice() lang-lib call.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation sliceInvocation =
                createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef,
                        new ArrayList<BLangExpression>() {{
                            add(startIndex);
                        }},
                        varargRef.type, varargRef.pos);
        restArgs.remove(0);
        restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type));
        return;
    }

    // Mixed case: individual rest args followed by a spread expression. Build
    // an array from the individual args, then push() the spread's members onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.type = restParamType;

    BType elemType = restParamType.eType;
    DiagnosticPos pos = restArgs.get(0).pos;

    List<BLangExpression> exprs = new ArrayList<>();

    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;

    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);

    String name = DESUGARED_VARARG_KEY + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
            this.env.scope.owner);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = restParamType;

    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);

    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    // push() takes the spread as a rest argument, not a required one.
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;

    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.type = restParamType;

    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
// Rebuilds iExpr.requiredArgs in declared-parameter order: positional args
// stay in place, named args are matched by parameter name, and unsupplied
// params are filled from the vararg temp (or an ignore-expression placeholder).
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new HashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));

    List<BVarSymbol> params = invokableSymbol.params;

    // Index of the next vararg member to consume for an unsupplied parameter.
    int varargIndex = 0;

    BType varargType = null;
    boolean tupleTypedVararg = false;

    if (varargRef != null) {
        varargType = varargRef.type;
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }

    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // A positional argument was supplied at this position.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            // A named argument matches this parameter.
            args.add(namedArgs.get(param.name.value));
        } else if (varargRef == null) {
            // No value supplied: insert an ignore-expression placeholder.
            // NOTE(review): presumably a later phase substitutes the parameter's
            // default value for BLangIgnoreExpr — confirm.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.type = param.type;
            args.add(expr);
        } else {
            // Pull the next member out of the vararg: varargRef[varargIndex],
            // typed per-member for tuples or by element type for arrays.
            BLangIndexBasedAccess memberAccessExpr =
                    (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            memberAccessExpr.pos = varargRef.pos;
            memberAccessExpr.expr = varargRef;
            memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex));
            memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) :
                    ((BArrayType) varargType).eType;
            varargIndex++;
            args.add(addConversionExprIfRequired(memberAccessExpr, param.type));
        }
    }
    iExpr.requiredArgs = args;
}
// Builds the error-branch pattern for check/checkpanic desugaring:
// `error $t_failure => { return $t_failure; }` when every possible error type
// is assignable to the enclosing function's return type (and this is not a
// checkpanic), otherwise `... => { panic $t_failure; }`.
private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(
        DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) {
    BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<BType>() {{
                add(enclosingFuncReturnType);
            }};

    // Returning (rather than panicking) is only valid when each possible error
    // type can be returned from the enclosing function.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));

    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos,
            patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0,
                    names.fromString(patternFailureCaseVarName),
                    this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));

    BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
            patternFailureCaseVar.symbol);

    BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
    patternBlockFailureCase.pos = pos;

    if (!isCheckPanicExpr && returnOnError) {
        // Propagate the error by returning it from the enclosing function.
        BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
        returnStmt.pos = pos;
        returnStmt.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(returnStmt);
    } else {
        // checkpanic, or the error cannot be returned: panic with it instead.
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = pos;
        panicNode.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(panicNode);
    }

    return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase);
}
// Builds the success-branch pattern for check/checkpanic desugaring:
// `<lhsType> $t_match => { <target> = $t_match; }`. The assignment target is a
// new ref to `varSymbol` for variable definitions, or `lhsExpr` otherwise.
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType,
        boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
    String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
    BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos,
            patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0,
                    names.fromString(patternSuccessCaseVarName),
                    this.env.scope.owner.pkgID, lhsType, this.env.scope.owner));

    BLangExpression varRefExpr;
    if (isVarDef) {
        varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    } else {
        varRefExpr = lhsExpr;
    }

    BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
            patternSuccessCaseVar.symbol);
    BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos,
            varRefExpr, patternSuccessCaseVarRef, false);

    BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos,
            new ArrayList<BLangStatement>() {{
                add(assignmentStmtSuccessCase);
            }});
    return ASTBuilderUtil.createMatchStatementPattern(pos,
            patternSuccessCaseVar, patternBlockSuccessCase);
}
/**
 * Converts a match statement's pattern clauses into a chained if/else-if
 * statement. A trailing catch-all pattern becomes the final else body instead
 * of another condition.
 */
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
    List<BLangMatchBindingPatternClause> clauses = matchStmt.patternClauses;
    BLangIf rootIf = generateIfElseStmt(clauses.get(0), matchExprVar);
    BLangIf tailIf = rootIf;
    for (int idx = 1; idx < clauses.size(); idx++) {
        BLangMatchBindingPatternClause clause = clauses.get(idx);
        boolean isFinalCatchAll = idx == clauses.size() - 1 && clause.isLastPattern;
        if (isFinalCatchAll) {
            tailIf.elseStmt = getMatchPatternElseBody(clause, matchExprVar);
        } else {
            tailIf.elseStmt = generateIfElseStmt(clause, matchExprVar);
            tailIf = (BLangIf) tailIf.elseStmt;
        }
    }
    return rootIf;
}
/**
 * Generate an if statement for a single match pattern clause.
 *
 * @param pattern match binding pattern clause node
 * @param matchExprVar variable node of the match expression
 * @return if else statement node
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
}
BType expectedType = matchExprVar.type;
if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
}
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) pattern;
BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
BLangStatement varDefStmt;
if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
(BLangTupleVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
(BLangRecordVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
(BLangErrorVariable) structuredPattern.bindingPatternVariable);
} else {
varDefStmt = ASTBuilderUtil
.createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
}
if (structuredPattern.typeGuardExpr != null) {
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
blockStmt.addStatement(varDef);
blockStmt.addStatement(varDefStmt);
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
structuredPattern.typeGuardExpr);
stmtExpr.type = symTable.booleanType;
ifCondition = ASTBuilderUtil
.createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
symTable.booleanType));
} else {
structuredPattern.body.stmts.add(0, varDef);
structuredPattern.body.stmts.add(1, varDefStmt);
}
}
return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}
/**
 * Prepares the body of a typed binding pattern clause: binds the matched value
 * (converted to the pattern variable's type) as the first statement of the body.
 * The ignored pattern "_" binds nothing and keeps its body unchanged.
 */
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                           BLangSimpleVariable matchExprVar) {
    BLangMatchTypedBindingPatternClause typedClause = (BLangMatchTypedBindingPatternClause) pattern;
    if (typedClause.variable.name.value.equals(Names.IGNORE.value)) {
        return typedClause.body;
    }
    BLangSimpleVarRef matchedValueRef =
            ASTBuilderUtil.createVariableRef(typedClause.pos, matchExprVar.symbol);
    BLangExpression initExpr = addConversionExprIfRequired(matchedValueRef, typedClause.variable.type);
    BLangSimpleVariable boundVar = ASTBuilderUtil.createVariable(typedClause.pos, "",
            typedClause.variable.type, initExpr, typedClause.variable.symbol);
    BLangSimpleVariableDef boundVarDef = ASTBuilderUtil.createVariableDef(boundVar.pos, boundVar);
    typedClause.body.stmts.add(0, boundVarDef);
    return typedClause.body;
}
/**
 * Prepares the body used for the final (catch-all) match clause. Structured
 * patterns get a variable definition for their binding pattern prepended to the
 * body; every other kind of pattern keeps its body unchanged.
 */
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE != pattern.getKind()) {
        return pattern.body;
    }
    BLangMatchStructuredBindingPatternClause structuredPattern =
            (BLangMatchStructuredBindingPatternClause) pattern;
    // Bind the matched value as the initializer of the binding pattern variable.
    structuredPattern.bindingPatternVariable.expr =
            ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);

    // Build the variable-definition node matching the binding pattern's shape.
    NodeKind varKind = structuredPattern.bindingPatternVariable.getKind();
    BLangStatement varDefStmt;
    if (NodeKind.TUPLE_VARIABLE == varKind) {
        varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                (BLangTupleVariable) structuredPattern.bindingPatternVariable);
    } else if (NodeKind.RECORD_VARIABLE == varKind) {
        varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                (BLangRecordVariable) structuredPattern.bindingPatternVariable);
    } else if (NodeKind.ERROR_VARIABLE == varKind) {
        varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                (BLangErrorVariable) structuredPattern.bindingPatternVariable);
    } else {
        varDefStmt = ASTBuilderUtil.createVariableDef(pattern.pos,
                (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
    }
    structuredPattern.body.stmts.add(0, varDefStmt);
    return structuredPattern.body;
}
/**
 * Wraps {@code expr} in an unchecked type-conversion node targeting
 * {@code lhsType}, unless no conversion is needed: the types are the same, an
 * implicit cast was already attached, or the combination is a known no-op
 * (json &lt;- nil, nil &lt;- nullable, array &lt;- tuple).
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.type;
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // setImplicitCastExpr may attach a conversion itself; if so, nothing more to do.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        return expr;
    }
    boolean conversionNotNeeded = (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL)
            || (lhsType.tag == TypeTags.NIL && rhsType.isNullable())
            || (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE);
    if (conversionNotNeeded) {
        return expr;
    }
    // Fall back to an explicit, unchecked conversion node.
    BLangTypeConversionExpr conversionExpr =
            (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.type = lhsType;
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    return conversionExpr;
}
/**
 * Builds the boolean condition that decides whether the matched value stored in
 * {@code varSymbol} satisfies the given pattern clause. For a union pattern type,
 * the per-member checks are OR-ed together.
 *
 * @param patternClause the match clause to build the condition for
 * @param varSymbol     symbol of the temporary variable holding the matched value
 * @return a boolean-typed condition expression
 */
private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
BVarSymbol varSymbol) {
// Determine the type the pattern is checked against, based on the clause kind.
BType patternType;
switch (patternClause.getKind()) {
case MATCH_STATIC_PATTERN_CLAUSE:
BLangMatchStaticBindingPatternClause staticPattern =
(BLangMatchStaticBindingPatternClause) patternClause;
patternType = staticPattern.literal.type;
break;
case MATCH_STRUCTURED_PATTERN_CLAUSE:
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) patternClause;
patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
break;
default:
// Typed binding pattern: use the declared variable type directly.
BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
patternType = simplePattern.variable.type;
break;
}
BLangExpression binaryExpr;
BType[] memberTypes;
// Flatten a union pattern type into its members; anything else is a single check.
if (patternType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) patternType;
memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
} else {
memberTypes = new BType[1];
memberTypes[0] = patternType;
}
if (memberTypes.length == 1) {
binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
} else {
// Seed the OR-chain with the checks for the first two members...
BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
lhsExpr.type, rhsExpr.type));
// ...then fold each remaining member in; note each new check becomes the LHS,
// so the accumulated expression nests on the right.
for (int i = 2; i < memberTypes.length; i++) {
lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
rhsExpr = binaryExpr;
binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
lhsExpr.type, rhsExpr.type));
}
}
return binaryExpr;
}
/**
 * Computes the static type implied by a structured binding pattern. Tuple and
 * record patterns synthesize anonymous tuple/record types (registering type
 * definitions for the generated record types), error patterns synthesize an
 * anonymous error type, and any other variable falls back to its declared type.
 * Note: this method mutates compiler state (recordCount/errorCount counters and
 * the package's type definitions).
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
List<BType> memberTypes = new ArrayList<>();
for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
}
BTupleType tupleType = new BTupleType(memberTypes);
if (tupleVariable.restVariable != null) {
// The rest binding's type is an array; its element type becomes the tuple's rest type.
BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
tupleType.restType = restArrayType.eType;
}
return tupleType;
}
if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
// Synthesize an anonymous record type with one required field per binding entry.
BRecordTypeSymbol recordSymbol =
Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
env.enclPkg.symbol.pkgID, null, env.scope.owner);
recordSymbol.initializerFunc = createRecordInitFunc();
recordSymbol.scope = new Scope(recordSymbol);
recordSymbol.scope.define(
names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
recordSymbol.initializerFunc.symbol);
List<BField> fields = new ArrayList<>();
List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
for (int i = 0; i < recordVariable.variableList.size(); i++) {
String fieldNameStr = recordVariable.variableList.get(i).key.value;
Name fieldName = names.fromString(fieldNameStr);
// Field types are derived recursively from the value binding patterns.
BType fieldType = getStructuredBindingPatternType(
recordVariable.variableList.get(i).valueBindingPattern);
BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
env.enclPkg.symbol.pkgID, fieldType, recordSymbol);
fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
recordSymbol.scope.define(fieldName, fieldSymbol);
}
BRecordType recordVarType = new BRecordType(recordSymbol);
recordVarType.fields = fields;
// A `...rest` binding constrains the open part of the record; otherwise it stays anydata-open.
recordVarType.restFieldType = recordVariable.restParam != null ?
((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
symTable.anydataType;
recordSymbol.type = recordVarType;
recordVarType.tsymbol = recordSymbol;
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
recordVarType,
bindingPatternVariable.pos);
recordTypeNode.initFunction =
rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
env);
TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);
return recordVarType;
}
if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
SymTag.ERROR,
Flags.PUBLIC,
names.fromString("$anonErrorType$" + errorCount++),
env.enclPkg.symbol.pkgID,
null, null);
BType detailType;
if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
// Only a rest binding present: any detail mapping is acceptable.
detailType = symTable.detailType;
} else {
// NOTE(review): errorCount is incremented a second time here, so the detail type's
// number does not match the error type's number above — confirm this is intentional.
detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);
BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
}
BErrorType errorType = new BErrorType(errorTypeSymbol,
((BErrorType) errorVariable.type).reasonType,
detailType);
errorTypeSymbol.type = errorType;
TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);
return errorType;
}
return bindingPatternVariable.type;
}
/**
 * Builds a record type node describing the detail record of an error binding
 * pattern, one field per detail entry. Entries whose binding pattern has no
 * symbol yet get a fresh public symbol of the pure type.
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> fieldList = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol fieldSymbol = entry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            // No symbol bound yet: synthesize one named "<key>$" with the pure type.
            fieldSymbol = new BVarSymbol(
                    Flags.PUBLIC,
                    names.fromString(entry.key.value + "$"),
                    this.env.enclPkg.packageID,
                    symTable.pureType,
                    null);
        }
        fieldList.add(ASTBuilderUtil.createVariable(
                entry.valueBindingPattern.pos,
                fieldSymbol.name.value,
                entry.valueBindingPattern.type,
                entry.valueBindingPattern.expr,
                fieldSymbol));
    }
    return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
/**
 * Synthesizes the anonymous record type describing the detail mapping of an
 * error binding pattern. The record is sealed when there is no rest binding;
 * its open part (when present) accepts anydata.
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo) {
    BRecordTypeSymbol detailSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString("$anonErrorType$" + errorNo + "$detailType"),
            env.enclPkg.symbol.pkgID, null, null);
    detailSymbol.initializerFunc = createRecordInitFunc();
    detailSymbol.scope = new Scope(detailSymbol);
    // Register the synthetic init function in the record's scope.
    detailSymbol.scope.define(
            names.fromString(detailSymbol.name.value + "." + detailSymbol.initializerFunc.funcName.value),
            detailSymbol.initializerFunc.symbol);

    BRecordType detailRecord = new BRecordType(detailSymbol);
    detailRecord.restFieldType = symTable.anydataType;
    if (restDetail == null) {
        // Without a rest binding the detail record is closed.
        detailRecord.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry entry : detail) {
        Name fieldName = names.fromIdNode(entry.key);
        BType fieldType = getStructuredBindingPatternType(entry.valueBindingPattern);
        BVarSymbol fieldSym = new BVarSymbol(
                Flags.PUBLIC, fieldName, detailSymbol.pkgID, fieldType, detailSymbol);
        detailRecord.fields.add(new BField(fieldName, entry.key.pos, fieldSym));
        detailSymbol.scope.define(fieldName, fieldSym);
    }
    return detailRecord;
}
/**
 * Creates the synthetic no-arg init function attached to generated anonymous
 * record type symbols; it takes no parameters and returns nil.
 */
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSym = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner, false);
    initFuncSym.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSym, initFuncType);
}
/** Wraps an already-resolved error type in an AST error-type node. */
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.type = errorType;
    return node;
}
/**
 * Builds the runtime check for a single pattern/member type: equality against
 * the literal for static patterns, an is-like test for structured patterns, an
 * equality test against () for the nil type, and an is-assignable check for
 * everything else.
 */
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol, BType patternType) {
    DiagnosticPos pos = patternClause.pos;
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    NodeKind clauseKind = patternClause.getKind();
    if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == clauseKind) {
        BLangMatchStaticBindingPatternClause staticClause =
                (BLangMatchStaticBindingPatternClause) patternClause;
        return createBinaryExpression(pos, varRef, staticClause.literal);
    }
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == clauseKind) {
        return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
    }
    if (patternType != symTable.nilType) {
        return createIsAssignableExpression(pos, varSymbol, patternType);
    }
    // Nil member type: compare the matched value against the () literal.
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
    return ASTBuilderUtil.createBinaryExpr(pos, varRef, nilLiteral, symTable.booleanType,
            OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                    symTable.anyType, symTable.nilType));
}
/**
 * Builds the condition for a static match pattern: recursively rewrites grouped
 * and OR-ed (binary) patterns, maps the ignore pattern "_" to an `is any` type
 * test, and otherwise emits `varRef == expression` with a resolved equality
 * operator (falling back to anydata equality when no direct operator exists).
 */
private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
BLangExpression expression) {
BLangBinaryExpr binaryExpr;
// Unwrap parenthesized patterns.
if (NodeKind.GROUP_EXPR == expression.getKind()) {
return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
}
if (NodeKind.BINARY_EXPR == expression.getKind()) {
// A `p1 | p2` style static pattern: build conditions for both sides and OR them.
binaryExpr = (BLangBinaryExpr) expression;
BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
} else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
// The "_" pattern matches anything: use `varRef is any`.
BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
anyType.type = symTable.anyType;
anyType.typeKind = TypeKind.ANY;
return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
} else {
// Plain literal/const pattern: `varRef == expression`.
binaryExpr = ASTBuilderUtil
.createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
if (opSymbol == symTable.notFoundSymbol) {
// No direct equality operator for these operand types; fall back to anydata equality.
opSymbol = symResolver
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
binaryExpr);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
}
return binaryExpr;
}
/** Builds a boolean is-assignable check of the given variable against {@code patternType}. */
private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
                                                           BVarSymbol varSymbol,
                                                           BType patternType) {
    BLangSimpleVarRef checkedValueRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    return ASTBuilderUtil.createIsAssignableExpr(pos, checkedValueRef, patternType, symTable.booleanType, names);
}
/** Builds a boolean `expr is-like type` test expression. */
private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
    return ASTBuilderUtil.createIsLikeExpr(
            pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
/**
 * Turns a variable's initializer into a stand-alone assignment statement
 * (`varRef = variable.expr`), reusing the variable's name, symbol, type and
 * position for the generated reference.
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    BLangSimpleVarRef targetRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    targetRef.pos = variable.pos;
    targetRef.variableName = variable.name;
    targetRef.symbol = variable.symbol;
    targetRef.type = variable.type;

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;
    assignment.setVariable(targetRef);
    return assignment;
}
/**
 * Generates a `self.<field> = <initializer>` assignment for a field of the
 * given function's receiver and rewrites it in the function's environment.
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol symbol) {
    BLangSimpleVarRef receiverRef = ASTBuilderUtil.createVariableRef(variable.pos, symbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(receiverRef, variable.name);
    fieldAccess.symbol = variable.symbol;
    fieldAccess.type = variable.type;

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;
    assignment.setVariable(fieldAccess);

    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignment, initFuncEnv);
}
/**
 * Appends an implicit pass-through clause to a match expression covering every
 * member type of the matched value that no explicit pattern accepts, making the
 * generated match exhaustive. Does nothing when all types are already covered.
 */
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes;
List<BType> unmatchedTypes = new ArrayList<>();
if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
exprTypes = new ArrayList<>(unionType.getMemberTypes());
} else {
// NOTE(review): the union branch reads expr.type, but this branch reads the match
// expression's own type — the asymmetry looks suspicious; confirm it is intentional.
exprTypes = Lists.of(bLangMatchExpression.type);
}
// Collect the member types that none of the explicit pattern variables can accept.
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
if (this.types.isAssignable(type, pattern.variable.type)) {
assignable = true;
break;
}
}
if (!assignable) {
unmatchedTypes.add(type);
}
}
if (unmatchedTypes.isEmpty()) {
return;
}
// The default clause's variable type covers every unmatched member.
BType defaultPatternType;
if (unmatchedTypes.size() == 1) {
defaultPatternType = unmatchedTypes.get(0);
} else {
defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
}
// Default clause: bind the unmatched value and return it unchanged.
String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName),
this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner));
BLangMatchExprPatternClause defaultPattern =
(BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
defaultPattern.variable = patternMatchCaseVar;
defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
defaultPattern.pos = bLangMatchExpression.pos;
bLangMatchExpression.patternClauses.add(defaultPattern);
}
/**
 * Returns true when the given RHS access expression, or any access in its
 * receiver chain, uses error- or nil-safe navigation. LHS expressions and
 * expressions without a receiver never qualify.
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.lhsVar || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Keep walking down the receiver chain through field/index accesses.
    NodeKind receiverKind = accessExpr.expr.getKind();
    boolean receiverIsAccess = receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    return receiverIsAccess && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Desugars a safe-navigation access chain into a statement expression: a temp
 * result variable definition plus the nested match statement produced by
 * handleSafeNavigation, with the temp variable as the expression's value.
 * Resets the per-expression state (matchStmtStack, accessExprStack,
 * successPattern, safeNavigationAssignment) before returning.
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
BType originalExprType = accessExpr.type;
// Temp variable every generated match clause assigns its result into.
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
// Builds the nested match statements, pushing each onto matchStmtStack.
handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);
// The outermost match is at the bottom of the stack.
BLangMatch matcEXpr = this.matchStmtStack.firstElement();
BLangBlockStmt blockStmt =
ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
stmtExpression.type = originalExprType;
// Clear the shared desugaring state for the next safe-navigation chain.
this.matchStmtStack = new Stack<>();
this.accessExprStack = new Stack<>();
this.successPattern = null;
this.safeNavigationAssignment = null;
return stmtExpression;
}
/**
 * Recursively desugars one step of a safe-navigation chain. The receiver chain
 * is processed first (innermost access handled before this one); each step that
 * uses nil/error-safe navigation is turned into a match statement with nil,
 * error and success clauses, and successive matches are nested by attaching
 * each new match as the body of the previous success pattern.
 *
 * @param accessExpr    the access step being processed
 * @param type          result type assigned to generated match statements
 * @param tempResultVar variable each generated clause assigns its result into
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
if (accessExpr.expr == null) {
return;
}
// Handle the receiver chain first so matches are built innermost-first.
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
kind == NodeKind.INVOCATION) {
handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
}
if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
// Plain access step: restore its original type (XML access may additionally
// produce error) and route its value into the pending success assignment, if any.
BType originalType = accessExpr.originalType;
if (TypeTags.isXMLTypeTag(originalType.tag)) {
accessExpr.type = BUnionType.create(null, originalType, symTable.errorType);
} else {
accessExpr.type = originalType;
}
if (this.safeNavigationAssignment != null) {
this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
}
return;
}
/*
* If the field access is a safe navigation, create a match expression.
* Then chain the current expression as the success-pattern of the parent
* match expr, if available.
* eg:
* x but { <--- parent match expr
* error e => e,
* T t => t.y but { <--- current expr
* error e => e,
* R r => r.z
* }
* }
*/
BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
// `()` clause, when the receiver may be nil.
if (accessExpr.nilSafeNavigation) {
matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
matchStmt.type = type;
}
// `error` clause, when errors are lifted out of the access.
if (accessExpr.errorSafeNavigation) {
matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
matchStmt.type = type;
matchStmt.pos = accessExpr.pos;
}
BLangMatchTypedBindingPatternClause successPattern =
getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
matchStmt.patternClauses.add(successPattern);
this.matchStmtStack.push(matchStmt);
// Nest this match inside the enclosing chain's success pattern, if one exists.
if (this.successPattern != null) {
this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
}
this.successPattern = successPattern;
}
/**
 * Builds the `error e => tempResult = e` clause used when desugaring
 * error-lifting safe navigation into a match statement.
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    String errorVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol errorVarSym = new BVarSymbol(0, names.fromString(errorVarName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    BLangSimpleVariable errorVar =
            ASTBuilderUtil.createVariable(expr.pos, errorVarName, symTable.errorType, null, errorVarSym);

    // Clause body: tempResult = <matched error>;
    BLangVariableReference resultRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangSimpleVarRef matchedErrorRef = ASTBuilderUtil.createVariableRef(expr.pos, errorVar.symbol);
    BLangAssignment assignError =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultRef, matchedErrorRef, false);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignError));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, errorVar, clauseBody);
}
/**
 * Builds a `() => expr` match-expression clause; the pattern variable is the
 * ignored ("_") nil-typed variable.
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                       BLangExpression expr) {
    String ignoredVarName = IGNORE.toString();
    BVarSymbol nilVarSym = new BVarSymbol(0, names.fromString(ignoredVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable nilVar =
            ASTBuilderUtil.createVariable(pos, ignoredVarName, symTable.nilType, null, nilVarSym);

    BLangMatchExprPatternClause nullClause =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nullClause.variable = nilVar;
    nullClause.expr = expr;
    nullClause.pos = pos;
    return nullClause;
}
/**
 * Builds the `() n => tempResult = n` clause used when desugaring nil-lifting
 * safe navigation into a match statement.
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String nilVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol nilVarSym = new BVarSymbol(0, names.fromString(nilVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable nilVar =
            ASTBuilderUtil.createVariable(expr.pos, nilVarName, symTable.nilType, null, nilVarSym);

    // Clause body: tempResult = <matched nil value>;
    BLangVariableReference resultRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangSimpleVarRef matchedNilRef = ASTBuilderUtil.createVariableRef(expr.pos, nilVar.symbol);
    BLangAssignment assignNil =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultRef, matchedNilRef, false);
    BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignNil));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, nilVar, clauseBody);
}
/**
 * Builds the success clause of a safe-navigation match: binds the narrowed
 * (non-nil and, when {@code liftError}, non-error) receiver to a fresh pattern
 * variable, re-points the access expression at that variable, and assigns the
 * access result into {@code tempResultVar}. The generated assignment is stored
 * in safeNavigationAssignment so a later chain step can replace its RHS.
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BLangAccessExpression accessExpr,
BLangSimpleVariable tempResultVar, boolean liftError) {
// Receiver type narrowed to exclude nil (and optionally error).
BType type = types.getSafeType(accessExpr.expr.type, true, liftError);
String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
BVarSymbol successPatternSymbol;
// Function-typed receivers need an invokable symbol so calls still resolve.
if (type.tag == TypeTags.INVOKABLE) {
successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, type, this.env.scope.owner);
} else {
successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, type, this.env.scope.owner);
}
BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
type, null, successPatternSymbol);
// Re-point the access at the narrowed pattern variable; it is no longer safe-navigating.
accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
accessExpr.errorSafeNavigation = false;
accessExpr.nilSafeNavigation = false;
// XML access may still produce error or nil even after narrowing.
if (TypeTags.isXMLTypeTag(accessExpr.expr.type.tag)) {
accessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType, symTable.nilType);
} else {
accessExpr.type = accessExpr.originalType;
}
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
BLangMatchTypedBindingPatternClause successPattern =
ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
// Remember the assignment so the next chain step can patch its RHS expression.
this.safeNavigationAssignment = assignmentStmt;
return successPattern;
}
/**
 * Returns true when an LHS access expression navigates through a possibly-nil
 * value anywhere along its receiver chain.
 */
private boolean safeNavigateLHS(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BLangExpression receiver = ((BLangAccessExpression) expr).expr;
    // A nilable receiver at any depth makes the whole LHS a safe navigation.
    return receiver.type.isNullable() || safeNavigateLHS(receiver);
}
/**
 * Desugars an assignment whose LHS navigates through possibly-nil values into a
 * block statement: guard/initializer statements for each step of the LHS chain,
 * followed by the assignment itself. The {@code safeAssignment} flag is
 * currently not read in this method.
 */
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
                                                       boolean safeAssignment) {
    this.accessExprStack = new Stack<>();
    List<BLangStatement> stmts = new ArrayList<>();
    // Emit the guards/initializers for every step of the LHS receiver chain.
    createLHSSafeNavigation(stmts, accessExpr.expr);
    stmts.add(ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, cloneExpression(accessExpr), rhsExpr));
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}
/**
 * Recursively emits guard statements for each step of a nilable LHS access
 * chain. For each nilable step: if the value is nil, either initialize it with
 * an empty mapping (defaultable mapping types that are not the chain root) or
 * panic with a nil-reference error. Invocation steps are hoisted into
 * intermediate variables so they evaluate exactly once. The (possibly replaced)
 * expression for the current step is left on accessExprStack for the caller.
 */
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
NodeKind kind = expr.getKind();
boolean root = false;
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
kind == NodeKind.INVOCATION) {
// Guard the receiver first, then re-point this access at the guarded receiver.
BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
createLHSSafeNavigation(stmts, accessExpr.expr);
accessExpr.expr = accessExprStack.pop();
} else {
root = true;
}
if (expr.getKind() == NodeKind.INVOCATION) {
// Hoist the invocation result into a temp variable.
BLangInvocation invocation = (BLangInvocation) expr;
BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value
+ "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner);
BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
intermediateVariable);
stmts.add(intermediateVariableDefinition);
expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
}
if (expr.type.isNullable()) {
// Generates: if (expr is ()) { <initialize or panic> }
BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
isNillTest.type = symTable.booleanType;
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);
// Work on a clone narrowed to the non-nil type for the statements below.
expr = cloneExpression(expr);
expr.type = types.getSafeType(expr.type, true, false);
if (isDefaultableMappingType(expr.type) && !root) {
// Auto-create an empty mapping so navigation can continue past this step.
BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
jsonLiteral.type = expr.type;
jsonLiteral.pos = expr.pos;
BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
expr, jsonLiteral);
thenStmt.addStatement(assignment);
} else {
// Cannot default the value: panic with a nil-reference error.
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
literal.type = symTable.stringType;
BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
errorCtorInvocation.pos = expr.pos;
errorCtorInvocation.argExprs.add(literal);
errorCtorInvocation.requiredArgs.add(literal);
errorCtorInvocation.type = symTable.errorType;
errorCtorInvocation.symbol = symTable.errorConstructor;
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.expr = errorCtorInvocation;
panicNode.pos = expr.pos;
thenStmt.addStatement(panicNode);
}
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
stmts.add(ifelse);
}
// Leave the expression for this step on the stack for the caller to consume.
accessExprStack.push(expr);
}
/** Returns a fresh value-type node representing the nil type. */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.typeKind = TypeKind.NIL;
    nilTypeNode.type = symTable.nilType;
    return nilTypeNode;
}
/**
 * Clones a variable-reference-like expression.
 * <p>
 * Simple variable references are recreated from their symbol; field/index access and
 * invocation expressions are delegated to {@link #cloneAccessExpr}. Any other node kind
 * is a programming error and raises {@link IllegalStateException}.
 *
 * @param expr expression to clone
 * @return the cloned variable reference
 */
private BLangVariableReference cloneExpression(BLangExpression expr) {
    NodeKind exprKind = expr.getKind();
    if (exprKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (exprKind == NodeKind.FIELD_BASED_ACCESS_EXPR || exprKind == NodeKind.INDEX_BASED_ACCESS_EXPR
            || exprKind == NodeKind.INVOCATION) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Recursively clones a field-based or index-based access expression, marking the clone
 * as non-safe-navigating (both {@code errorSafeNavigation} and {@code nilSafeNavigation}
 * cleared) and lifting nil out of the receiver's type via {@code types.getSafeType}.
 *
 * @param originalAccessExpr access expression to clone
 * @return the cloned access expression (or the original when it has no receiver)
 * @throws IllegalStateException for node kinds that cannot be cloned here, including
 *         invocations (see bug-fix note below)
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    BLangVariableReference varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression(originalAccessExpr.expr);
    }
    // The clone's receiver is typed with nil lifted out, since the surrounding
    // safe-navigation desugaring guards against nil before this clone is evaluated.
    varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false);

    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        case INVOCATION:
            // BUG FIX: this case previously assigned `accessExpr = null` and fell through
            // to the field copies below, which dereference accessExpr unconditionally and
            // therefore always threw a NullPointerException. Fail explicitly instead,
            // consistent with the default arm.
            throw new IllegalStateException("cloning invocation access expressions is not supported");
        default:
            throw new IllegalStateException();
    }

    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.lhsVar = originalAccessExpr.lhsVar;
    accessExpr.symbol = originalAccessExpr.symbol;
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    // The clone is evaluated behind nil checks, so it carries the original (non-lifted) type.
    accessExpr.type = originalAccessExpr.originalType;
    return accessExpr;
}
/**
 * Builds the expression {@code expr + 1}, used to turn an exclusive range start
 * into an inclusive one.
 *
 * @param expr original range-start expression
 * @return a binary ADD expression adding the int constant 1
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
                                                                               symTable.intType,
                                                                               symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOp);
}
/**
 * Builds the expression {@code expr - 1}, used to turn an exclusive range end
 * into an inclusive one.
 *
 * @param expr original range-end expression
 * @return a binary SUB expression subtracting the int constant 1
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
                                                                               symTable.intType,
                                                                               symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOp);
}
/**
 * Creates a boolean literal node with the given value.
 *
 * @param value literal value
 * @return a {@link BLangLiteral} typed as boolean
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral boolLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    boolLiteral.type = symTable.booleanType;
    boolLiteral.value = value;
    return boolLiteral;
}
/**
 * Reports whether the nil-lifted form of {@code type} is a mapping type with a
 * usable default value (json, map, or record).
 *
 * @param type type to inspect
 * @return true for json/map/record (after lifting nil), false otherwise
 */
private boolean isDefaultableMappingType(BType type) {
    int tag = types.getSafeType(type, true, false).tag;
    return tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD;
}
/**
 * Creates the generated initializer function for an object type and registers it on
 * the object's type symbol as {@code generatedInitializerFunc}.
 *
 * @param structureTypeNode object type node to create the initializer for
 * @param env               symbol environment of the enclosing scope
 * @return the generated init function, already passed through {@code rewrite}
 */
private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(structureTypeNode, env,
                                                                    Names.GENERATED_INIT_SUFFIX, names, symTable);
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) structureTypeNode.type.tsymbol);
    // Attach the generated initializer to the type symbol so later phases can look it up.
    typeSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX, initFunction.symbol,
                                                                (BInvokableType) initFunction.type);
    structureTypeNode.generatedInitFunction = initFunction;
    // The generated initializer returns nil by default; createGeneratedInitializerFunction
    // later overwrites the return type from the user-defined init, if one exists.
    initFunction.returnTypeNode.type = symTable.nilType;
    return rewrite(initFunction, env);
}
/**
 * Desugars a short-circuiting logical binary expression ({@code &&} / {@code ||})
 * into an if-else over a temporary result variable, preserving lazy evaluation of
 * the right-hand operand.
 *
 * @param binaryExpr the logical AND/OR expression to desugar; result is stored in
 *                   {@code this.result}
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    /*
     * Desugar (lhsExpr && rhsExpr) to following if-else:
     *
     * logical AND:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     * $result$ = rhsExpr;
     * } else {
     * $result$ = false;
     * }
     *
     * logical OR:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     * $result$ = true;
     * } else {
     * $result$ = rhsExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);

    // then-branch assigns rhs for AND (lhs was true), or the constant true for OR.
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangExpression thenResult;
    if (binaryExpr.opKind == OperatorKind.AND) {
        thenResult = binaryExpr.rhsExpr;
    } else {
        thenResult = getBooleanLiteral(true);
    }
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
    thenBody.addStatement(thenAssignment);

    // else-branch assigns the constant false for AND, or rhs for OR (lhs was false).
    BLangExpression elseResult;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    if (binaryExpr.opKind == OperatorKind.AND) {
        elseResult = getBooleanLiteral(false);
    } else {
        elseResult = binaryExpr.rhsExpr;
    }
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
    elseBody.addStatement(elseAssignment);

    // Wrap everything into a statement expression yielding $result$.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = binaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
/**
 * Split package init function into several smaller functions.
 *
 * @param packageNode package node
 * @param env symbol environment
 * @return initial init function but trimmed in size
 */
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
    int methodSize = INIT_METHOD_SPLIT_SIZE;
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body;
    // Splitting only matters on the JVM backend (method bytecode size limits);
    // small init bodies are returned unchanged.
    if (funcBody.stmts.size() < methodSize || !isJvmTarget) {
        return packageNode.initFunction;
    }
    BLangFunction initFunction = packageNode.initFunction;

    List<BLangFunction> generatedFunctions = new ArrayList<>();
    List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts);
    funcBody.stmts.clear();
    BLangFunction newFunc = initFunction;
    BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body;

    // Phase 1: copy leading non-variable-definition statements, chunking every
    // methodSize statements into a fresh intermediate init function.
    // varDefIndex ends up pointing at the first VARIABLE_DEF statement.
    int varDefIndex = 0;
    for (int i = 0; i < stmts.size(); i++) {
        if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) {
            break;
        }
        varDefIndex++;
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }

    // Phase 2: accumulate statements in chunks, only splitting at service-constructor
    // assignments (a safe split point), and stop chunking at the first listener
    // assignment. Advances varDefIndex past everything it consumes.
    List<BLangStatement> chunkStmts = new ArrayList<>();
    for (int i = varDefIndex; i < stmts.size(); i++) {
        BLangStatement stmt = stmts.get(i);
        chunkStmts.add(stmt);
        varDefIndex++;
        if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
                (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) {
            // NOTE(review): this inner size check duplicates the third condition of the
            // enclosing if and is therefore always true here — candidate for cleanup.
            if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) {
                generatedFunctions.add(newFunc);
                newFunc = createIntermediateInitFunction(packageNode, env);
                newFuncBody = (BLangBlockFunctionBody) newFunc.body;
                symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
            }
            newFuncBody.stmts.addAll(chunkStmts);
            chunkStmts.clear();
        } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
                Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
                        Flags.LISTENER)
        ) {
            break;
        }
    }
    // Flush whatever chunk was pending when the loop ended (includes the listener
    // assignment if phase 2 broke out early).
    newFuncBody.stmts.addAll(chunkStmts);

    // Phase 3: copy the remaining statements (from the listener assignment onwards),
    // again chunking every methodSize statements.
    for (int i = varDefIndex; i < stmts.size(); i++) {
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }
    generatedFunctions.add(newFunc);

    // Chain the generated functions: each one ends with `check nextFunc();` so that
    // errors propagate back through the original init function.
    for (int j = 0; j < generatedFunctions.size() - 1; j++) {
        BLangFunction thisFunction = generatedFunctions.get(j);

        BLangCheckedExpr checkedExpr =
                ASTBuilderUtil.createCheckExpr(initFunction.pos,
                                               createInvocationNode(generatedFunctions.get(j + 1).name.value,
                                                                    new ArrayList<>(), symTable.errorOrNilType),
                                               symTable.nilType);
        checkedExpr.equivalentErrorTypeList.add(symTable.errorType);

        BLangExpressionStmt expressionStmt = ASTBuilderUtil
                .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body);
        expressionStmt.expr = checkedExpr;
        expressionStmt.expr.pos = initFunction.pos;

        // The first function is the original init function; only the intermediates
        // are added as new top-level functions and rewritten here.
        if (j > 0) {
            thisFunction = rewrite(thisFunction, env);
            packageNode.functions.add(thisFunction);
            packageNode.topLevelNodes.add(thisFunction);
        }
    }

    if (generatedFunctions.size() > 1) {
        // The last intermediate has no successor to chain to; just rewrite and register it.
        BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
        lastFunc = rewrite(lastFunc, env);
        packageNode.functions.add(lastFunc);
        packageNode.topLevelNodes.add(lastFunc);
    }

    return generatedFunctions.get(0);
}
/**
 * Create an intermediate package init function, named with the init-function suffix
 * plus a running index ({@code initFuncIndex}).
 *
 * @param pkgNode package node
 * @param env symbol environment of package
 * @return the newly created (not yet rewritten) intermediate init function
 */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();
    BLangFunction initFunction = ASTBuilderUtil
            .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
                                                    new Name(Names.INIT_FUNCTION_SUFFIX.value
                                                            + this.initFuncIndex++), symTable);

    createInvokableSymbol(initFunction, env);

    return initFunction;
}
/**
 * Returns the rest parameter's type of the given invokable symbol, or {@code null}
 * when the symbol is absent or has no rest parameter.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    boolean hasRestParam = invokableSymbol != null && invokableSymbol.restParam != null;
    return hasRestParam ? invokableSymbol.restParam.type : null;
}
/**
 * Returns the rest parameter's type of the given function node, or {@code null}
 * when the function is absent or has no rest parameter.
 */
private BType getRestType(BLangFunction function) {
    boolean hasRestParam = function != null && function.restParam != null;
    return hasRestParam ? function.restParam.type : null;
}
/**
 * Returns the rest parameter's symbol of the given function node, or {@code null}
 * when the function is absent or has no rest parameter.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    boolean hasRestParam = function != null && function.restParam != null;
    return hasRestParam ? function.restParam.symbol : null;
}
/**
 * Reports whether a mapping-constructor field is a key-value field with a computed
 * (bracketed expression) key.
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Desugars a mapping constructor expression into a statement expression of the form:
 * <pre>
 *   { T $map$constr$N = {}; $map$constr$N[k1] = v1; ...; } $map$constr$N
 * </pre>
 * Key-value fields and shorthand variable-reference fields become individual member
 * stores; spread-operator fields become a foreach over the spread expression's
 * entries, storing each [key, value] tuple.
 *
 * @param mappingConstructorExpr the mapping constructor to desugar
 * @return the resulting statement expression, typed as the original constructor
 */
private BLangStatementExpression rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;

    BType type = mappingConstructorExpr.type;
    DiagnosticPos pos = mappingConstructorExpr.pos;

    // Records and maps use different (empty) literal node types as the seed value.
    BLangRecordLiteral recordLiteral = type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type) :
            new BLangMapLiteral(pos, type);

    String name = DESUGARED_MAPPING_CONSTR_KEY + this.annonVarCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                                          this.env.scope.owner);
    BLangSimpleVariable var = createVariable(pos, name, type, recordLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = type;

    BLangBlockStmt blockStmt = createBlockStmt(pos);
    blockStmt.stmts.add(varDef);

    BLangSimpleVarRef mappingVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression keyExpr = key.expr;

            // Computed keys are used as-is; identifier keys become string literals;
            // everything else is already a literal key.
            // (BUG FIX: removed a stray empty statement — a doubled semicolon — here.)
            BLangExpression indexExpr = key.computedKey ? keyExpr :
                    keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
                            createStringLiteral(pos, ((BLangSimpleVarRef) keyExpr).variableName.value) :
                            ((BLangLiteral) keyExpr);
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef, indexExpr, keyValueField.valueExpr);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field `{x}` is equivalent to `{"x": x}`.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef,
                                          createStringLiteral(pos, varRefField.variableName.value),
                                          varRefField);
        } else {
            // Spread-operator field: iterate the spread expression's entries and store
            // each [key, value] tuple into the mapping.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;

            BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
            foreach.pos = pos;
            foreach.collection = generateMapEntriesInvocation(spreadOpField.expr, spreadOpField.expr.type);
            types.setForeachTypedBindingPatternType(foreach);

            BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType);
            foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                                                    this.env.scope.owner.pkgID, foreachVariable.type,
                                                    this.env.scope.owner);
            BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
            foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
            foreach.isDeclaredWithVar = true;
            BLangBlockStmt foreachBodyBlock = ASTBuilderUtil.createBlockStmt(pos);

            // Entry tuple element [0] is the key, [1] is the value.
            BTupleType foreachVarRefType = (BTupleType) foreachVarRef.type;
            BLangIndexBasedAccess indexExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            indexExpr.pos = pos;
            indexExpr.expr = foreachVarRef;
            indexExpr.indexExpr = rewriteExpr(createIntLiteral(0));
            indexExpr.type = foreachVarRefType.tupleTypes.get(0);

            BLangIndexBasedAccess valueExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            valueExpr.pos = pos;
            valueExpr.expr = foreachVarRef;
            valueExpr.indexExpr = rewriteExpr(createIntLiteral(1));
            valueExpr.type = foreachVarRefType.tupleTypes.get(1);

            addMemberStoreForKeyValuePair(pos, foreachBodyBlock, mappingVarRef, indexExpr, valueExpr);

            foreach.body = foreachBodyBlock;
            blockStmt.addStatement(foreach);
        }
    }

    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, mappingVarRef);
    stmtExpression.type = type;
    return stmtExpression;
}
/**
 * Appends a member-store statement {@code mappingVarRef[indexExpr] = value;} to the
 * given block, rewriting (desugaring) both the value and the index expression.
 *
 * @param pos           source position for the generated nodes
 * @param blockStmt     block to append the assignment to
 * @param mappingVarRef reference to the mapping being populated
 * @param indexExpr     key expression used as the index
 * @param value         value expression to store
 */
private void addMemberStoreForKeyValuePair(DiagnosticPos pos, BLangBlockStmt blockStmt,
                                           BLangExpression mappingVarRef, BLangExpression indexExpr,
                                           BLangExpression value) {
    BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
    // The value is rewritten before the index expression; both pass through the
    // desugar visitor so nested constructs are expanded.
    assignmentStmt.expr = rewriteExpr(value);

    BLangIndexBasedAccess indexAccessNode = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    indexAccessNode.pos = pos;
    indexAccessNode.expr = mappingVarRef;
    indexAccessNode.indexExpr = rewriteExpr(indexExpr);
    indexAccessNode.type = value.type;
    assignmentStmt.varRef = indexAccessNode;
}
/**
 * Extracts the literal-keyed key/value pairs from a mapping constructor that was
 * desugared by {@code rewriteMappingConstructor}.
 * <p>
 * Only member-store assignments whose index is a (string) literal are collected;
 * computed keys and spread-operator entries are skipped.
 *
 * @param desugaredMappingConst statement expression produced by the mapping-constructor desugaring
 * @return map from literal key to its value expression
 */
private Map<String, BLangExpression> getKeyValuePairs(BLangStatementExpression desugaredMappingConst) {
    List<BLangStatement> stmts = ((BLangBlockStmt) desugaredMappingConst.stmt).stmts;
    Map<String, BLangExpression> keyValuePairs = new HashMap<>();
    for (BLangStatement stmt : stmts) {
        // BUG FIX: previously every statement after the first was cast to BLangAssignment,
        // but rewriteMappingConstructor emits BLangForeach statements for spread-operator
        // fields, which caused a ClassCastException. Skipping by kind also covers the
        // leading variable definition that the old `i = 1` start index skipped.
        if (stmt.getKind() != NodeKind.ASSIGNMENT) {
            continue;
        }
        BLangAssignment assignmentStmt = (BLangAssignment) stmt;
        BLangExpression indexExpr = ((BLangIndexBasedAccess) assignmentStmt.varRef).indexExpr;
        if (indexExpr.getKind() != NodeKind.LITERAL) {
            continue;
        }
        keyValuePairs.put((String) ((BLangLiteral) indexExpr).value, assignmentStmt.expr);
    }
    return keyValuePairs;
}
} | class Desugar extends BLangNodeVisitor {
// Context key under which the singleton Desugar instance is registered.
private static final CompilerContext.Key<Desugar> DESUGAR_KEY =
        new CompilerContext.Key<>();

// Names of runtime/lang-lib functions invoked by generated code.
private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause";
private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause";
private static final String BASE_64 = "base64";
private static final String ERROR_REASON_FUNCTION_NAME = "reason";
private static final String ERROR_DETAIL_FUNCTION_NAME = "detail";
private static final String TO_STRING_FUNCTION_NAME = "toString";
private static final String LENGTH_FUNCTION_NAME = "length";
private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException";
private static final String CONSTRUCT_FROM = "constructFrom";
private static final String SLICE_LANGLIB_METHOD = "slice";
private static final String PUSH_LANGLIB_METHOD = "push";
// Name prefix used for synthesized vararg temporaries.
private static final String DESUGARED_VARARG_KEY = "$vararg$";

// Names of internal XML navigation helper functions.
public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants";
public static final String XML_INTERNAL_CHILDREN = "children";
public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat";
public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting";
public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute";
public static final String XML_INTERNAL_GET_ELEMENTS = "getElements";

// Collaborating compiler components, resolved from the CompilerContext in the constructor.
private SymbolTable symTable;
private SymbolResolver symResolver;
private final SymbolEnter symbolEnter;
private ClosureDesugar closureDesugar;
private QueryDesugar queryDesugar;
private AnnotationDesugar annotationDesugar;
private Types types;
private Names names;
private ServiceDesugar serviceDesugar;
// Result slot used by the visitor methods (each visit(...) stores its rewritten node here).
private BLangNode result;
private NodeCloner nodeCloner;
private SemanticAnalyzer semanticAnalyzer;
private BLangStatementLink currentLink;
public Stack<BLangLockStmt> enclLocks = new Stack<>();

// Current symbol environment during the traversal.
private SymbolEnv env;
// Running counters used to generate unique names for synthesized constructs.
private int lambdaFunctionCount = 0;
private int transactionIndex = 0;
private int recordCount = 0;
private int errorCount = 0;
private int annonVarCount = 0;
private int initFuncIndex = 0;
private int indexExprCount = 0;
private int letCount = 0;
private int varargCount = 0;

// Desugaring state for match statements and safe-navigation access chains.
private Stack<BLangMatch> matchStmtStack = new Stack<>();
Stack<BLangExpression> accessExprStack = new Stack<>();
private BLangMatchTypedBindingPatternClause successPattern;
private BLangAssignment safeNavigationAssignment;
// Set to true by the constructor; gates JVM-only transformations such as init-function splitting.
static boolean isJvmTarget = false;
/**
 * Returns the Desugar instance bound to the given compiler context, creating and
 * registering one on first use (the constructor puts itself into the context).
 *
 * @param context compiler context
 * @return the per-context Desugar singleton
 */
public static Desugar getInstance(CompilerContext context) {
    Desugar instance = context.get(DESUGAR_KEY);
    return instance != null ? instance : new Desugar(context);
}
/**
 * Private constructor; obtain instances via {@link #getInstance(CompilerContext)}.
 * Registers this instance in the compiler context and resolves all collaborating
 * singleton components from it.
 *
 * @param context compiler context to bind this Desugar instance to
 */
private Desugar(CompilerContext context) {
    // NOTE(review): mutates a static flag from an instance constructor; assumes the
    // JVM is the only active target — TODO confirm.
    isJvmTarget = true;
    context.put(DESUGAR_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.closureDesugar = ClosureDesugar.getInstance(context);
    this.queryDesugar = QueryDesugar.getInstance(context);
    this.annotationDesugar = AnnotationDesugar.getInstance(context);
    this.types = Types.getInstance(context);
    // BUG FIX: `this.names = Names.getInstance(context);` appeared twice in a row;
    // the redundant duplicate assignment has been removed.
    this.names = Names.getInstance(context);
    this.serviceDesugar = ServiceDesugar.getInstance(context);
    this.nodeCloner = NodeCloner.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
}
/**
 * Entry point of the desugar phase for a package: initializes the annotation map
 * and rewrites the package in its own symbol environment.
 *
 * @param pkgNode package to desugar
 * @return the desugared package
 */
public BLangPackage perform(BLangPackage pkgNode) {
    annotationDesugar.initializeAnnotationMap(pkgNode);
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    return rewrite(pkgNode, pkgEnv);
}
/**
 * Lifts object/record attached functions to the package level and creates the
 * generated initializer functions for non-abstract objects and for records.
 *
 * @param pkgNode package whose type definitions are processed
 * @param env     symbol environment of the package
 */
private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) {
    for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) {
        // Type references (user-defined type aliases) have no attached functions of their own.
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            continue;
        }
        if (typeDef.symbol.tag == SymTag.OBJECT) {
            BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;

            // Lift each attached function to package level exactly once.
            objectTypeNode.functions.forEach(f -> {
                if (!pkgNode.objAttachedFunctions.contains(f.symbol)) {
                    pkgNode.functions.add(f);
                    pkgNode.topLevelNodes.add(f);
                }
            });

            // Abstract objects cannot be instantiated, so no initializer is generated.
            if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
                continue;
            }

            // Create, analyze, and register the generated init function for this object.
            BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env);
            tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                    tempGeneratedInitFunction.symbol.scope, env);
            this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
            objectTypeNode.generatedInitFunction = tempGeneratedInitFunction;

            // Add generated initializer function.
            pkgNode.functions.add(objectTypeNode.generatedInitFunction);
            pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction);

            // Add the user-defined init function, if present.
            if (objectTypeNode.initFunction != null) {
                pkgNode.functions.add(objectTypeNode.initFunction);
                pkgNode.topLevelNodes.add(objectTypeNode.initFunction);
            }
        } else if (typeDef.symbol.tag == SymTag.RECORD) {
            // Records get an init function created and desugared immediately.
            BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode;
            recordTypeNode.initFunction = rewrite(
                    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                    env);
            pkgNode.functions.add(recordTypeNode.initFunction);
            pkgNode.topLevelNodes.add(recordTypeNode.initFunction);
        }
    }
}
/**
 * Creates the generated initializer for an object type and, when a user-defined
 * {@code init} exists, mirrors its signature (required params, rest param, return
 * type) onto the generated initializer so the two stay call-compatible.
 *
 * @param objectTypeNode object type to create the generated initializer for
 * @param env            symbol environment
 * @return the generated initializer function
 */
private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) {
    BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env);
    // Without a user-defined init there is nothing to mirror; the nil-returning
    // generated initializer is used as-is.
    if (objectTypeNode.initFunction == null) {
        return generatedInitFunc;
    }

    BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc;
    BAttachedFunction generatedInitializerFunc =
            ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc;
    addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc,
            generatedInitializerFunc);
    addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc);

    // Copy the user-defined init's return type and full invokable type onto the
    // generated initializer (node and symbol).
    generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode;
    generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type;

    ((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes;
    ((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType;
    ((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType;

    generatedInitializerFunc.type = initializerFunc.type;
    // Mark for (re-)desugaring since the signature was modified after the initial rewrite.
    generatedInitFunc.desugared = false;
    return generatedInitFunc;
}
/**
 * Copies the user-defined init function's required parameters onto the generated
 * initializer (both the function node and its attached-function symbol), cloning
 * each parameter with a fresh symbol and a cloned default-value expression.
 *
 * @param initFunction             user-defined init function
 * @param generatedInitFunc        generated initializer function node
 * @param generatedInitializerFunc generated initializer's attached-function entry
 */
private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                      BAttachedFunction generatedInitializerFunc) {
    if (initFunction.requiredParams.isEmpty()) {
        return;
    }
    for (BLangSimpleVariable param : initFunction.requiredParams) {
        BVarSymbol clonedSymbol = new BVarSymbol(0, names.fromString(param.name.getValue()),
                                                 param.symbol.pkgID, param.type, param.symbol.owner);
        BLangSimpleVariable clonedParam =
                ASTBuilderUtil.createVariable(initFunction.pos, param.name.getValue(), param.type,
                                              createRequiredParamExpr(param.expr), clonedSymbol);
        generatedInitFunc.requiredParams.add(clonedParam);
        generatedInitializerFunc.symbol.params.add(clonedParam.symbol);
    }
}
/**
 * Produces the default-value expression for a cloned required parameter.
 * <p>
 * Lambdas are re-created via {@code createLambdaFunction}; other expressions are
 * cloned with the node cloner, and arrow expressions get their function name
 * wrapped as {@code $name$}.
 *
 * @param expr original default-value expression; may be {@code null}
 * @return the expression to attach to the cloned parameter, or {@code null}
 */
private BLangExpression createRequiredParamExpr(BLangExpression expr) {
    if (expr == null) {
        return null;
    }
    if (expr.getKind() == NodeKind.LAMBDA) {
        BLangFunction func = ((BLangLambdaFunction) expr).function;
        return createLambdaFunction(func.pos, func.name.value, func.requiredParams, func.returnTypeNode, func.body);
    }
    BLangExpression cloned = this.nodeCloner.clone(expr);
    if (cloned.getKind() == NodeKind.ARROW_EXPR) {
        BLangArrowFunction arrowFn = (BLangArrowFunction) cloned;
        BLangIdentifier fnName = (BLangIdentifier) arrowFn.functionName;
        arrowFn.functionName = ASTBuilderUtil.createIdentifier(fnName.pos, "$" + fnName.getValue() + "$");
    }
    return cloned;
}
/**
 * Copies the user-defined init function's rest parameter (if any) onto the
 * generated initializer node and its attached-function symbol, using a fresh
 * variable symbol.
 *
 * @param initFunction             user-defined init function
 * @param generatedInitFunc        generated initializer function node
 * @param generatedInitializerFunc generated initializer's attached-function entry
 */
private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                  BAttachedFunction generatedInitializerFunc) {
    BLangSimpleVariable restParam = initFunction.restParam;
    if (restParam == null) {
        return;
    }
    BVarSymbol clonedRestSymbol = new BVarSymbol(0, names.fromString(restParam.name.getValue()),
                                                 restParam.symbol.pkgID, restParam.type, restParam.symbol.owner);
    generatedInitFunc.restParam =
            ASTBuilderUtil.createVariable(initFunction.pos, restParam.name.getValue(), restParam.type, null,
                                          clonedRestSymbol);
    generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol;
}
/**
 * Create package init, start, and stop functions and their invokable symbols.
 * XML namespace declarations are seeded into the init function's body.
 *
 * @param pkgNode package node
 * @param env symbol environment of package
 */
private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();
    pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
                                                                                 Names.INIT_FUNCTION_SUFFIX,
                                                                                 symTable);
    // Insert namespace declarations into the init function body.
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    for (BLangXMLNS xmlns : pkgNode.xmlnsList) {
        initFnBody.addStatement(createNamespaceDeclrStatement(xmlns));
    }
    pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
                                                                                  Names.START_FUNCTION_SUFFIX,
                                                                                  symTable);
    pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias,
                                                                          Names.STOP_FUNCTION_SUFFIX);
    // Create invokable symbols for the lifecycle functions so they can be referenced.
    createInvokableSymbol(pkgNode.initFunction, env);
    createInvokableSymbol(pkgNode.startFunction, env);
    createInvokableSymbol(pkgNode.stopFunction, env);
}
/**
 * Terminates the generated module init function: if the user defined a module-level
 * {@code init} function, appends {@code return <user-init>();} so its result (and
 * any error) propagates; otherwise appends a plain nil return.
 *
 * @param pkgNode package whose init function body is completed
 */
private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) {
    // A user-defined module init is a non-attached function with the reserved init name.
    Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream()
            .filter(bLangFunction -> !bLangFunction.attachedFunction &&
                    bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value))
            .findFirst();

    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    if (!userDefInitOptional.isPresent()) {
        addNilReturnStatement(initFnBody);
        return;
    }

    // Build an invocation node for the user-defined init and return its value.
    BLangFunction userDefInit = userDefInitOptional.get();

    BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    userDefInitInvocation.pos = pkgNode.initFunction.pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(userDefInit.name.value);
    userDefInitInvocation.name = name;
    userDefInitInvocation.symbol = userDefInit.symbol;

    BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    pkgAlias.setLiteral(false);
    pkgAlias.setValue(pkgNode.packageID.name.value);
    userDefInitInvocation.pkgAlias = pkgAlias;

    userDefInitInvocation.type = userDefInit.returnTypeNode.type;
    userDefInitInvocation.requiredArgs = Collections.emptyList();

    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = pkgNode.initFunction.pos;
    returnStmt.expr = userDefInitInvocation;
    initFnBody.stmts.add(returnStmt);
}
/**
 * Create invokable symbol for function, resolving the return type if needed and
 * registering each required parameter's symbol. The created symbol is assigned back
 * to {@code bLangFunction.symbol}.
 *
 * @param bLangFunction function node
 * @param env Symbol environment
 */
private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) {
    // Resolve the return type node lazily when it hasn't been resolved yet.
    BType returnType = bLangFunction.returnTypeNode.type == null ?
            symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env) : bLangFunction.returnTypeNode.type;
    BInvokableType invokableType = new BInvokableType(new ArrayList<>(), getRestType(bLangFunction),
                                                      returnType, null);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet),
                                                                   new Name(bLangFunction.name.value),
                                                                   env.enclPkg.packageID, invokableType,
                                                                   env.enclPkg.symbol, true);
    functionSymbol.retType = returnType;
    // Add parameters to the symbol.
    for (BLangVariable param : bLangFunction.requiredParams) {
        functionSymbol.params.add(param.symbol);
    }

    functionSymbol.scope = new Scope(functionSymbol);
    bLangFunction.symbol = functionSymbol;
}
/**
 * Appends a nil return statement ({@code return ();}) to the given block.
 *
 * @param bLangBlockStmt block statement node to append to
 */
private void addNilReturnStatement(BlockNode bLangBlockStmt) {
    DiagnosticPos blockPos = ((BLangNode) bLangBlockStmt).pos;
    bLangBlockStmt.addStatement(ASTBuilderUtil.createNilReturnStmt(blockPos, symTable.nilType));
}
/**
 * Wraps an XML namespace declaration node in a declaration statement.
 *
 * @param xmlns XMLNS node
 * @return the wrapping XMLNS declaration statement
 */
private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) {
    BLangXMLNSStatement declStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    declStmt.pos = xmlns.pos;
    declStmt.xmlnsDecl = xmlns;
    return declStmt;
}
/**
 * Desugars a whole package: creates the lifecycle functions, lifts attached
 * functions, populates the init/start/stop bodies, rewrites every top-level
 * construct, and finally splits the init function. Idempotent per package
 * (guarded by the completed-phases set).
 */
@Override
public void visit(BLangPackage pkgNode) {
    if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
        result = pkgNode;
        return;
    }
    createPackageInitFunctions(pkgNode, env);
    // Adding object functions to package level.
    addAttachedFunctionsToPackageLevel(pkgNode, env);

    // Literal-valued constants carry an associated singleton type definition that
    // must be registered with the package.
    pkgNode.constants.stream()
            .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
                    constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
            .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));

    BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;

    // Map-typed constants are initialized in the init function with an immutable
    // (cloneReadOnly) copy of their value.
    for (BLangConstant constant : pkgNode.constants) {
        if (constant.symbol.type.tag == TypeTags.MAP) {
            BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
            constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode,
                                                                           pkgNode.initFunction.symbol.scope, env));
            BLangInvocation frozenConstValExpr =
                    createLangLibInvocationNode(
                            "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos);
            BLangAssignment constInit =
                    ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
            initFnBody.stmts.add(constInit);
        }
    }

    // Global variable initializers become assignments in the init function.
    pkgNode.globalVars.forEach(globalVar -> {
        BLangAssignment assignment = createAssignmentStmt(globalVar);
        if (assignment.expr != null) {
            initFnBody.stmts.add(assignment);
        }
    });

    pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));

    annotationDesugar.rewritePackageAnnotations(pkgNode, env);

    // Add invocation for user-specified module init function (or a nil return).
    addUserDefinedModuleInitInvocationAndReturn(pkgNode);

    // Sort type definitions by precedence, then rewrite every top-level construct.
    // The order of these rewrites is intentional; do not reorder.
    pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));

    pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
    pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
    pkgNode.constants = rewrite(pkgNode.constants, env);
    pkgNode.globalVars = rewrite(pkgNode.globalVars, env);

    pkgNode.functions = rewrite(pkgNode.functions, env);

    serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction);
    ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body);

    addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body);
    addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body);

    // Split the (possibly huge) init function before rewriting the lifecycle functions.
    pkgNode.initFunction = splitInitFunction(pkgNode, env);
    pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
    pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
    pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);

    // Invoke closure desugar.
    closureDesugar.visit(pkgNode);

    for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) {
        rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol));
    }
    pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
    // Reset the intermediate-init-function counter for the next package.
    initFuncIndex = 0;
    result = pkgNode;
}
/**
 * Desugars an imported package by rewriting the imported package's own AST in its
 * symbol environment; the import node itself is returned unchanged.
 */
@Override
public void visit(BLangImportPackage importPkgNode) {
    SymbolEnv importedPkgEnv = this.symTable.pkgEnvMap.get(importPkgNode.symbol);
    rewrite(importedPkgEnv.node, importedPkgEnv);
    result = importPkgNode;
}
/**
 * Desugars a type definition: object and record type nodes are rewritten; all
 * annotation attachments are rewritten regardless of the type node's kind.
 */
@Override
public void visit(BLangTypeDefinition typeDef) {
    NodeKind typeNodeKind = typeDef.typeNode.getKind();
    if (typeNodeKind == NodeKind.OBJECT_TYPE || typeNodeKind == NodeKind.RECORD_TYPE) {
        typeDef.typeNode = rewrite(typeDef.typeNode, env);
    }
    typeDef.annAttachments.forEach(annAttachment -> rewrite(annAttachment, env));
    result = typeDef;
}
/**
 * Desugars an object type node: merges referenced (inherited) fields, seeds the
 * generated init function with field-default assignments, chains in the
 * user-defined init invocation, and rewrites all attached functions.
 */
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Merge fields pulled in via type references with the object's own fields.
    objectTypeNode.fields.addAll(objectTypeNode.referencedFields);

    // Abstract objects have no initializer to populate; nothing more to do.
    if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
        result = objectTypeNode;
        return;
    }

    for (BLangSimpleVariable bLangSimpleVariable : objectTypeNode.fields) {
        bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
    }

    // Add any field default-value expression (not already covered by an existing
    // init statement) as an assignment inside the generated init function.
    Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts;
    for (BLangSimpleVariable field : objectTypeNode.fields) {
        // Adding init statements of the struct fields to the generated init function.
        if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) {
            initFuncStmts.put(field.symbol,
                              createStructFieldUpdate(objectTypeNode.generatedInitFunction, field,
                                                      objectTypeNode.generatedInitFunction.receiver.symbol));
        }
    }

    // Prepend the collected field-initialization statements to the generated init body.
    BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]);
    BLangBlockFunctionBody generatedInitFnBody =
            (BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body;
    int i;
    for (i = 0; i < initStmts.length; i++) {
        generatedInitFnBody.stmts.add(i, initStmts[i]);
    }

    // The generated init ends by returning the user-defined init's result, if present.
    if (objectTypeNode.initFunction != null) {
        ((BLangReturn) generatedInitFnBody.stmts.get(i)).expr =
                createUserDefinedInitInvocation(objectTypeNode);
    }

    // Rewrite the attached functions, then both initializers.
    for (BLangFunction fn : objectTypeNode.functions) {
        rewrite(fn, this.env);
    }
    rewrite(objectTypeNode.generatedInitFunction, this.env);
    rewrite(objectTypeNode.initFunction, this.env);

    result = objectTypeNode;
}
/**
 * Builds an invocation of the user-defined {@code init} method, forwarding
 * the generated init function's required parameters (and rest parameter,
 * if any) as arguments.
 */
private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) {
    ArrayList<BLangExpression> paramRefs = new ArrayList<>();
    for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) {
        paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol));
    }
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos,
            ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol,
            paramRefs, Collections.emptyList(), symResolver);
    // Forward the rest parameter as a spread (rest-args) expression.
    if (objectTypeNode.generatedInitFunction.restParam != null) {
        BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos,
                objectTypeNode.generatedInitFunction.restParam.symbol);
        BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();
        bLangRestArgsExpression.expr = restVarRef;
        bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos;
        bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type;
        bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type;
        invocation.restArgs.add(bLangRestArgsExpression);
    }
    invocation.exprSymbol =
            ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol;
    return rewriteExpr(invocation);
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Merge fields contributed by referenced types into the own field list.
    recordTypeNode.fields.addAll(recordTypeNode.referencedFields);
    for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) {
        bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
    }

    // Lazily create the record init function and register it at module level.
    if (recordTypeNode.initFunction == null) {
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                names, symTable);
        env.enclPkg.addFunction(recordTypeNode.initFunction);
        env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction);
    }

    // Default-value assignments for non-optional fields that have an
    // initializer but no init statement yet.
    for (BLangSimpleVariable field : recordTypeNode.fields) {
        if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol) &&
                !Symbols.isOptional(field.symbol) && field.expr != null) {
            recordTypeNode.initFunction.initFunctionStmts
                    .put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field,
                            recordTypeNode.initFunction.receiver.symbol));
        }
    }

    // Prepend the collected init statements to the init function body.
    BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts
            .values().toArray(new BLangStatement[0]);
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body;
    for (int i = 0; i < recordTypeNode.initFunction.initFunctionStmts.size(); i++) {
        initFnBody.stmts.add(i, initStmts[i]);
    }

    // A local anonymous record is hoisted to a module-level type definition
    // and replaced by a reference to its generated name.
    if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) {
        BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode);
        TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol, recordTypeNode,
                env);
        recordTypeNode.desugared = true;
        result = userDefinedType;
        return;
    }
    result = recordTypeNode;
}
/**
 * Creates a user-defined type reference for a hoisted local anonymous record,
 * using the generated type symbol's name.
 */
private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) {
    String generatedTypeName = recordTypeNode.symbol.name.value;
    return ASTBuilderUtil.createUserDefineTypeNode(generatedTypeName, recordTypeNode.type, recordTypeNode.pos);
}
@Override
public void visit(BLangArrayType arrayType) {
    // Desugar the element type in place.
    arrayType.elemtype = rewrite(arrayType.elemtype, env);
    result = arrayType;
}

@Override
public void visit(BLangConstrainedType constrainedType) {
    // Desugar the constraint type of a parameterized type.
    constrainedType.constraint = rewrite(constrainedType.constraint, env);
    result = constrainedType;
}

@Override
public void visit(BLangStreamType streamType) {
    // Desugar both the stream's constraint type and its error type.
    streamType.constraint = rewrite(streamType.constraint, env);
    streamType.error = rewrite(streamType.error, env);
    result = streamType;
}

@Override
public void visit(BLangValueType valueType) {
    // Built-in value types need no desugaring; pass through unchanged.
    result = valueType;
}

@Override
public void visit(BLangUserDefinedType userDefinedType) {
    // Nothing to rewrite for a type reference; pass through unchanged.
    result = userDefinedType;
}

@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
    // Rewrite each member type into a fresh list, then swap it in.
    List<BLangType> rewrittenMembers = new ArrayList<>();
    unionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env)));
    unionTypeNode.memberTypeNodes = rewrittenMembers;
    result = unionTypeNode;
}

@Override
public void visit(BLangErrorType errorType) {
    // Desugar the error's detail type.
    errorType.detailType = rewrite(errorType.detailType, env);
    result = errorType;
}

@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    // Rewrite parameter types and the return type of a function type.
    functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env));
    functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env);
    result = functionTypeNode;
}

@Override
public void visit(BLangBuiltInRefTypeNode refTypeNode) {
    // Built-in reference types pass through unchanged.
    result = refTypeNode;
}

@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
    // Rewrite every tuple member type and the rest-parameter type.
    List<BLangType> rewrittenMembers = new ArrayList<>();
    tupleTypeNode.memberTypeNodes.forEach(member -> rewrittenMembers.add(rewrite(member, env)));
    tupleTypeNode.memberTypeNodes = rewrittenMembers;
    tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env);
    result = tupleTypeNode;
}
@Override
public void visit(BLangBlockFunctionBody body) {
    // Statements are rewritten inside a dedicated function-body environment.
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    body.stmts = rewriteStmt(body.stmts, bodyEnv);
    result = body;
}
@Override
public void visit(BLangExprFunctionBody exprBody) {
    // An expression body is turned into a block body with a single return.
    BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>());
    // NOTE(review): createReturnStmt presumably attaches the created return
    // statement to `body` — the statement is never added explicitly here;
    // confirm against ASTBuilderUtil.
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body);
    returnStmt.expr = rewriteExpr(exprBody.expr);
    result = body;
}
@Override
public void visit(BLangExternalFunctionBody body) {
    // External function bodies carry no statements; only their annotation
    // attachments are rewritten.
    body.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = body;
}
@Override | class Desugar extends BLangNodeVisitor {
// Key under which the per-context singleton Desugar instance is registered.
private static final CompilerContext.Key<Desugar> DESUGAR_KEY =
        new CompilerContext.Key<>();

// Names of runtime / lang-lib functions invoked from desugared code.
private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause";
private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause";
private static final String BASE_64 = "base64";
private static final String ERROR_REASON_FUNCTION_NAME = "reason";
private static final String ERROR_DETAIL_FUNCTION_NAME = "detail";
private static final String TO_STRING_FUNCTION_NAME = "toString";
private static final String LENGTH_FUNCTION_NAME = "length";
private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException";
private static final String CONSTRUCT_FROM = "constructFrom";
private static final String SLICE_LANGLIB_METHOD = "slice";
private static final String PUSH_LANGLIB_METHOD = "push";
private static final String DESUGARED_VARARG_KEY = "$vararg$";
public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants";
public static final String XML_INTERNAL_CHILDREN = "children";
public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat";
public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting";
public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute";
public static final String XML_INTERNAL_GET_ELEMENTS = "getElements";

// Collaborating compiler components, resolved from the CompilerContext.
private SymbolTable symTable;
private SymbolResolver symResolver;
private final SymbolEnter symbolEnter;
private ClosureDesugar closureDesugar;
private QueryDesugar queryDesugar;
private AnnotationDesugar annotationDesugar;
private Types types;
private Names names;
private ServiceDesugar serviceDesugar;
// Holds the node produced by the most recent visit(...) call.
private BLangNode result;
private NodeCloner nodeCloner;
private SemanticAnalyzer semanticAnalyzer;
private BLangStatementLink currentLink;
public Stack<BLangLockStmt> enclLocks = new Stack<>();
// Current symbol environment while walking the tree.
private SymbolEnv env;

// Counters used to generate unique names for synthesized constructs.
private int lambdaFunctionCount = 0;
private int transactionIndex = 0;
private int recordCount = 0;
private int errorCount = 0;
private int annonVarCount = 0;
private int initFuncIndex = 0;
private int indexExprCount = 0;
private int letCount = 0;
private int varargCount = 0;

// State shared between match/safe-navigation desugaring steps.
private Stack<BLangMatch> matchStmtStack = new Stack<>();
Stack<BLangExpression> accessExprStack = new Stack<>();
private BLangMatchTypedBindingPatternClause successPattern;
private BLangAssignment safeNavigationAssignment;
static boolean isJvmTarget = false;
/**
 * Returns the per-context Desugar instance, creating it on first use.
 * The constructor registers the new instance under {@code DESUGAR_KEY}.
 */
public static Desugar getInstance(CompilerContext context) {
    Desugar instance = context.get(DESUGAR_KEY);
    return instance == null ? new Desugar(context) : instance;
}
/**
 * Resolves all collaborating compiler components from the context and
 * registers this instance under {@code DESUGAR_KEY}.
 */
private Desugar(CompilerContext context) {
    // This desugar implementation targets the JVM backend.
    isJvmTarget = true;
    context.put(DESUGAR_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.closureDesugar = ClosureDesugar.getInstance(context);
    this.queryDesugar = QueryDesugar.getInstance(context);
    this.annotationDesugar = AnnotationDesugar.getInstance(context);
    this.types = Types.getInstance(context);
    // Fix: the original assigned `names` twice in a row; keep a single assignment.
    this.names = Names.getInstance(context);
    this.serviceDesugar = ServiceDesugar.getInstance(context);
    this.nodeCloner = NodeCloner.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
}
/**
 * Entry point of the desugar phase: rewrites the given package in its
 * package-level symbol environment and returns the desugared package.
 */
public BLangPackage perform(BLangPackage pkgNode) {
    annotationDesugar.initializeAnnotationMap(pkgNode);
    SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    return rewrite(pkgNode, env);
}
/**
 * Hoists object-attached functions and object/record init functions to
 * module level so they are compiled like ordinary module functions.
 */
private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) {
    for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) {
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            continue;
        }
        if (typeDef.symbol.tag == SymTag.OBJECT) {
            BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
            // Hoist each attached function exactly once.
            objectTypeNode.functions.forEach(f -> {
                if (!pkgNode.objAttachedFunctions.contains(f.symbol)) {
                    pkgNode.functions.add(f);
                    pkgNode.topLevelNodes.add(f);
                }
            });
            // Abstract objects get no generated init function.
            if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
                continue;
            }
            // Synthesize, analyze, and hoist the generated init function.
            BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env);
            tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                    tempGeneratedInitFunction.symbol.scope, env);
            this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
            objectTypeNode.generatedInitFunction = tempGeneratedInitFunction;
            pkgNode.functions.add(objectTypeNode.generatedInitFunction);
            pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction);
            // Also hoist the user-defined init function when present.
            if (objectTypeNode.initFunction != null) {
                pkgNode.functions.add(objectTypeNode.initFunction);
                pkgNode.topLevelNodes.add(objectTypeNode.initFunction);
            }
        } else if (typeDef.symbol.tag == SymTag.RECORD) {
            BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode;
            recordTypeNode.initFunction = rewrite(
                    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                    env);
            pkgNode.functions.add(recordTypeNode.initFunction);
            pkgNode.topLevelNodes.add(recordTypeNode.initFunction);
        }
    }
}
/**
 * Creates the synthesized init function for an object type. When a
 * user-defined {@code init} exists, the generated function mirrors its
 * parameters, rest parameter, and return type so it can delegate to it.
 */
private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) {
    BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env);
    if (objectTypeNode.initFunction == null) {
        return generatedInitFunc;
    }
    BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc;
    BAttachedFunction generatedInitializerFunc =
            ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc;
    addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc,
            generatedInitializerFunc);
    addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc);
    // Copy the user init's signature onto the generated function's symbol.
    generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode;
    generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type;
    ((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes;
    ((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType;
    ((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType;
    generatedInitializerFunc.type = initializerFunc.type;
    // Mark as not yet desugared so the generated function is processed later.
    generatedInitFunc.desugared = false;
    return generatedInitFunc;
}
/**
 * Copies each required parameter of the user-defined init function onto the
 * generated init function, cloning default-value expressions and creating
 * fresh symbols owned by the same package/owner.
 */
private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                      BAttachedFunction generatedInitializerFunc) {
    if (initFunction.requiredParams.isEmpty()) {
        return;
    }
    for (BLangSimpleVariable requiredParameter : initFunction.requiredParams) {
        BLangSimpleVariable var =
                ASTBuilderUtil.createVariable(initFunction.pos,
                        requiredParameter.name.getValue(), requiredParameter.type,
                        createRequiredParamExpr(requiredParameter.expr),
                        new BVarSymbol(0, names.fromString(requiredParameter.name.getValue()),
                                requiredParameter.symbol.pkgID,
                                requiredParameter.type, requiredParameter.symbol.owner));
        generatedInitFunc.requiredParams.add(var);
        generatedInitializerFunc.symbol.params.add(var.symbol);
    }
}
/**
 * Produces a copy of a parameter's default-value expression for use in the
 * generated init function: lambdas are re-created, other expressions are
 * cloned, and arrow-function names are mangled ({@code $name$}) to avoid
 * clashing with the original.
 */
private BLangExpression createRequiredParamExpr(BLangExpression expr) {
    if (expr == null) {
        return null;
    }
    if (expr.getKind() == NodeKind.LAMBDA) {
        BLangFunction func = ((BLangLambdaFunction) expr).function;
        return createLambdaFunction(func.pos, func.name.value, func.requiredParams, func.returnTypeNode, func.body);
    }
    BLangExpression expression = this.nodeCloner.clone(expr);
    if (expression.getKind() == NodeKind.ARROW_EXPR) {
        BLangIdentifier func = (BLangIdentifier) ((BLangArrowFunction) expression).functionName;
        ((BLangArrowFunction) expression).functionName = ASTBuilderUtil.createIdentifier(func.pos,
                "$" + func.getValue() + "$");
    }
    return expression;
}
/**
 * Copies the rest parameter (if any) of the user-defined init function onto
 * the generated init function, with a fresh symbol.
 */
private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                  BAttachedFunction generatedInitializerFunc) {
    if (initFunction.restParam == null) {
        return;
    }
    BLangSimpleVariable restParam = initFunction.restParam;
    generatedInitFunc.restParam =
            ASTBuilderUtil.createVariable(initFunction.pos,
                    restParam.name.getValue(), restParam.type, null, new BVarSymbol(0,
                            names.fromString(restParam.name.getValue()), restParam.symbol.pkgID,
                            restParam.type, restParam.symbol.owner));
    generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol;
}
/**
 * Create the synthetic module lifecycle functions (init, start, stop) and
 * their invokable symbols. XML namespace declarations are emitted into the
 * init function body.
 *
 * @param pkgNode package node
 * @param env     symbol environment of package
 */
private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();
    pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
            Names.INIT_FUNCTION_SUFFIX,
            symTable);
    // Module-level xmlns declarations become statements at the top of init.
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    for (BLangXMLNS xmlns : pkgNode.xmlnsList) {
        initFnBody.addStatement(createNamespaceDeclrStatement(xmlns));
    }
    pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
            Names.START_FUNCTION_SUFFIX,
            symTable);
    pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias,
            Names.STOP_FUNCTION_SUFFIX);
    createInvokableSymbol(pkgNode.initFunction, env);
    createInvokableSymbol(pkgNode.startFunction, env);
    createInvokableSymbol(pkgNode.stopFunction, env);
}
/**
 * Terminates the module init function: if the user defined a module-level
 * {@code init} function, the synthetic init returns its invocation result;
 * otherwise a plain nil return is appended.
 */
private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) {
    // A user-defined module init is a non-attached function with the
    // reserved init name.
    Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream()
            .filter(bLangFunction -> !bLangFunction.attachedFunction &&
                    bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value))
            .findFirst();
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    if (!userDefInitOptional.isPresent()) {
        addNilReturnStatement(initFnBody);
        return;
    }
    // Build `return <user init>();` by hand from tree-builder nodes.
    BLangFunction userDefInit = userDefInitOptional.get();
    BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    userDefInitInvocation.pos = pkgNode.initFunction.pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(userDefInit.name.value);
    userDefInitInvocation.name = name;
    userDefInitInvocation.symbol = userDefInit.symbol;
    BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    pkgAlias.setLiteral(false);
    pkgAlias.setValue(pkgNode.packageID.name.value);
    userDefInitInvocation.pkgAlias = pkgAlias;
    userDefInitInvocation.type = userDefInit.returnTypeNode.type;
    userDefInitInvocation.requiredArgs = Collections.emptyList();
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = pkgNode.initFunction.pos;
    returnStmt.expr = userDefInitInvocation;
    initFnBody.stmts.add(returnStmt);
}
/**
 * Create invokable symbol for function.
 *
 * @param bLangFunction function node
 * @param env           Symbol environment
 */
private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) {
    // Resolve the return type lazily if the type node has not been resolved yet.
    BType returnType = bLangFunction.returnTypeNode.type == null ?
            symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env) : bLangFunction.returnTypeNode.type;
    BInvokableType invokableType = new BInvokableType(new ArrayList<>(), getRestType(bLangFunction),
            returnType, null);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet),
            new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true);
    functionSymbol.retType = returnType;
    for (BLangVariable param : bLangFunction.requiredParams) {
        functionSymbol.params.add(param.symbol);
    }
    functionSymbol.scope = new Scope(functionSymbol);
    bLangFunction.symbol = functionSymbol;
}
/**
 * Appends a `return ();` (nil return) statement to the given block.
 *
 * @param bLangBlockStmt block statement node
 */
private void addNilReturnStatement(BlockNode bLangBlockStmt) {
    bLangBlockStmt.addStatement(
            ASTBuilderUtil.createNilReturnStmt(((BLangNode) bLangBlockStmt).pos, symTable.nilType));
}
/**
 * Wraps an XMLNS declaration in a statement node so it can live inside a
 * function body.
 *
 * @param xmlns XMLNS node
 * @return XMLNS statement
 */
private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) {
    BLangXMLNSStatement statement = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    statement.pos = xmlns.pos;
    statement.xmlnsDecl = xmlns;
    return statement;
}
@Override
public void visit(BLangPackage pkgNode) {
    // Desugaring runs at most once per package.
    if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
        result = pkgNode;
        return;
    }
    // NOTE: the steps below are order-sensitive; init/start/stop must exist
    // before anything appends statements to their bodies.
    createPackageInitFunctions(pkgNode, env);
    addAttachedFunctionsToPackageLevel(pkgNode, env);
    // Hoist type definitions associated with simple-literal constants.
    pkgNode.constants.stream()
            .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
                    constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
            .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
    BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    // Map-typed constants are initialized in the init function with a
    // read-only clone of their value.
    for (BLangConstant constant : pkgNode.constants) {
        if (constant.symbol.type.tag == TypeTags.MAP) {
            BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
            constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode,
                    pkgNode.initFunction.symbol.scope, env));
            BLangInvocation frozenConstValExpr =
                    createLangLibInvocationNode(
                            "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos);
            BLangAssignment constInit =
                    ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
            initFnBody.stmts.add(constInit);
        }
    }
    // Global variable initializers become assignments in the init function.
    pkgNode.globalVars.forEach(globalVar -> {
        BLangAssignment assignment = createAssignmentStmt(globalVar);
        if (assignment.expr != null) {
            initFnBody.stmts.add(assignment);
        }
    });
    pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
    annotationDesugar.rewritePackageAnnotations(pkgNode, env);
    // Must run after annotation-desugar so the init body is complete.
    addUserDefinedModuleInitInvocationAndReturn(pkgNode);
    pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
    pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
    pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
    pkgNode.constants = rewrite(pkgNode.constants, env);
    pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
    pkgNode.functions = rewrite(pkgNode.functions, env);
    serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction);
    ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body);
    addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body);
    addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body);
    // Split a potentially oversized init function, then rewrite lifecycle
    // functions and run closure desugaring over the whole package.
    pkgNode.initFunction = splitInitFunction(pkgNode, env);
    pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
    pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
    pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
    closureDesugar.visit(pkgNode);
    for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) {
        rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol));
    }
    pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
    initFuncIndex = 0;
    result = pkgNode;
}
@Override
public void visit(BLangImportPackage importPkgNode) {
    // Desugar the imported package within its own environment first.
    SymbolEnv importedPkgEnv = this.symTable.pkgEnvMap.get(importPkgNode.symbol);
    rewrite(importedPkgEnv.node, importedPkgEnv);
    result = importPkgNode;
}
@Override
public void visit(BLangTypeDefinition typeDef) {
    // Only object/record type nodes need their bodies desugared; annotation
    // attachments are always rewritten.
    if (typeDef.typeNode.getKind() == NodeKind.OBJECT_TYPE
            || typeDef.typeNode.getKind() == NodeKind.RECORD_TYPE) {
        typeDef.typeNode = rewrite(typeDef.typeNode, env);
    }
    typeDef.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = typeDef;
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Pull in fields that come from referenced types.
    objectTypeNode.fields.addAll(objectTypeNode.referencedFields);
    // Abstract objects carry no generated init function to fill in.
    if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
        result = objectTypeNode;
        return;
    }
    for (BLangSimpleVariable bLangSimpleVariable : objectTypeNode.fields) {
        bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
    }
    // Add default-value assignments for fields without init statements.
    Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts;
    for (BLangSimpleVariable field : objectTypeNode.fields) {
        if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) {
            initFuncStmts.put(field.symbol,
                    createStructFieldUpdate(objectTypeNode.generatedInitFunction, field,
                            objectTypeNode.generatedInitFunction.receiver.symbol));
        }
    }
    // Insert the field initializers at the head of the generated init body.
    BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]);
    BLangBlockFunctionBody generatedInitFnBody =
            (BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body;
    int i;
    for (i = 0; i < initStmts.length; i++) {
        generatedInitFnBody.stmts.add(i, initStmts[i]);
    }
    // The statement right after the inserted ones is an existing return (per
    // the cast); rewire it to invoke the user-defined init when present.
    if (objectTypeNode.initFunction != null) {
        ((BLangReturn) generatedInitFnBody.stmts.get(i)).expr =
                createUserDefinedInitInvocation(objectTypeNode);
    }
    for (BLangFunction fn : objectTypeNode.functions) {
        rewrite(fn, this.env);
    }
    rewrite(objectTypeNode.generatedInitFunction, this.env);
    rewrite(objectTypeNode.initFunction, this.env);
    result = objectTypeNode;
}
/**
 * Builds the call to the user-defined {@code init}, forwarding the generated
 * init function's required parameters and (if present) its rest parameter.
 */
private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) {
    ArrayList<BLangExpression> paramRefs = new ArrayList<>();
    for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) {
        paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol));
    }
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos,
            ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol,
            paramRefs, Collections.emptyList(), symResolver);
    // Forward the rest parameter as a spread (rest-args) expression.
    if (objectTypeNode.generatedInitFunction.restParam != null) {
        BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos,
                objectTypeNode.generatedInitFunction.restParam.symbol);
        BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();
        bLangRestArgsExpression.expr = restVarRef;
        bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos;
        bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type;
        bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type;
        invocation.restArgs.add(bLangRestArgsExpression);
    }
    invocation.exprSymbol =
            ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol;
    return rewriteExpr(invocation);
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Pull in fields that come from referenced types.
    recordTypeNode.fields.addAll(recordTypeNode.referencedFields);
    for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) {
        bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
    }
    // Create and register the record init function on first use.
    if (recordTypeNode.initFunction == null) {
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                names, symTable);
        env.enclPkg.addFunction(recordTypeNode.initFunction);
        env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction);
    }
    // Default-value assignments for non-optional fields with initializers.
    for (BLangSimpleVariable field : recordTypeNode.fields) {
        if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol) &&
                !Symbols.isOptional(field.symbol) && field.expr != null) {
            recordTypeNode.initFunction.initFunctionStmts
                    .put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field,
                            recordTypeNode.initFunction.receiver.symbol));
        }
    }
    // Insert the collected init statements at the head of the init body.
    BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts
            .values().toArray(new BLangStatement[0]);
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body;
    for (int i = 0; i < recordTypeNode.initFunction.initFunctionStmts.size(); i++) {
        initFnBody.stmts.add(i, initStmts[i]);
    }
    // Hoist local anonymous records to module level and replace the inline
    // node with a reference to the generated type.
    if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) {
        BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode);
        TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol, recordTypeNode,
                env);
        recordTypeNode.desugared = true;
        result = userDefinedType;
        return;
    }
    result = recordTypeNode;
}
// Builds a user-defined type reference pointing at the anonymous record's
// generated type symbol; used to replace the inline record type node.
private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) {
    return ASTBuilderUtil.createUserDefineTypeNode(recordTypeNode.symbol.name.value, recordTypeNode.type,
            recordTypeNode.pos);
}
@Override
public void visit(BLangArrayType arrayType) {
    // Desugar the element type in place.
    arrayType.elemtype = rewrite(arrayType.elemtype, env);
    result = arrayType;
}

@Override
public void visit(BLangConstrainedType constrainedType) {
    // Desugar the constraint type of a parameterized type.
    constrainedType.constraint = rewrite(constrainedType.constraint, env);
    result = constrainedType;
}

@Override
public void visit(BLangStreamType streamType) {
    // Desugar both the stream's constraint type and its error type.
    streamType.constraint = rewrite(streamType.constraint, env);
    streamType.error = rewrite(streamType.error, env);
    result = streamType;
}

@Override
public void visit(BLangValueType valueType) {
    // Built-in value types need no desugaring; pass through unchanged.
    result = valueType;
}

@Override
public void visit(BLangUserDefinedType userDefinedType) {
    // Nothing to rewrite for a type reference; pass through unchanged.
    result = userDefinedType;
}

@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
    // Rewrite each member type into a fresh list, then swap it in.
    List<BLangType> rewrittenMembers = new ArrayList<>();
    unionTypeNode.memberTypeNodes.forEach(typeNode -> rewrittenMembers.add(rewrite(typeNode, env)));
    unionTypeNode.memberTypeNodes = rewrittenMembers;
    result = unionTypeNode;
}

@Override
public void visit(BLangErrorType errorType) {
    // Desugar the error's detail type.
    errorType.detailType = rewrite(errorType.detailType, env);
    result = errorType;
}

@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    // Rewrite parameter types and the return type of a function type.
    functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env));
    functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env);
    result = functionTypeNode;
}

@Override
public void visit(BLangBuiltInRefTypeNode refTypeNode) {
    // Built-in reference types pass through unchanged.
    result = refTypeNode;
}

@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
    // Rewrite every tuple member type and the rest-parameter type.
    List<BLangType> rewrittenMembers = new ArrayList<>();
    tupleTypeNode.memberTypeNodes.forEach(member -> rewrittenMembers.add(rewrite(member, env)));
    tupleTypeNode.memberTypeNodes = rewrittenMembers;
    tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env);
    result = tupleTypeNode;
}
@Override
public void visit(BLangBlockFunctionBody body) {
    // Statements are rewritten inside a dedicated function-body environment.
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    body.stmts = rewriteStmt(body.stmts, bodyEnv);
    result = body;
}
@Override
public void visit(BLangExprFunctionBody exprBody) {
    // Convert an expression body into a block body with a single return.
    BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>());
    // NOTE(review): createReturnStmt presumably adds the return statement to
    // `body` — nothing is appended explicitly here; confirm in ASTBuilderUtil.
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body);
    returnStmt.expr = rewriteExpr(exprBody.expr);
    result = body;
}
@Override
public void visit(BLangExternalFunctionBody body) {
    // External function bodies carry no statements; only their annotation
    // attachments are rewritten.
    for (BLangAnnotationAttachment attachment : body.annAttachments) {
        rewrite(attachment, env);
    }
    result = body;
}
@Override |
Yeah I wasn't sure how this architecture was intended to work. I feel like we should have a consistent way to set up the configuration but we do not yet. So again I opted to preserve the existing behavior as much as possible. | public void stop() {
if (oldSystemProps != null) {
for (Map.Entry<String, String> e : oldSystemProps.entrySet()) {
if (e.getValue() == null) {
System.clearProperty(e.getKey());
} else {
System.setProperty(e.getKey(), e.getValue());
}
}
}
oldSystemProps = null;
for (QuarkusTestResourceLifecycleManager testResource : testResources) {
try {
testResource.stop();
} catch (Exception e) {
throw new RuntimeException("Unable to stop Quarkus test resource " + testResource, e);
}
}
ConfigProviderResolver cpr = ConfigProviderResolver.instance();
try {
cpr.releaseConfig(cpr.getConfig());
} catch (IllegalStateException ignored) {
}
} | cpr.releaseConfig(cpr.getConfig()); | public void stop() {
if (oldSystemProps != null) {
for (Map.Entry<String, String> e : oldSystemProps.entrySet()) {
if (e.getValue() == null) {
System.clearProperty(e.getKey());
} else {
System.setProperty(e.getKey(), e.getValue());
}
}
}
oldSystemProps = null;
for (QuarkusTestResourceLifecycleManager testResource : testResources) {
try {
testResource.stop();
} catch (Exception e) {
throw new RuntimeException("Unable to stop Quarkus test resource " + testResource, e);
}
}
ConfigProviderResolver cpr = ConfigProviderResolver.instance();
try {
cpr.releaseConfig(cpr.getConfig());
} catch (IllegalStateException ignored) {
}
} | class TestResourceManager {
// Lifecycle managers discovered for the test class, in their sorted order.
private final List<QuarkusTestResourceLifecycleManager> testResources;
// Snapshot of system properties taken in start(); null until start() runs,
// used by stop() to restore the previous values.
private Map<String, String> oldSystemProps;
/**
 * Discovers all test resource lifecycle managers declared for (or visible
 * to) the given test class.
 */
public TestResourceManager(Class<?> testClass) {
    testResources = getTestResources(testClass);
}
/**
 * Starts every test resource and applies the properties they produce as
 * system properties, remembering the previous values so stop() can restore
 * them.
 *
 * @return the combined configuration properties produced by all resources
 */
public Map<String, String> start() {
    Map<String, String> props = new HashMap<>();
    for (QuarkusTestResourceLifecycleManager resource : testResources) {
        try {
            props.putAll(resource.start());
        } catch (Exception e) {
            throw new RuntimeException("Unable to start Quarkus test resource " + resource, e);
        }
    }
    // Snapshot current system properties, then apply the new values.
    oldSystemProps = new HashMap<>();
    for (Map.Entry<String, String> entry : props.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        oldSystemProps.put(key, System.getProperty(key));
        if (value == null) {
            System.clearProperty(key);
        } else {
            System.setProperty(key, value);
        }
    }
    return props;
}
/**
 * Lets every lifecycle manager inject state into the given test instance.
 */
public void inject(Object testInstance) {
    testResources.forEach(testResource -> testResource.inject(testInstance));
}
/**
 * Collects all {@link QuarkusTestResourceLifecycleManager}s for the test
 * class: those declared via {@code @QuarkusTestResource} (including the
 * repeatable container form) in the indexed test classes, plus any
 * registered through the {@link ServiceLoader} mechanism. The result is
 * sorted with {@link QuarkusTestResourceLifecycleManagerComparator}.
 */
@SuppressWarnings("unchecked")
private List<QuarkusTestResourceLifecycleManager> getTestResources(Class<?> testClass) {
    IndexView index = indexTestClasses(testClass);

    Set<Class<? extends QuarkusTestResourceLifecycleManager>> testResourceRunnerClasses = new LinkedHashSet<>();

    // Gather both single annotations and those nested in the repeatable
    // container annotation.
    Set<AnnotationInstance> testResourceAnnotations = new HashSet<>();
    testResourceAnnotations.addAll(index.getAnnotations(DotName.createSimple(QuarkusTestResource.class.getName())));
    for (AnnotationInstance annotation : index
            .getAnnotations(DotName.createSimple(QuarkusTestResource.List.class.getName()))) {
        Collections.addAll(testResourceAnnotations, annotation.value().asNestedArray());
    }

    for (AnnotationInstance annotation : testResourceAnnotations) {
        try {
            testResourceRunnerClasses.add((Class<? extends QuarkusTestResourceLifecycleManager>) Class
                    .forName(annotation.value().asString()));
        } catch (ClassNotFoundException e) {
            // Fix: chain the original exception so the root cause is not lost.
            throw new RuntimeException("Unable to find the class for the test resource " + annotation.value().asString(),
                    e);
        }
    }

    List<QuarkusTestResourceLifecycleManager> testResourceRunners = new ArrayList<>();
    for (Class<? extends QuarkusTestResourceLifecycleManager> testResourceRunnerClass : testResourceRunnerClasses) {
        try {
            testResourceRunners.add(testResourceRunnerClass.getConstructor().newInstance());
        } catch (InstantiationException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException | NoSuchMethodException | SecurityException e) {
            // Fix: chain the original exception so the root cause is not lost.
            throw new RuntimeException("Unable to instantiate the test resource " + testResourceRunnerClass, e);
        }
    }

    // Managers registered via META-INF/services are also picked up.
    for (QuarkusTestResourceLifecycleManager quarkusTestResourceLifecycleManager : ServiceLoader
            .load(QuarkusTestResourceLifecycleManager.class)) {
        testResourceRunners.add(quarkusTestResourceLifecycleManager);
    }

    testResourceRunners.sort(new QuarkusTestResourceLifecycleManagerComparator());
    return testResourceRunners;
}
private IndexView indexTestClasses(Class<?> testClass) {
final Indexer indexer = new Indexer();
final Path testClassesLocation = getTestClassesLocation(testClass);
try {
if (Files.isDirectory(testClassesLocation)) {
indexTestClassesDir(indexer, testClassesLocation);
} else {
try (FileSystem jarFs = FileSystems.newFileSystem(testClassesLocation, null)) {
for (Path p : jarFs.getRootDirectories()) {
indexTestClassesDir(indexer, p);
}
}
}
} catch (IOException e) {
throw new RuntimeException("Unable to index the test-classes/ directory.", e);
}
return indexer.complete();
}
private void indexTestClassesDir(Indexer indexer, final Path testClassesLocation) throws IOException {
Files.walkFileTree(testClassesLocation, new FileVisitor<Path>() {
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
throws IOException {
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (!file.toString().endsWith(".class")) {
return FileVisitResult.CONTINUE;
}
try (InputStream inputStream = Files.newInputStream(file, StandardOpenOption.READ)) {
indexer.index(inputStream);
} catch (Exception e) {
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
return FileVisitResult.CONTINUE;
}
});
}
} | class TestResourceManager {
private final List<QuarkusTestResourceLifecycleManager> testResources;
private Map<String, String> oldSystemProps;
public TestResourceManager(Class<?> testClass) {
testResources = getTestResources(testClass);
}
public Map<String, String> start() {
Map<String, String> ret = new HashMap<>();
for (QuarkusTestResourceLifecycleManager testResource : testResources) {
try {
ret.putAll(testResource.start());
} catch (Exception e) {
throw new RuntimeException("Unable to start Quarkus test resource " + testResource, e);
}
}
oldSystemProps = new HashMap<>();
for (Map.Entry<String, String> i : ret.entrySet()) {
oldSystemProps.put(i.getKey(), System.getProperty(i.getKey()));
if (i.getValue() == null) {
System.clearProperty(i.getKey());
} else {
System.setProperty(i.getKey(), i.getValue());
}
}
return ret;
}
public void inject(Object testInstance) {
for (QuarkusTestResourceLifecycleManager testResource : testResources) {
testResource.inject(testInstance);
}
}
@SuppressWarnings("unchecked")
private List<QuarkusTestResourceLifecycleManager> getTestResources(Class<?> testClass) {
IndexView index = indexTestClasses(testClass);
Set<Class<? extends QuarkusTestResourceLifecycleManager>> testResourceRunnerClasses = new LinkedHashSet<>();
Set<AnnotationInstance> testResourceAnnotations = new HashSet<>();
testResourceAnnotations.addAll(index.getAnnotations(DotName.createSimple(QuarkusTestResource.class.getName())));
for (AnnotationInstance annotation : index
.getAnnotations(DotName.createSimple(QuarkusTestResource.List.class.getName()))) {
Collections.addAll(testResourceAnnotations, annotation.value().asNestedArray());
}
for (AnnotationInstance annotation : testResourceAnnotations) {
try {
testResourceRunnerClasses.add((Class<? extends QuarkusTestResourceLifecycleManager>) Class
.forName(annotation.value().asString()));
} catch (ClassNotFoundException e) {
throw new RuntimeException("Unable to find the class for the test resource " + annotation.value().asString());
}
}
List<QuarkusTestResourceLifecycleManager> testResourceRunners = new ArrayList<>();
for (Class<? extends QuarkusTestResourceLifecycleManager> testResourceRunnerClass : testResourceRunnerClasses) {
try {
testResourceRunners.add(testResourceRunnerClass.getConstructor().newInstance());
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException
| InvocationTargetException | NoSuchMethodException | SecurityException e) {
throw new RuntimeException("Unable to instantiate the test resource " + testResourceRunnerClass);
}
}
for (QuarkusTestResourceLifecycleManager quarkusTestResourceLifecycleManager : ServiceLoader
.load(QuarkusTestResourceLifecycleManager.class)) {
testResourceRunners.add(quarkusTestResourceLifecycleManager);
}
Collections.sort(testResourceRunners, new QuarkusTestResourceLifecycleManagerComparator());
return testResourceRunners;
}
private IndexView indexTestClasses(Class<?> testClass) {
final Indexer indexer = new Indexer();
final Path testClassesLocation = getTestClassesLocation(testClass);
try {
if (Files.isDirectory(testClassesLocation)) {
indexTestClassesDir(indexer, testClassesLocation);
} else {
try (FileSystem jarFs = FileSystems.newFileSystem(testClassesLocation, null)) {
for (Path p : jarFs.getRootDirectories()) {
indexTestClassesDir(indexer, p);
}
}
}
} catch (IOException e) {
throw new RuntimeException("Unable to index the test-classes/ directory.", e);
}
return indexer.complete();
}
private void indexTestClassesDir(Indexer indexer, final Path testClassesLocation) throws IOException {
Files.walkFileTree(testClassesLocation, new FileVisitor<Path>() {
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
throws IOException {
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (!file.toString().endsWith(".class")) {
return FileVisitResult.CONTINUE;
}
try (InputStream inputStream = Files.newInputStream(file, StandardOpenOption.READ)) {
indexer.index(inputStream);
} catch (Exception e) {
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
return FileVisitResult.CONTINUE;
}
});
}
} |
Just curious, was there a case when it actually was a `String`? Which option was that? | public String[] getOptionValues(String name) {
final Object o = options.get(name);
return o == null ? null : o instanceof String ? new String[] { o.toString() } : (String[]) o;
} | return o == null ? null : o instanceof String ? new String[] { o.toString() } : (String[]) o; | public String[] getOptionValues(String name) {
final Object o = options.get(name);
return o == null ? null : o instanceof String ? new String[] { o.toString() } : (String[]) o;
} | class BootstrapMavenOptions {
public static Map<String, Object> parse(String cmdLine) {
if (cmdLine == null) {
return Collections.emptyMap();
}
final String[] args = cmdLine.split("\\s+");
if (args.length == 0) {
return Collections.emptyMap();
}
final String mavenHome = PropertyUtils.getProperty("maven.home");
if (mavenHome == null) {
return invokeParser(Thread.currentThread().getContextClassLoader(), args);
}
final Path mvnLib = Paths.get(mavenHome).resolve("lib");
if (!Files.exists(mvnLib)) {
throw new IllegalStateException("Maven lib dir does not exist: " + mvnLib);
}
final URL[] urls;
try (Stream<Path> files = Files.list(mvnLib)) {
final List<URL> list = files.map(p -> {
try {
return p.toUri().toURL();
} catch (MalformedURLException e) {
throw new IllegalStateException("Failed to translate " + p + " to URL", e);
}
}).collect(Collectors.toCollection(ArrayList::new));
list.add(getClassOrigin(BootstrapMavenOptions.class).toUri().toURL());
urls = list.toArray(new URL[list.size()]);
} catch (Exception e) {
throw new IllegalStateException("Failed to create a URL list out of " + mvnLib + " content", e);
}
final ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
try (URLClassLoader ucl = new URLClassLoader(urls, null)) {
Thread.currentThread().setContextClassLoader(ucl);
return invokeParser(ucl, args);
} catch (IOException e) {
throw new IllegalStateException("Failed to close URL classloader", e);
} finally {
Thread.currentThread().setContextClassLoader(originalCl);
}
}
public static BootstrapMavenOptions newInstance(String cmdLine) {
return new BootstrapMavenOptions(parse(cmdLine));
}
private final Map<String, Object> options;
private BootstrapMavenOptions(Map<String, Object> options) {
this.options = options;
}
public boolean hasOption(String name) {
return options.containsKey(name);
}
public String getOptionValue(String name) {
final Object o = options.get(name);
return o == null ? null : o.toString();
}
public boolean isEmpty() {
return options.isEmpty();
}
@SuppressWarnings("unchecked")
private static Map<String, Object> invokeParser(ClassLoader cl, String[] args) {
try {
final Class<?> parserCls = cl.loadClass("io.quarkus.bootstrap.resolver.maven.options.BootstrapMavenOptionsParser");
final Method parseMethod = parserCls.getMethod("parse", String[].class);
return (Map<String, Object>) parseMethod.invoke(null, (Object) args);
} catch (Exception e) {
throw new IllegalStateException("Failed to parse command line arguments " + Arrays.asList(args), e);
}
}
/**
* Returns the JAR or the root directory that contains the class file that is on the
* classpath of the context classloader
*/
public static Path getClassOrigin(Class<?> cls) throws IOException {
return getResourceOrigin(cls.getClassLoader(), cls.getName().replace('.', '/') + ".class");
}
public static Path getResourceOrigin(ClassLoader cl, final String name) throws IOException {
URL url = cl.getResource(name);
if (url == null) {
throw new IOException("Failed to locate the origin of " + name);
}
String classLocation = url.toExternalForm();
if (url.getProtocol().equals("jar")) {
classLocation = classLocation.substring(4, classLocation.length() - name.length() - 2);
} else {
classLocation = classLocation.substring(0, classLocation.length() - name.length());
}
return urlSpecToPath(classLocation);
}
private static Path urlSpecToPath(String urlSpec) throws IOException {
try {
return Paths.get(new URL(urlSpec).toURI());
} catch (Throwable e) {
throw new IOException(
"Failed to create an instance of " + Path.class.getName() + " from " + urlSpec, e);
}
}
} | class BootstrapMavenOptions {
public static Map<String, Object> parse(String cmdLine) {
if (cmdLine == null) {
return Collections.emptyMap();
}
final String[] args = cmdLine.split("\\s+");
if (args.length == 0) {
return Collections.emptyMap();
}
final String mavenHome = PropertyUtils.getProperty("maven.home");
if (mavenHome == null) {
return invokeParser(Thread.currentThread().getContextClassLoader(), args);
}
final Path mvnLib = Paths.get(mavenHome).resolve("lib");
if (!Files.exists(mvnLib)) {
throw new IllegalStateException("Maven lib dir does not exist: " + mvnLib);
}
final URL[] urls;
try (Stream<Path> files = Files.list(mvnLib)) {
final List<URL> list = files.map(p -> {
try {
return p.toUri().toURL();
} catch (MalformedURLException e) {
throw new IllegalStateException("Failed to translate " + p + " to URL", e);
}
}).collect(Collectors.toCollection(ArrayList::new));
list.add(getClassOrigin(BootstrapMavenOptions.class).toUri().toURL());
urls = list.toArray(new URL[list.size()]);
} catch (Exception e) {
throw new IllegalStateException("Failed to create a URL list out of " + mvnLib + " content", e);
}
final ClassLoader originalCl = Thread.currentThread().getContextClassLoader();
try (URLClassLoader ucl = new URLClassLoader(urls, null)) {
Thread.currentThread().setContextClassLoader(ucl);
return invokeParser(ucl, args);
} catch (IOException e) {
throw new IllegalStateException("Failed to close URL classloader", e);
} finally {
Thread.currentThread().setContextClassLoader(originalCl);
}
}
public static BootstrapMavenOptions newInstance(String cmdLine) {
return new BootstrapMavenOptions(parse(cmdLine));
}
private final Map<String, Object> options;
private BootstrapMavenOptions(Map<String, Object> options) {
this.options = options;
}
public boolean hasOption(String name) {
return options.containsKey(name);
}
public String getOptionValue(String name) {
final Object o = options.get(name);
return o == null ? null : o.toString();
}
public boolean isEmpty() {
return options.isEmpty();
}
@SuppressWarnings("unchecked")
private static Map<String, Object> invokeParser(ClassLoader cl, String[] args) {
try {
final Class<?> parserCls = cl.loadClass("io.quarkus.bootstrap.resolver.maven.options.BootstrapMavenOptionsParser");
final Method parseMethod = parserCls.getMethod("parse", String[].class);
return (Map<String, Object>) parseMethod.invoke(null, (Object) args);
} catch (Exception e) {
throw new IllegalStateException("Failed to parse command line arguments " + Arrays.asList(args), e);
}
}
/**
* Returns the JAR or the root directory that contains the class file that is on the
* classpath of the context classloader
*/
public static Path getClassOrigin(Class<?> cls) throws IOException {
return getResourceOrigin(cls.getClassLoader(), cls.getName().replace('.', '/') + ".class");
}
public static Path getResourceOrigin(ClassLoader cl, final String name) throws IOException {
URL url = cl.getResource(name);
if (url == null) {
throw new IOException("Failed to locate the origin of " + name);
}
String classLocation = url.toExternalForm();
if (url.getProtocol().equals("jar")) {
classLocation = classLocation.substring(4, classLocation.length() - name.length() - 2);
} else {
classLocation = classLocation.substring(0, classLocation.length() - name.length());
}
return urlSpecToPath(classLocation);
}
private static Path urlSpecToPath(String urlSpec) throws IOException {
try {
return Paths.get(new URL(urlSpec).toURI());
} catch (Throwable e) {
throw new IOException(
"Failed to create an instance of " + Path.class.getName() + " from " + urlSpec, e);
}
}
} |
We should use logger here and at other places in this class. It is available from TestSuiteBase, which gets it from DocumentClientTest | public void beforeClass() throws Exception {
System.out.println("OrderbyDocumentQueryTest.beforeClass");
client = clientBuilder().build();
createdDatabase = getSharedCosmosDatabase(client);
createdCollection = getSharedMultiPartitionCosmosContainer(client);
System.out.println("bef: truncate collection");
truncateCollection(createdCollection);
System.out.println("after: truncate collection");
List<Map<String, Object>> keyValuePropsList = new ArrayList<>();
Map<String, Object> props;
System.out.println("bef: create 30 docs");
for(int i = 0; i < 30; i++) {
props = new HashMap<>();
props.put("propInt", i);
props.put("propStr", String.valueOf(i));
keyValuePropsList.add(props);
}
System.out.println("OrderbyDocumentQueryTest.beforeClass : created 30 docs");
props = new HashMap<>();
keyValuePropsList.add(props);
createdDocuments = bulkInsert(createdCollection, keyValuePropsList);
for(int i = 0; i < 10; i++) {
Map<String, Object> p = new HashMap<>();
p.put("propScopedPartitionInt", i);
CosmosItemProperties doc = getDocumentDefinition("duplicateParitionKeyValue", UUID.randomUUID().toString(), p);
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
options.partitionKey(new PartitionKey(doc.get("mypk")));
createdDocuments.add(createDocument(createdCollection, doc).read(options).block().properties());
}
System.out.println("OrderbyDocumentQueryTest.beforeClass : created 10 docs");
numberOfPartitions = CosmosBridgeInternal.getAsyncDocumentClient(client)
.readPartitionKeyRanges("dbs/" + createdDatabase.id() + "/colls/" + createdCollection.id(), null)
.flatMap(p -> Flux.fromIterable(p.results())).collectList().single().block().size();
System.out.println("numberOfPartitions = " + numberOfPartitions);
waitIfNeededForReplicasToCatchUp(clientBuilder());
} | System.out.println("OrderbyDocumentQueryTest.beforeClass"); | public void beforeClass() throws Exception {
client = clientBuilder().build();
createdDatabase = getSharedCosmosDatabase(client);
createdCollection = getSharedMultiPartitionCosmosContainer(client);
truncateCollection(createdCollection);
List<Map<String, Object>> keyValuePropsList = new ArrayList<>();
Map<String, Object> props;
for(int i = 0; i < 30; i++) {
props = new HashMap<>();
props.put("propInt", i);
props.put("propStr", String.valueOf(i));
keyValuePropsList.add(props);
}
props = new HashMap<>();
keyValuePropsList.add(props);
createdDocuments = bulkInsert(createdCollection, keyValuePropsList);
for(int i = 0; i < 10; i++) {
Map<String, Object> p = new HashMap<>();
p.put("propScopedPartitionInt", i);
CosmosItemProperties doc = getDocumentDefinition("duplicateParitionKeyValue", UUID.randomUUID().toString(), p);
CosmosItemRequestOptions options = new CosmosItemRequestOptions();
options.partitionKey(new PartitionKey(doc.get("mypk")));
createdDocuments.add(createDocument(createdCollection, doc).read(options).block().properties());
}
numberOfPartitions = CosmosBridgeInternal.getAsyncDocumentClient(client)
.readPartitionKeyRanges("dbs/" + createdDatabase.id() + "/colls/" + createdCollection.id(), null)
.flatMap(p -> Flux.fromIterable(p.results())).collectList().single().block().size();
waitIfNeededForReplicasToCatchUp(clientBuilder());
} | class OrderbyDocumentQueryTest extends TestSuiteBase {
private final double minQueryRequestChargePerPartition = 2.0;
private CosmosClient client;
private CosmosContainer createdCollection;
private CosmosDatabase createdDatabase;
private List<CosmosItemProperties> createdDocuments = new ArrayList<>();
private int numberOfPartitions;
@Factory(dataProvider = "clientBuildersWithDirect")
public OrderbyDocumentQueryTest(CosmosClientBuilder clientBuilder) {
super(clientBuilder);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "queryMetricsArgProvider")
public void queryDocumentsValidateContent(boolean qmEnabled) throws Exception {
CosmosItemProperties expectedDocument = createdDocuments.get(0);
String query = String.format("SELECT * from root r where r.propStr = '%s'"
+ " ORDER BY r.propInt"
, expectedDocument.getString("propStr"));
FeedOptions options = new FeedOptions();
options.enableCrossPartitionQuery(true);
options.populateQueryMetrics(qmEnabled);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
List<String> expectedResourceIds = new ArrayList<>();
expectedResourceIds.add(expectedDocument.resourceId());
Map<String, ResourceValidator<CosmosItemProperties>> resourceIDToValidator = new HashMap<>();
resourceIDToValidator.put(expectedDocument.resourceId(),
new ResourceValidator.Builder<CosmosItemProperties>().areEqual(expectedDocument).build());
FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
.numberOfPages(1)
.containsExactly(expectedResourceIds)
.validateAllResources(resourceIDToValidator)
.totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
.allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>().hasRequestChargeHeader().build())
.hasValidQueryMetrics(qmEnabled)
.build();
validateQuerySuccess(queryObservable, validator);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryDocuments_NoResults() throws Exception {
String query = "SELECT * from root r where r.id = '2' ORDER BY r.propInt";
FeedOptions options = new FeedOptions();
options.enableCrossPartitionQuery(true);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
.containsExactly(new ArrayList<>())
.numberOfPages(1)
.totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
.allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
.hasRequestChargeHeader().build())
.build();
validateQuerySuccess(queryObservable, validator);
}
@DataProvider(name = "sortOrder")
public Object[][] sortOrder() {
return new Object[][] { { "ASC" }, {"DESC"} };
}
@Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "sortOrder")
public void queryOrderBy(String sortOrder) throws Exception {
String query = String.format("SELECT * FROM r ORDER BY r.propInt %s", sortOrder);
FeedOptions options = new FeedOptions();
options.enableCrossPartitionQuery(true);
int pageSize = 3;
options.maxItemCount(pageSize);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
Comparator<Integer> validatorComparator = Comparator.nullsFirst(Comparator.<Integer>naturalOrder());
List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("propInt", d -> d.getInt("propInt"), validatorComparator);
if ("DESC".equals(sortOrder)) {
Collections.reverse(expectedResourceIds);
}
int expectedPageSize = expectedNumberOfPages(expectedResourceIds.size(), pageSize);
FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
.containsExactly(expectedResourceIds)
.numberOfPages(expectedPageSize)
.allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
.hasRequestChargeHeader().build())
.totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
.build();
validateQuerySuccess(queryObservable, validator);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryOrderByInt() throws Exception {
String query = "SELECT * FROM r ORDER BY r.propInt";
FeedOptions options = new FeedOptions();
options.enableCrossPartitionQuery(true);
int pageSize = 3;
options.maxItemCount(pageSize);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
Comparator<Integer> validatorComparator = Comparator.nullsFirst(Comparator.<Integer>naturalOrder());
List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("propInt", d -> d.getInt("propInt"), validatorComparator);
int expectedPageSize = expectedNumberOfPages(expectedResourceIds.size(), pageSize);
FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
.containsExactly(expectedResourceIds)
.numberOfPages(expectedPageSize)
.allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
.hasRequestChargeHeader().build())
.totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
.build();
validateQuerySuccess(queryObservable, validator);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryOrderByString() throws Exception {
String query = "SELECT * FROM r ORDER BY r.propStr";
FeedOptions options = new FeedOptions();
options.enableCrossPartitionQuery(true);
int pageSize = 3;
options.maxItemCount(pageSize);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
Comparator<String> validatorComparator = Comparator.nullsFirst(Comparator.<String>naturalOrder());
List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("propStr", d -> d.getString("propStr"), validatorComparator);
int expectedPageSize = expectedNumberOfPages(expectedResourceIds.size(), pageSize);
FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
.containsExactly(expectedResourceIds)
.numberOfPages(expectedPageSize)
.allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
.hasRequestChargeHeader().build())
.totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
.build();
validateQuerySuccess(queryObservable, validator);
}
@DataProvider(name = "topValue")
public Object[][] topValueParameter() {
return new Object[][] { { 0 }, { 1 }, { 5 }, { createdDocuments.size() - 1 }, { createdDocuments.size() },
{ createdDocuments.size() + 1 }, { 2 * createdDocuments.size() } };
}
@Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "topValue")
public void queryOrderWithTop(int topValue) throws Exception {
String query = String.format("SELECT TOP %d * FROM r ORDER BY r.propInt", topValue);
FeedOptions options = new FeedOptions();
options.enableCrossPartitionQuery(true);
int pageSize = 3;
options.maxItemCount(pageSize);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
Comparator<Integer> validatorComparator = Comparator.nullsFirst(Comparator.<Integer>naturalOrder());
List<String> expectedResourceIds =
sortDocumentsAndCollectResourceIds("propInt", d -> d.getInt("propInt"), validatorComparator)
.stream().limit(topValue).collect(Collectors.toList());
int expectedPageSize = expectedNumberOfPages(expectedResourceIds.size(), pageSize);
FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
.containsExactly(expectedResourceIds)
.numberOfPages(expectedPageSize)
.allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
.hasRequestChargeHeader().build())
.totalRequestChargeIsAtLeast(numberOfPartitions * (topValue > 0 ? minQueryRequestChargePerPartition : 1))
.build();
validateQuerySuccess(queryObservable, validator);
}
private <T> List<String> sortDocumentsAndCollectResourceIds(String propName, Function<CosmosItemProperties, T> extractProp, Comparator<T> comparer) {
return createdDocuments.stream()
.filter(d -> d.getMap().containsKey(propName))
.sorted((d1, d2) -> comparer.compare(extractProp.apply(d1), extractProp.apply(d2)))
.map(Resource::resourceId).collect(Collectors.toList());
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryScopedToSinglePartition_StartWithContinuationToken() throws Exception {
String query = "SELECT * FROM r ORDER BY r.propScopedPartitionInt ASC";
FeedOptions options = new FeedOptions();
options.partitionKey(new PartitionKey("duplicateParitionKeyValue"));
options.maxItemCount(3);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
TestSubscriber<FeedResponse<CosmosItemProperties>> subscriber = new TestSubscriber<>();
queryObservable.take(1).subscribe(subscriber);
subscriber.awaitTerminalEvent();
subscriber.assertComplete();
subscriber.assertNoErrors();
assertThat(subscriber.valueCount()).isEqualTo(1);
FeedResponse<CosmosItemProperties> page = (FeedResponse<CosmosItemProperties>) subscriber.getEvents().get(0).get(0);
assertThat(page.results()).hasSize(3);
assertThat(page.continuationToken()).isNotEmpty();
options.requestContinuation(page.continuationToken());
queryObservable = createdCollection.queryItems(query, options);
List<CosmosItemProperties> expectedDocs = createdDocuments.stream()
.filter(d -> (StringUtils.equals("duplicateParitionKeyValue", d.getString("mypk"))))
.filter(d -> (d.getInt("propScopedPartitionInt") > 2)).collect(Collectors.toList());
int expectedPageSize = (expectedDocs.size() + options.maxItemCount() - 1) / options.maxItemCount();
assertThat(expectedDocs).hasSize(10 - 3);
FeedResponseListValidator<CosmosItemProperties> validator = null;
validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
.containsExactly(expectedDocs.stream()
.sorted((e1, e2) -> Integer.compare(e1.getInt("propScopedPartitionInt"), e2.getInt("propScopedPartitionInt")))
.map(d -> d.resourceId()).collect(Collectors.toList()))
.numberOfPages(expectedPageSize)
.allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
.requestChargeGreaterThanOrEqualTo(1.0).build())
.build();
validateQuerySuccess(queryObservable, validator);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void orderByContinuationTokenRoundTrip() throws Exception {
{
OrderByContinuationToken orderByContinuationToken = new OrderByContinuationToken(
new CompositeContinuationToken(
"asdf",
new Range<String>("A", "D", false, true)),
new QueryItem[] {new QueryItem("{\"item\" : 42}")},
"rid",
false);
String serialized = orderByContinuationToken.toString();
ValueHolder<OrderByContinuationToken> outOrderByContinuationToken = new ValueHolder<OrderByContinuationToken>();
assertThat(OrderByContinuationToken.tryParse(serialized, outOrderByContinuationToken)).isTrue();
OrderByContinuationToken deserialized = outOrderByContinuationToken.v;
CompositeContinuationToken compositeContinuationToken = deserialized.getCompositeContinuationToken();
String token = compositeContinuationToken.getToken();
Range<String> range = compositeContinuationToken.getRange();
assertThat(token).isEqualTo("asdf");
assertThat(range.getMin()).isEqualTo("A");
assertThat(range.getMax()).isEqualTo("D");
assertThat(range.isMinInclusive()).isEqualTo(false);
assertThat(range.isMaxInclusive()).isEqualTo(true);
QueryItem[] orderByItems = deserialized.getOrderByItems();
assertThat(orderByItems).isNotNull();
assertThat(orderByItems.length).isEqualTo(1);
assertThat(orderByItems[0].getItem()).isEqualTo(42);
String rid = deserialized.getRid();
assertThat(rid).isEqualTo("rid");
boolean inclusive = deserialized.getInclusive();
assertThat(inclusive).isEqualTo(false);
}
{
ValueHolder<OrderByContinuationToken> outOrderByContinuationToken = new ValueHolder<OrderByContinuationToken>();
assertThat(OrderByContinuationToken.tryParse("{\"property\" : \"Not a valid Order By Token\"}", outOrderByContinuationToken)).isFalse();
}
}
@Test(groups = { "simple" }, timeOut = TIMEOUT * 10, dataProvider = "sortOrder",
retryAnalyzer = RetryAnalyzer.class)
public void queryDocumentsWithOrderByContinuationTokensInteger(String sortOrder) throws Exception {
String query = String.format("SELECT * FROM c ORDER BY c.propInt %s", sortOrder);
Comparator<Integer> order = sortOrder.equals("ASC")?Comparator.naturalOrder():Comparator.reverseOrder();
Comparator<Integer> validatorComparator = Comparator.nullsFirst(order);
List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("propInt", d -> d.getInt("propInt"), validatorComparator);
this.queryWithContinuationTokensAndPageSizes(query, new int[] { 1, 5, 10, 100}, expectedResourceIds);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT * 10, dataProvider = "sortOrder")
public void queryDocumentsWithOrderByContinuationTokensString(String sortOrder) throws Exception {
    // Same continuation-token drain as the integer variant, ordering by the string id.
    final String query = String.format("SELECT * FROM c ORDER BY c.id %s", sortOrder);
    final Comparator<String> direction;
    if (sortOrder.equals("ASC")) {
        direction = Comparator.naturalOrder();
    } else {
        direction = Comparator.reverseOrder();
    }
    final List<String> expectedResourceIds =
        sortDocumentsAndCollectResourceIds("id", d -> d.getString("id"), Comparator.nullsFirst(direction));
    this.queryWithContinuationTokensAndPageSizes(query, new int[] { 1, 5, 10, 100 }, expectedResourceIds);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT * 10, dataProvider = "sortOrder")
public void queryDocumentsWithInvalidOrderByContinuationTokensString(String sortOrder) throws Exception {
    // A forged/stale ORDER BY continuation token must be rejected by the query pipeline.
    String query = String.format("SELECT * FROM c ORDER BY c.id %s", sortOrder);
    // Consistent with the sibling continuation-token tests: ternary direction, nulls first.
    Comparator<String> order = sortOrder.equals("ASC") ? Comparator.naturalOrder() : Comparator.reverseOrder();
    Comparator<String> validatorComparator = Comparator.nullsFirst(order);
    List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("id", d -> d.getString("id"), validatorComparator);
    this.assertInvalidContinuationToken(query, new int[] { 1, 5, 10, 100 }, expectedResourceIds);
}
public CosmosItemProperties createDocument(CosmosContainer cosmosContainer, Map<String, Object> keyValueProps)
    throws CosmosClientException {
    // Materialize a document from the property map and insert it, blocking for the result.
    final CosmosItemProperties definition = getDocumentDefinition(keyValueProps);
    return cosmosContainer.createItem(definition).block().properties();
}
public List<CosmosItemProperties> bulkInsert(CosmosContainer cosmosContainer, List<Map<String, Object>> keyValuePropsList) {
    // Convert each property map into a document definition, then insert them in one batch.
    final ArrayList<CosmosItemProperties> definitions = new ArrayList<>(keyValuePropsList.size());
    for (final Map<String, Object> props : keyValuePropsList) {
        definitions.add(getDocumentDefinition(props));
    }
    return bulkInsertBlocking(cosmosContainer, definitions);
}
@BeforeMethod(groups = { "simple" })
public void beforeMethod() throws Exception {
    // NOTE(review): unconditional 10s pause before every test method — presumably to avoid
    // request-rate throttling against the service/emulator; confirm, and consider a targeted
    // retry policy instead, since this adds 10s to every test in the class.
    TimeUnit.SECONDS.sleep(10);
}
// NOTE(review): this method carries BOTH @BeforeClass and @AfterClass, so TestNG would run it
// before and after the class. The @BeforeClass annotation most likely belonged to a separate
// setup method that is missing from this copy — confirm against the upstream file.
@BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT)
@AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
    // Release the shared client; safeClose is expected to tolerate null/already-closed clients.
    safeClose(client);
}
/**
 * Issues {@code query} with a deliberately forged ORDER BY continuation token and asserts
 * that the query pipeline surfaces a {@link CosmosClientException}.
 *
 * @param query       the ORDER BY query to run
 * @param pageSize    unused — kept for signature compatibility with the valid-token helper
 * @param expectedIds unused — kept for signature compatibility with the valid-token helper
 */
private void assertInvalidContinuationToken(String query, int[] pageSize, List<String> expectedIds) {
    // The original do/while looped on a variable that was never reassigned, so it always
    // executed exactly once; the dead loop has been removed.
    FeedOptions options = new FeedOptions();
    options.maxItemCount(1);
    options.enableCrossPartitionQuery(true);
    options.maxDegreeOfParallelism(2);
    // Syntactically valid but semantically bogus token ("asdf" over range (A, D]).
    OrderByContinuationToken orderByContinuationToken = new OrderByContinuationToken(
        new CompositeContinuationToken(
            "asdf",
            new Range<String>("A", "D", false, true)),
        new QueryItem[] {new QueryItem("{\"item\" : 42}")},
        "rid",
        false);
    options.requestContinuation(orderByContinuationToken.toString());
    Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query,
        options);
    TestSubscriber<FeedResponse<CosmosItemProperties>> testSubscriber = new TestSubscriber<>();
    queryObservable.subscribe(testSubscriber);
    testSubscriber.awaitTerminalEvent(TIMEOUT, TimeUnit.MILLISECONDS);
    testSubscriber.assertError(CosmosClientException.class);
}
private void queryWithContinuationTokensAndPageSizes(String query, int[] pageSizes, List<String> expectedIds) {
    // Re-run the same query once per page size and verify the fully drained id sequence
    // matches exactly, regardless of how the pages were sliced.
    for (final int pageSize : pageSizes) {
        final List<String> actualIds = new ArrayList<>();
        for (final CosmosItemProperties document : this.queryWithContinuationTokens(query, pageSize)) {
            actualIds.add(document.resourceId());
        }
        assertThat(actualIds).containsExactlyElementsOf(expectedIds);
    }
}
/**
 * Drains {@code query} completely, one service round trip per page: each iteration re-issues
 * the query seeded with the previous page's continuation token, consumes only the first
 * emitted page, and stops once the service returns a null token.
 */
private List<CosmosItemProperties> queryWithContinuationTokens(String query, int pageSize) {
    String requestContinuation = null;
    // NOTE(review): collected but never read in this method — presumably left for debugging.
    List<String> continuationTokens = new ArrayList<String>();
    List<CosmosItemProperties> receivedDocuments = new ArrayList<CosmosItemProperties>();
    do {
        FeedOptions options = new FeedOptions();
        options.maxItemCount(pageSize);
        options.enableCrossPartitionQuery(true);
        options.maxDegreeOfParallelism(2);
        options.requestContinuation(requestContinuation);
        Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query,
            options);
        TestSubscriber<FeedResponse<CosmosItemProperties>> testSubscriber = new TestSubscriber<>();
        queryObservable.subscribe(testSubscriber);
        testSubscriber.awaitTerminalEvent(TIMEOUT, TimeUnit.MILLISECONDS);
        testSubscriber.assertNoErrors();
        testSubscriber.assertComplete();
        // Only the first page is consumed; its token seeds the next round trip.
        FeedResponse<CosmosItemProperties> firstPage = (FeedResponse<CosmosItemProperties>) testSubscriber.getEvents().get(0).get(0);
        requestContinuation = firstPage.continuationToken();
        receivedDocuments.addAll(firstPage.results());
        continuationTokens.add(requestContinuation);
    } while (requestContinuation != null);
    return receivedDocuments;
}
/**
 * Renders a document as raw JSON: every entry of {@code keyValuePair} (values serialized via
 * the shared object mapper, nulls emitted literally), followed by the mandatory "id" and
 * "mypk" system properties.
 *
 * <p>NOTE(review): keys and the id/partition-key values are interpolated without JSON
 * escaping, so callers must pass JSON-safe strings — confirm all call sites do.
 */
private static CosmosItemProperties getDocumentDefinition(String partitionKey, String id, Map<String, Object> keyValuePair) {
    StringBuilder sb = new StringBuilder();
    sb.append("{\n");
    // Iterate entries directly instead of the original keySet() + get() double lookup.
    for (Map.Entry<String, Object> entry : keyValuePair.entrySet()) {
        sb.append(" ");
        sb.append("\"").append(entry.getKey()).append("\"").append(" :");
        Object val = entry.getValue();
        if (val == null) {
            sb.append("null");
        } else {
            sb.append(toJson(val));
        }
        sb.append(",\n");
    }
    sb.append(String.format(" \"id\": \"%s\",\n", id));
    sb.append(String.format(" \"mypk\": \"%s\"\n", partitionKey));
    sb.append("}");
    return new CosmosItemProperties(sb.toString());
}
private static CosmosItemProperties getDocumentDefinition(Map<String, Object> keyValuePair) {
    // A fresh random UUID doubles as both the document id and the partition key.
    final String docId = UUID.randomUUID().toString();
    return getDocumentDefinition(docId, docId, keyValuePair);
}
private static String toJson(Object object) {
    // Serialize with the shared mapper; a serialization failure here is a programming error,
    // so surface it as an unchecked IllegalStateException.
    try {
        return Utils.getSimpleObjectMapper().writeValueAsString(object);
    } catch (JsonProcessingException cause) {
        throw new IllegalStateException(cause);
    }
}
} | class OrderbyDocumentQueryTest extends TestSuiteBase {
// Minimum RU charge expected from each physical partition touched by a cross-partition query.
private final double minQueryRequestChargePerPartition = 2.0;
private CosmosClient client;
private CosmosContainer createdCollection;
private CosmosDatabase createdDatabase;
// Seeded by suite setup; every test derives its expected ordering from this corpus.
private List<CosmosItemProperties> createdDocuments = new ArrayList<>();
private int numberOfPartitions;
// TestNG factory: one instance of this suite per configured client transport.
@Factory(dataProvider = "clientBuildersWithDirect")
public OrderbyDocumentQueryTest(CosmosClientBuilder clientBuilder) {
    super(clientBuilder);
}
// Picks one known document and verifies that an ORDER BY query filtered to exactly that
// document returns its full content, correct request-charge headers and (when enabled)
// query metrics, all in a single page.
@Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "queryMetricsArgProvider")
public void queryDocumentsValidateContent(boolean qmEnabled) throws Exception {
    CosmosItemProperties expectedDocument = createdDocuments.get(0);
    String query = String.format("SELECT * from root r where r.propStr = '%s'"
        + " ORDER BY r.propInt"
        , expectedDocument.getString("propStr"));
    FeedOptions options = new FeedOptions();
    options.enableCrossPartitionQuery(true);
    options.populateQueryMetrics(qmEnabled);
    Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
    List<String> expectedResourceIds = new ArrayList<>();
    expectedResourceIds.add(expectedDocument.resourceId());
    // Full-content equality check for the single expected resource id.
    Map<String, ResourceValidator<CosmosItemProperties>> resourceIDToValidator = new HashMap<>();
    resourceIDToValidator.put(expectedDocument.resourceId(),
        new ResourceValidator.Builder<CosmosItemProperties>().areEqual(expectedDocument).build());
    FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
        .numberOfPages(1)
        .containsExactly(expectedResourceIds)
        .validateAllResources(resourceIDToValidator)
        .totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
        .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>().hasRequestChargeHeader().build())
        .hasValidQueryMetrics(qmEnabled)
        .build();
    validateQuerySuccess(queryObservable, validator);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryDocuments_NoResults() throws Exception {
    // An id that matches nothing still yields exactly one (empty) page with charge headers.
    final String query = "SELECT * from root r where r.id = '2' ORDER BY r.propInt";
    final FeedOptions feedOptions = new FeedOptions();
    feedOptions.enableCrossPartitionQuery(true);
    final Flux<FeedResponse<CosmosItemProperties>> queryFlux = createdCollection.queryItems(query, feedOptions);
    final FeedResponseListValidator<CosmosItemProperties> expectation =
        new FeedResponseListValidator.Builder<CosmosItemProperties>()
            .containsExactly(new ArrayList<>())
            .numberOfPages(1)
            .totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
            .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
                .hasRequestChargeHeader().build())
            .build();
    validateQuerySuccess(queryFlux, expectation);
}
// Every test parameterized by this provider runs once per sort direction.
@DataProvider(name = "sortOrder")
public Object[][] sortOrder() {
    return new Object[][] { { "ASC" }, {"DESC"} };
}
// Runs the integer ORDER BY in both directions (via the sortOrder provider) and checks the
// drained page sequence against a locally computed sort of the seeded documents.
@Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "sortOrder")
public void queryOrderBy(String sortOrder) throws Exception {
    String query = String.format("SELECT * FROM r ORDER BY r.propInt %s", sortOrder);
    FeedOptions options = new FeedOptions();
    options.enableCrossPartitionQuery(true);
    int pageSize = 3;
    options.maxItemCount(pageSize);
    Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
    // Local expectation: ascending with nulls first, then reversed wholesale for DESC.
    Comparator<Integer> validatorComparator = Comparator.nullsFirst(Comparator.<Integer>naturalOrder());
    List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("propInt", d -> d.getInt("propInt"), validatorComparator);
    if ("DESC".equals(sortOrder)) {
        Collections.reverse(expectedResourceIds);
    }
    int expectedPageSize = expectedNumberOfPages(expectedResourceIds.size(), pageSize);
    FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
        .containsExactly(expectedResourceIds)
        .numberOfPages(expectedPageSize)
        .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
            .hasRequestChargeHeader().build())
        .totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
        .build();
    validateQuerySuccess(queryObservable, validator);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryOrderByInt() throws Exception {
    // Ascending integer ORDER BY across all partitions, drained in pages of three.
    final String query = "SELECT * FROM r ORDER BY r.propInt";
    final int itemsPerPage = 3;
    final FeedOptions feedOptions = new FeedOptions();
    feedOptions.enableCrossPartitionQuery(true);
    feedOptions.maxItemCount(itemsPerPage);
    final Flux<FeedResponse<CosmosItemProperties>> queryFlux = createdCollection.queryItems(query, feedOptions);
    final List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds(
        "propInt", d -> d.getInt("propInt"), Comparator.nullsFirst(Comparator.<Integer>naturalOrder()));
    final int expectedPages = expectedNumberOfPages(expectedResourceIds.size(), itemsPerPage);
    final FeedResponseListValidator<CosmosItemProperties> expectation =
        new FeedResponseListValidator.Builder<CosmosItemProperties>()
            .containsExactly(expectedResourceIds)
            .numberOfPages(expectedPages)
            .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
                .hasRequestChargeHeader().build())
            .totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
            .build();
    validateQuerySuccess(queryFlux, expectation);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryOrderByString() throws Exception {
    // Ascending string ORDER BY across all partitions, drained in pages of three.
    final String query = "SELECT * FROM r ORDER BY r.propStr";
    final int itemsPerPage = 3;
    final FeedOptions feedOptions = new FeedOptions();
    feedOptions.enableCrossPartitionQuery(true);
    feedOptions.maxItemCount(itemsPerPage);
    final Flux<FeedResponse<CosmosItemProperties>> queryFlux = createdCollection.queryItems(query, feedOptions);
    final List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds(
        "propStr", d -> d.getString("propStr"), Comparator.nullsFirst(Comparator.<String>naturalOrder()));
    final int expectedPages = expectedNumberOfPages(expectedResourceIds.size(), itemsPerPage);
    final FeedResponseListValidator<CosmosItemProperties> expectation =
        new FeedResponseListValidator.Builder<CosmosItemProperties>()
            .containsExactly(expectedResourceIds)
            .numberOfPages(expectedPages)
            .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
                .hasRequestChargeHeader().build())
            .totalRequestChargeIsAtLeast(numberOfPartitions * minQueryRequestChargePerPartition)
            .build();
    validateQuerySuccess(queryFlux, expectation);
}
// TOP boundary values around the corpus size: zero, small, size-1, size, size+1, 2*size.
@DataProvider(name = "topValue")
public Object[][] topValueParameter() {
    return new Object[][] { { 0 }, { 1 }, { 5 }, { createdDocuments.size() - 1 }, { createdDocuments.size() },
        { createdDocuments.size() + 1 }, { 2 * createdDocuments.size() } };
}
// Combines TOP with ORDER BY: expects exactly the first topValue ids of the sorted corpus.
@Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "topValue")
public void queryOrderWithTop(int topValue) throws Exception {
    String query = String.format("SELECT TOP %d * FROM r ORDER BY r.propInt", topValue);
    FeedOptions options = new FeedOptions();
    options.enableCrossPartitionQuery(true);
    int pageSize = 3;
    options.maxItemCount(pageSize);
    Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
    Comparator<Integer> validatorComparator = Comparator.nullsFirst(Comparator.<Integer>naturalOrder());
    // Truncate the locally sorted ids to the TOP count.
    List<String> expectedResourceIds =
        sortDocumentsAndCollectResourceIds("propInt", d -> d.getInt("propInt"), validatorComparator)
        .stream().limit(topValue).collect(Collectors.toList());
    int expectedPageSize = expectedNumberOfPages(expectedResourceIds.size(), pageSize);
    FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
        .containsExactly(expectedResourceIds)
        .numberOfPages(expectedPageSize)
        .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
            .hasRequestChargeHeader().build())
        // TOP 0 short-circuits, so only a minimal per-partition charge is expected then.
        .totalRequestChargeIsAtLeast(numberOfPartitions * (topValue > 0 ? minQueryRequestChargePerPartition : 1))
        .build();
    validateQuerySuccess(queryObservable, validator);
}
private <T> List<String> sortDocumentsAndCollectResourceIds(String propName, Function<CosmosItemProperties, T> extractProp, Comparator<T> comparer) {
    // Of the created documents, keep only those that define propName, order them by the
    // extracted property under the supplied comparator, and return their resource ids.
    return createdDocuments.stream()
        .filter(doc -> doc.getMap().containsKey(propName))
        .sorted(Comparator.comparing(extractProp, comparer))
        .map(Resource::resourceId)
        .collect(Collectors.toList());
}
// Single-partition ORDER BY: read the first page only, then restart the query from its
// continuation token and validate the remainder of the partition's documents.
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void queryScopedToSinglePartition_StartWithContinuationToken() throws Exception {
    String query = "SELECT * FROM r ORDER BY r.propScopedPartitionInt ASC";
    FeedOptions options = new FeedOptions();
    // NOTE: "duplicateParitionKeyValue" (misspelled) intentionally matches the seeded data.
    options.partitionKey(new PartitionKey("duplicateParitionKeyValue"));
    options.maxItemCount(3);
    Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options);
    TestSubscriber<FeedResponse<CosmosItemProperties>> subscriber = new TestSubscriber<>();
    // Consume exactly one page to obtain a mid-stream continuation token.
    queryObservable.take(1).subscribe(subscriber);
    subscriber.awaitTerminalEvent();
    subscriber.assertComplete();
    subscriber.assertNoErrors();
    assertThat(subscriber.valueCount()).isEqualTo(1);
    FeedResponse<CosmosItemProperties> page = (FeedResponse<CosmosItemProperties>) subscriber.getEvents().get(0).get(0);
    assertThat(page.results()).hasSize(3);
    assertThat(page.continuationToken()).isNotEmpty();
    // Resume the query from where the first page left off.
    options.requestContinuation(page.continuationToken());
    queryObservable = createdCollection.queryItems(query, options);
    // Remaining documents: same partition, ordering property beyond the first page (> 2).
    List<CosmosItemProperties> expectedDocs = createdDocuments.stream()
        .filter(d -> (StringUtils.equals("duplicateParitionKeyValue", d.getString("mypk"))))
        .filter(d -> (d.getInt("propScopedPartitionInt") > 2)).collect(Collectors.toList());
    int expectedPageSize = (expectedDocs.size() + options.maxItemCount() - 1) / options.maxItemCount();
    assertThat(expectedDocs).hasSize(10 - 3);
    FeedResponseListValidator<CosmosItemProperties> validator = null;
    validator = new FeedResponseListValidator.Builder<CosmosItemProperties>()
        .containsExactly(expectedDocs.stream()
            .sorted((e1, e2) -> Integer.compare(e1.getInt("propScopedPartitionInt"), e2.getInt("propScopedPartitionInt")))
            .map(d -> d.resourceId()).collect(Collectors.toList()))
        .numberOfPages(expectedPageSize)
        .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>()
            .requestChargeGreaterThanOrEqualTo(1.0).build())
        .build();
    validateQuerySuccess(queryObservable, validator);
}
// Serialization round trip of OrderByContinuationToken: a well-formed token must survive
// toString() -> tryParse() with every field intact; malformed JSON must fail to parse.
@Test(groups = { "simple" }, timeOut = TIMEOUT)
public void orderByContinuationTokenRoundTrip() throws Exception {
    {
        // Positive case: build, serialize, re-parse, and compare field by field.
        OrderByContinuationToken orderByContinuationToken = new OrderByContinuationToken(
            new CompositeContinuationToken(
                "asdf",
                new Range<String>("A", "D", false, true)),
            new QueryItem[] {new QueryItem("{\"item\" : 42}")},
            "rid",
            false);
        String serialized = orderByContinuationToken.toString();
        ValueHolder<OrderByContinuationToken> outOrderByContinuationToken = new ValueHolder<OrderByContinuationToken>();
        assertThat(OrderByContinuationToken.tryParse(serialized, outOrderByContinuationToken)).isTrue();
        OrderByContinuationToken deserialized = outOrderByContinuationToken.v;
        CompositeContinuationToken compositeContinuationToken = deserialized.getCompositeContinuationToken();
        String token = compositeContinuationToken.getToken();
        Range<String> range = compositeContinuationToken.getRange();
        assertThat(token).isEqualTo("asdf");
        assertThat(range.getMin()).isEqualTo("A");
        assertThat(range.getMax()).isEqualTo("D");
        assertThat(range.isMinInclusive()).isEqualTo(false);
        assertThat(range.isMaxInclusive()).isEqualTo(true);
        QueryItem[] orderByItems = deserialized.getOrderByItems();
        assertThat(orderByItems).isNotNull();
        assertThat(orderByItems.length).isEqualTo(1);
        assertThat(orderByItems[0].getItem()).isEqualTo(42);
        String rid = deserialized.getRid();
        assertThat(rid).isEqualTo("rid");
        boolean inclusive = deserialized.getInclusive();
        assertThat(inclusive).isEqualTo(false);
    }
    {
        // Negative case: arbitrary JSON is rejected rather than throwing.
        ValueHolder<OrderByContinuationToken> outOrderByContinuationToken = new ValueHolder<OrderByContinuationToken>();
        assertThat(OrderByContinuationToken.tryParse("{\"property\" : \"Not a valid Order By Token\"}", outOrderByContinuationToken)).isFalse();
    }
}
@Test(groups = { "simple" }, timeOut = TIMEOUT * 10, dataProvider = "sortOrder",
retryAnalyzer = RetryAnalyzer.class)
public void queryDocumentsWithOrderByContinuationTokensInteger(String sortOrder) throws Exception {
String query = String.format("SELECT * FROM c ORDER BY c.propInt %s", sortOrder);
Comparator<Integer> order = sortOrder.equals("ASC")?Comparator.naturalOrder():Comparator.reverseOrder();
Comparator<Integer> validatorComparator = Comparator.nullsFirst(order);
List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("propInt", d -> d.getInt("propInt"), validatorComparator);
this.queryWithContinuationTokensAndPageSizes(query, new int[] { 1, 5, 10, 100}, expectedResourceIds);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT * 10, dataProvider = "sortOrder")
public void queryDocumentsWithOrderByContinuationTokensString(String sortOrder) throws Exception {
String query = String.format("SELECT * FROM c ORDER BY c.id %s", sortOrder);
Comparator<String> order = sortOrder.equals("ASC")?Comparator.naturalOrder():Comparator.reverseOrder();
Comparator<String> validatorComparator = Comparator.nullsFirst(order);
List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("id", d -> d.getString("id"), validatorComparator);
this.queryWithContinuationTokensAndPageSizes(query, new int[] { 1, 5, 10, 100 }, expectedResourceIds);
}
@Test(groups = { "simple" }, timeOut = TIMEOUT * 10, dataProvider = "sortOrder")
public void queryDocumentsWithInvalidOrderByContinuationTokensString(String sortOrder) throws Exception {
    // A forged/stale ORDER BY continuation token must be rejected by the query pipeline.
    String query = String.format("SELECT * FROM c ORDER BY c.id %s", sortOrder);
    // Consistent with the sibling continuation-token tests: ternary direction, nulls first.
    Comparator<String> order = sortOrder.equals("ASC") ? Comparator.naturalOrder() : Comparator.reverseOrder();
    Comparator<String> validatorComparator = Comparator.nullsFirst(order);
    List<String> expectedResourceIds = sortDocumentsAndCollectResourceIds("id", d -> d.getString("id"), validatorComparator);
    this.assertInvalidContinuationToken(query, new int[] { 1, 5, 10, 100 }, expectedResourceIds);
}
public CosmosItemProperties createDocument(CosmosContainer cosmosContainer, Map<String, Object> keyValueProps)
throws CosmosClientException {
CosmosItemProperties docDefinition = getDocumentDefinition(keyValueProps);
return cosmosContainer.createItem(docDefinition).block().properties();
}
public List<CosmosItemProperties> bulkInsert(CosmosContainer cosmosContainer, List<Map<String, Object>> keyValuePropsList) {
ArrayList<CosmosItemProperties> result = new ArrayList<CosmosItemProperties>();
for(Map<String, Object> keyValueProps: keyValuePropsList) {
CosmosItemProperties docDefinition = getDocumentDefinition(keyValueProps);
result.add(docDefinition);
}
return bulkInsertBlocking(cosmosContainer, result);
}
@BeforeMethod(groups = { "simple" })
public void beforeMethod() throws Exception {
TimeUnit.SECONDS.sleep(10);
}
@BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT)
@AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
safeClose(client);
}
/**
 * Issues {@code query} with a deliberately forged ORDER BY continuation token and asserts
 * that the query pipeline surfaces a {@link CosmosClientException}.
 *
 * @param query       the ORDER BY query to run
 * @param pageSize    unused — kept for signature compatibility with the valid-token helper
 * @param expectedIds unused — kept for signature compatibility with the valid-token helper
 */
private void assertInvalidContinuationToken(String query, int[] pageSize, List<String> expectedIds) {
    // The original do/while looped on a variable that was never reassigned, so it always
    // executed exactly once; the dead loop has been removed.
    FeedOptions options = new FeedOptions();
    options.maxItemCount(1);
    options.enableCrossPartitionQuery(true);
    options.maxDegreeOfParallelism(2);
    // Syntactically valid but semantically bogus token ("asdf" over range (A, D]).
    OrderByContinuationToken orderByContinuationToken = new OrderByContinuationToken(
        new CompositeContinuationToken(
            "asdf",
            new Range<String>("A", "D", false, true)),
        new QueryItem[] {new QueryItem("{\"item\" : 42}")},
        "rid",
        false);
    options.requestContinuation(orderByContinuationToken.toString());
    Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query,
        options);
    TestSubscriber<FeedResponse<CosmosItemProperties>> testSubscriber = new TestSubscriber<>();
    queryObservable.subscribe(testSubscriber);
    testSubscriber.awaitTerminalEvent(TIMEOUT, TimeUnit.MILLISECONDS);
    testSubscriber.assertError(CosmosClientException.class);
}
private void queryWithContinuationTokensAndPageSizes(String query, int[] pageSizes, List<String> expectedIds) {
for (int pageSize : pageSizes) {
List<CosmosItemProperties> receivedDocuments = this.queryWithContinuationTokens(query, pageSize);
List<String> actualIds = new ArrayList<String>();
for (CosmosItemProperties document : receivedDocuments) {
actualIds.add(document.resourceId());
}
assertThat(actualIds).containsExactlyElementsOf(expectedIds);
}
}
private List<CosmosItemProperties> queryWithContinuationTokens(String query, int pageSize) {
String requestContinuation = null;
List<String> continuationTokens = new ArrayList<String>();
List<CosmosItemProperties> receivedDocuments = new ArrayList<CosmosItemProperties>();
do {
FeedOptions options = new FeedOptions();
options.maxItemCount(pageSize);
options.enableCrossPartitionQuery(true);
options.maxDegreeOfParallelism(2);
options.requestContinuation(requestContinuation);
Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query,
options);
TestSubscriber<FeedResponse<CosmosItemProperties>> testSubscriber = new TestSubscriber<>();
queryObservable.subscribe(testSubscriber);
testSubscriber.awaitTerminalEvent(TIMEOUT, TimeUnit.MILLISECONDS);
testSubscriber.assertNoErrors();
testSubscriber.assertComplete();
FeedResponse<CosmosItemProperties> firstPage = (FeedResponse<CosmosItemProperties>) testSubscriber.getEvents().get(0).get(0);
requestContinuation = firstPage.continuationToken();
receivedDocuments.addAll(firstPage.results());
continuationTokens.add(requestContinuation);
} while (requestContinuation != null);
return receivedDocuments;
}
/**
 * Renders a document as raw JSON: every entry of {@code keyValuePair} (values serialized via
 * the shared object mapper, nulls emitted literally), followed by the mandatory "id" and
 * "mypk" system properties.
 *
 * <p>NOTE(review): keys and the id/partition-key values are interpolated without JSON
 * escaping, so callers must pass JSON-safe strings — confirm all call sites do.
 */
private static CosmosItemProperties getDocumentDefinition(String partitionKey, String id, Map<String, Object> keyValuePair) {
    StringBuilder sb = new StringBuilder();
    sb.append("{\n");
    // Iterate entries directly instead of the original keySet() + get() double lookup.
    for (Map.Entry<String, Object> entry : keyValuePair.entrySet()) {
        sb.append(" ");
        sb.append("\"").append(entry.getKey()).append("\"").append(" :");
        Object val = entry.getValue();
        if (val == null) {
            sb.append("null");
        } else {
            sb.append(toJson(val));
        }
        sb.append(",\n");
    }
    sb.append(String.format(" \"id\": \"%s\",\n", id));
    sb.append(String.format(" \"mypk\": \"%s\"\n", partitionKey));
    sb.append("}");
    return new CosmosItemProperties(sb.toString());
}
private static CosmosItemProperties getDocumentDefinition(Map<String, Object> keyValuePair) {
String uuid = UUID.randomUUID().toString();
return getDocumentDefinition(uuid, uuid, keyValuePair);
}
private static String toJson(Object object){
try {
return Utils.getSimpleObjectMapper().writeValueAsString(object);
} catch (JsonProcessingException e) {
throw new IllegalStateException(e);
}
}
} |
Please use `Entry` instead of `Map.Entry` | public void init(final ShardingSphereMetaData metaData, final SQLStatement sqlStatement) {
resource = metaData.getResource();
dataSourcePropsMap = new LinkedHashMap<>(metaData.getResource().getDataSources().size(), 1);
for (Map.Entry<String, DataSource> entry : metaData.getResource().getDataSources().entrySet()) {
dataSourcePropsMap.put(entry.getKey(), DataSourcePropertiesCreator.create(entry.getValue()));
}
dataSourceNames = dataSourcePropsMap.keySet().iterator();
} | for (Map.Entry<String, DataSource> entry : metaData.getResource().getDataSources().entrySet()) { | public void init(final ShardingSphereMetaData metaData, final SQLStatement sqlStatement) {
resource = metaData.getResource();
dataSourcePropsMap = new LinkedHashMap<>(metaData.getResource().getDataSources().size(), 1);
for (Entry<String, DataSource> entry : metaData.getResource().getDataSources().entrySet()) {
dataSourcePropsMap.put(entry.getKey(), DataSourcePropertiesCreator.create(entry.getValue()));
}
dataSourceNames = dataSourcePropsMap.keySet().iterator();
} | class DataSourceQueryResultSet implements DistSQLResultSet {
// Keys of the standardized pool properties rendered by getRowData().
private static final String CONNECTION_TIMEOUT_MILLISECONDS = "connectionTimeoutMilliseconds";
private static final String IDLE_TIMEOUT_MILLISECONDS = "idleTimeoutMilliseconds";
private static final String MAX_LIFETIME_MILLISECONDS = "maxLifetimeMilliseconds";
private static final String MAX_POOL_SIZE = "maxPoolSize";
private static final String MIN_POOL_SIZE = "minPoolSize";
private static final String READ_ONLY = "readOnly";
// Populated by init() from the schema metadata.
private ShardingSphereResource resource;
// Data source name -> its connection/pool properties, in registration order.
private Map<String, DataSourceProperties> dataSourcePropsMap;
// Cursor over dataSourcePropsMap keys; drives next()/getRowData().
private Iterator<String> dataSourceNames;
// NOTE(review): the original carried two stacked @Override annotations; @Override is not
// @Repeatable, so that does not compile — the stray one (likely left from a removed method)
// has been dropped.
@Override
public Collection<String> getColumnNames() {
    // Column order here must match the value order produced by getRowData().
    return Arrays.asList("name", "type", "host", "port", "db", "connection_timeout_milliseconds", "idle_timeout_milliseconds",
        "max_lifetime_milliseconds", "max_pool_size", "min_pool_size", "read_only", "other_attributes");
}
@Override
public boolean next() {
    // More rows exist while the data source name cursor is not exhausted.
    return dataSourceNames.hasNext();
}
@Override
public Collection<Object> getRowData() {
    // Advances the cursor; callers must have seen next() return true first.
    String dataSourceName = dataSourceNames.next();
    DataSourceMetaData metaData = resource.getDataSourcesMetaData().getDataSourceMetaData(dataSourceName);
    Collection<Object> result = new LinkedList<>();
    result.add(dataSourceName);
    result.add(resource.getDatabaseType().getName());
    result.add(metaData.getHostname());
    result.add(metaData.getPort());
    result.add(metaData.getCatalog());
    DataSourceProperties dataSourceProperties = dataSourcePropsMap.get(dataSourceName);
    Map<String, Object> standardProperties = dataSourceProperties.getPoolPropertySynonyms().getStandardProperties();
    // Pool settings are emitted in the same order as the column names in getColumnNames().
    result.add(getStandardProperty(standardProperties, CONNECTION_TIMEOUT_MILLISECONDS));
    result.add(getStandardProperty(standardProperties, IDLE_TIMEOUT_MILLISECONDS));
    result.add(getStandardProperty(standardProperties, MAX_LIFETIME_MILLISECONDS));
    result.add(getStandardProperty(standardProperties, MAX_POOL_SIZE));
    result.add(getStandardProperty(standardProperties, MIN_POOL_SIZE));
    result.add(getStandardProperty(standardProperties, READ_ONLY));
    Map<String, Object> otherProperties = dataSourceProperties.getCustomDataSourceProperties().getProperties();
    // NOTE(review): a new Gson instance is built per row — consider hoisting to a static field.
    result.add(otherProperties.isEmpty() ? "" : new Gson().toJson(otherProperties));
    return result;
}
/**
 * Renders one standardized pool property as a display string; an absent key or a
 * null value both become the empty string.
 */
private String getStandardProperty(final Map<String, Object> standardProperties, final String key) {
    // Single lookup replaces the original containsKey(...) + get(...) pair; a null value and
    // a missing key rendered identically before, so behavior is unchanged.
    Object value = standardProperties.get(key);
    return null == value ? "" : value.toString();
}
@Override
public String getType() {
    // This result set is registered under the SQL statement class it serves.
    return ShowResourcesStatement.class.getCanonicalName();
}
} | class DataSourceQueryResultSet implements DistSQLResultSet {
private static final String CONNECTION_TIMEOUT_MILLISECONDS = "connectionTimeoutMilliseconds";
private static final String IDLE_TIMEOUT_MILLISECONDS = "idleTimeoutMilliseconds";
private static final String MAX_LIFETIME_MILLISECONDS = "maxLifetimeMilliseconds";
private static final String MAX_POOL_SIZE = "maxPoolSize";
private static final String MIN_POOL_SIZE = "minPoolSize";
private static final String READ_ONLY = "readOnly";
private ShardingSphereResource resource;
private Map<String, DataSourceProperties> dataSourcePropsMap;
private Iterator<String> dataSourceNames;
// NOTE(review): the original carried two stacked @Override annotations; @Override is not
// @Repeatable, so that does not compile — the stray one (likely left from a removed method)
// has been dropped.
@Override
public Collection<String> getColumnNames() {
    // Column order here must match the value order produced by getRowData().
    return Arrays.asList("name", "type", "host", "port", "db", "connection_timeout_milliseconds", "idle_timeout_milliseconds",
        "max_lifetime_milliseconds", "max_pool_size", "min_pool_size", "read_only", "other_attributes");
}
@Override
public boolean next() {
return dataSourceNames.hasNext();
}
@Override
public Collection<Object> getRowData() {
String dataSourceName = dataSourceNames.next();
DataSourceMetaData metaData = resource.getDataSourcesMetaData().getDataSourceMetaData(dataSourceName);
Collection<Object> result = new LinkedList<>();
result.add(dataSourceName);
result.add(resource.getDatabaseType().getName());
result.add(metaData.getHostname());
result.add(metaData.getPort());
result.add(metaData.getCatalog());
DataSourceProperties dataSourceProperties = dataSourcePropsMap.get(dataSourceName);
Map<String, Object> standardProperties = dataSourceProperties.getPoolPropertySynonyms().getStandardProperties();
result.add(getStandardProperty(standardProperties, CONNECTION_TIMEOUT_MILLISECONDS));
result.add(getStandardProperty(standardProperties, IDLE_TIMEOUT_MILLISECONDS));
result.add(getStandardProperty(standardProperties, MAX_LIFETIME_MILLISECONDS));
result.add(getStandardProperty(standardProperties, MAX_POOL_SIZE));
result.add(getStandardProperty(standardProperties, MIN_POOL_SIZE));
result.add(getStandardProperty(standardProperties, READ_ONLY));
Map<String, Object> otherProperties = dataSourceProperties.getCustomDataSourceProperties().getProperties();
result.add(otherProperties.isEmpty() ? "" : new Gson().toJson(otherProperties));
return result;
}
/**
 * Renders one standardized pool property as a display string; an absent key or a
 * null value both become the empty string.
 */
private String getStandardProperty(final Map<String, Object> standardProperties, final String key) {
    // Single lookup replaces the original containsKey(...) + get(...) pair; a null value and
    // a missing key rendered identically before, so behavior is unchanged.
    Object value = standardProperties.get(key);
    return null == value ? "" : value.toString();
}
@Override
public String getType() {
return ShowResourcesStatement.class.getCanonicalName();
}
} |
I would add a comment here to explain what we're doing. | public void startDev() {
// Runs the application in development mode: assembles a launcher jar that
// embeds the serialized DevModeContext plus a Class-Path manifest, then
// forks a new JVM running DevModeMain against it.
Project project = getProject();
QuarkusPluginExtension extension = (QuarkusPluginExtension) project.getExtensions().findByName("quarkus");
if (!getSourceDir().isDirectory()) {
throw new GradleException("The `src/main/java` directory is required, please create it.");
}
if (!extension().outputDirectory().isDirectory()) {
throw new GradleException("The project has no output yet, " +
"this should not happen as build should have been executed first. " +
"Do the project have any source files?");
}
DevModeContext context = new DevModeContext();
try {
List<String> args = new ArrayList<>();
args.add(JavaBinFinder.findBin());
// Debug flag handling: null -> listen on 5005 unless the port is already
// taken; "client"/"true"/"false"/{port} select the other JDWP modes.
if (getDebug() == null) {
try (Socket socket = new Socket(InetAddress.getByAddress(new byte[] { 127, 0, 0, 1 }), 5005)) {
System.err.println("Port 5005 in use, not starting in debug mode");
} catch (IOException e) {
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n");
}
} else if (getDebug().toLowerCase().equals("client")) {
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=localhost:5005,server=n,suspend=n");
} else if (getDebug().toLowerCase().equals("true")) {
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=localhost:5005,server=y,suspend=y");
} else if (!getDebug().toLowerCase().equals("false")) {
try {
int port = Integer.parseInt(getDebug());
if (port <= 0) {
throw new GradleException("The specified debug port must be greater than 0");
}
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=" + port + ",server=y,suspend=y");
} catch (NumberFormatException e) {
throw new GradleException(
"Invalid value for debug parameter: " + getDebug() + " must be true|false|client|{port}");
}
}
if (getJvmArgs() != null) {
args.addAll(Arrays.asList(getJvmArgs().split(" ")));
}
args.add("-XX:TieredStopAtLevel=1");
if (!isPreventnoverify()) {
args.add("-Xverify:none");
}
// Resolve the application model and put every dependency on both the
// manifest Class-Path and the DevModeContext classpath.
StringBuilder classPathManifest = new StringBuilder();
final AppModel appModel;
final AppModelResolver modelResolver = extension().resolveAppModel();
try {
final AppArtifact appArtifact = extension.getAppArtifact();
appArtifact.setPath(extension.outputDirectory().toPath());
appModel = modelResolver.resolveModel(appArtifact);
} catch (AppModelResolverException e) {
throw new GradleException("Failed to resolve application model " + extension.getAppArtifact() + " dependencies",
e);
}
for (AppDependency appDep : appModel.getAllDependencies()) {
addToClassPaths(classPathManifest, context, appDep.getArtifact().getPath().toFile());
}
args.add("-Djava.util.logging.manager=org.jboss.logmanager.LogManager");
File wiringClassesDirectory = new File(getBuildDir(), "wiring-classes");
wiringClassesDirectory.mkdirs();
addToClassPaths(classPathManifest, context, wiringClassesDirectory);
addGradlePluginDeps(classPathManifest, context);
File tempFile = new File(getBuildDir(), extension.finalName() + "-dev.jar");
tempFile.delete();
tempFile.deleteOnExit();
// The last resources directory wins as the module's resource root.
StringBuilder resources = new StringBuilder();
String res = null;
for (File file : extension.resourcesDir()) {
if (resources.length() > 0)
resources.append(File.pathSeparator);
resources.append(file.getAbsolutePath());
res = file.getAbsolutePath();
}
DevModeContext.ModuleInfo moduleInfo = new DevModeContext.ModuleInfo(
project.getName(),
project.getProjectDir().getAbsolutePath(),
Collections.singleton(getSourceDir().getAbsolutePath()),
extension.outputDirectory().getAbsolutePath(),
res);
context.getModules().add(moduleInfo);
// Write the launcher jar: manifest (Class-Path + Main-Class pointing at
// DevModeMain) and the serialized DevModeContext payload.
try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(tempFile))) {
out.putNextEntry(new ZipEntry("META-INF/"));
Manifest manifest = new Manifest();
manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, classPathManifest.toString());
manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, DevModeMain.class.getName());
out.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF"));
manifest.write(out);
out.putNextEntry(new ZipEntry(DevModeMain.DEV_MODE_CONTEXT));
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
ObjectOutputStream obj = new ObjectOutputStream(new DataOutputStream(bytes));
obj.writeObject(context);
obj.close();
out.write(bytes.toByteArray());
}
extension.outputDirectory().mkdirs();
ApplicationInfoUtil.writeApplicationInfoProperties(appModel.getAppArtifact(), extension.outputDirectory().toPath());
args.add("-jar");
args.add(tempFile.getAbsolutePath());
// FIX: when the classes directory and the config directory are the same,
// pass the path once instead of "dir,dir" so the dev-mode root is not
// registered twice.
final String outputClassDirectory = extension.outputDirectory().getAbsolutePath();
final String outputResourcesDirectory = extension.outputConfigDirectory().getAbsolutePath();
if (outputClassDirectory.equals(outputResourcesDirectory)) {
args.add(outputClassDirectory);
} else {
args.add(outputClassDirectory + "," + outputResourcesDirectory);
}
args.add(wiringClassesDirectory.getAbsolutePath());
args.add(new File(getBuildDir(), "transformer-cache").getAbsolutePath());
ProcessBuilder pb = new ProcessBuilder(args.toArray(new String[0]));
pb.redirectErrorStream(true);
pb.redirectInput(ProcessBuilder.Redirect.INHERIT);
pb.directory(extension.outputDirectory());
System.out.println("Starting process: ");
pb.command().forEach(System.out::println);
System.out.println("Args: ");
args.forEach(System.out::println);
Process p = pb.start();
// Make sure the forked JVM dies together with the Gradle process.
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
p.destroy();
}
}, "Development Mode Shutdown Hook"));
try {
ExecutorService es = Executors.newSingleThreadExecutor();
es.submit(() -> copyOutputToConsole(p.getInputStream()));
p.waitFor();
} catch (Exception e) {
p.destroy();
throw e;
}
} catch (Exception e) {
throw new GradleException("Failed to run", e);
}
}
Project project = getProject();
QuarkusPluginExtension extension = (QuarkusPluginExtension) project.getExtensions().findByName("quarkus");
if (!getSourceDir().isDirectory()) {
throw new GradleException("The `src/main/java` directory is required, please create it.");
}
if (!extension().outputDirectory().isDirectory()) {
throw new GradleException("The project has no output yet, " +
"this should not happen as build should have been executed first. " +
"Do the project have any source files?");
}
DevModeContext context = new DevModeContext();
try {
List<String> args = new ArrayList<>();
args.add(JavaBinFinder.findBin());
if (getDebug() == null) {
try (Socket socket = new Socket(InetAddress.getByAddress(new byte[] { 127, 0, 0, 1 }), 5005)) {
System.err.println("Port 5005 in use, not starting in debug mode");
} catch (IOException e) {
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n");
}
} else if (getDebug().toLowerCase().equals("client")) {
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=localhost:5005,server=n,suspend=n");
} else if (getDebug().toLowerCase().equals("true")) {
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=localhost:5005,server=y,suspend=y");
} else if (!getDebug().toLowerCase().equals("false")) {
try {
int port = Integer.parseInt(getDebug());
if (port <= 0) {
throw new GradleException("The specified debug port must be greater than 0");
}
args.add("-Xdebug");
args.add("-Xrunjdwp:transport=dt_socket,address=" + port + ",server=y,suspend=y");
} catch (NumberFormatException e) {
throw new GradleException(
"Invalid value for debug parameter: " + getDebug() + " must be true|false|client|{port}");
}
}
if (getJvmArgs() != null) {
args.addAll(Arrays.asList(getJvmArgs().split(" ")));
}
args.add("-XX:TieredStopAtLevel=1");
if (!isPreventnoverify()) {
args.add("-Xverify:none");
}
StringBuilder classPathManifest = new StringBuilder();
final AppModel appModel;
final AppModelResolver modelResolver = extension().resolveAppModel();
try {
final AppArtifact appArtifact = extension.getAppArtifact();
appArtifact.setPath(extension.outputDirectory().toPath());
appModel = modelResolver.resolveModel(appArtifact);
} catch (AppModelResolverException e) {
throw new GradleException("Failed to resolve application model " + extension.getAppArtifact() + " dependencies",
e);
}
for (AppDependency appDep : appModel.getAllDependencies()) {
addToClassPaths(classPathManifest, context, appDep.getArtifact().getPath().toFile());
}
args.add("-Djava.util.logging.manager=org.jboss.logmanager.LogManager");
File wiringClassesDirectory = new File(getBuildDir(), "wiring-classes");
wiringClassesDirectory.mkdirs();
addToClassPaths(classPathManifest, context, wiringClassesDirectory);
addGradlePluginDeps(classPathManifest, context);
File tempFile = new File(getBuildDir(), extension.finalName() + "-dev.jar");
tempFile.delete();
tempFile.deleteOnExit();
StringBuilder resources = new StringBuilder();
String res = null;
for (File file : extension.resourcesDir()) {
if (resources.length() > 0)
resources.append(File.pathSeparator);
resources.append(file.getAbsolutePath());
res = file.getAbsolutePath();
}
DevModeContext.ModuleInfo moduleInfo = new DevModeContext.ModuleInfo(
project.getName(),
project.getProjectDir().getAbsolutePath(),
Collections.singleton(getSourceDir().getAbsolutePath()),
extension.outputDirectory().getAbsolutePath(),
res);
context.getModules().add(moduleInfo);
try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(tempFile))) {
out.putNextEntry(new ZipEntry("META-INF/"));
Manifest manifest = new Manifest();
manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, classPathManifest.toString());
manifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, DevModeMain.class.getName());
out.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF"));
manifest.write(out);
out.putNextEntry(new ZipEntry(DevModeMain.DEV_MODE_CONTEXT));
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
ObjectOutputStream obj = new ObjectOutputStream(new DataOutputStream(bytes));
obj.writeObject(context);
obj.close();
out.write(bytes.toByteArray());
}
extension.outputDirectory().mkdirs();
ApplicationInfoUtil.writeApplicationInfoProperties(appModel.getAppArtifact(), extension.outputDirectory().toPath());
args.add("-jar");
args.add(tempFile.getAbsolutePath());
final String outputClassDirectory = extension.outputDirectory().getAbsolutePath();
final String outputResourcesDirectory = extension.outputConfigDirectory().getAbsolutePath();
if (outputClassDirectory.equals(outputResourcesDirectory)) {
args.add(outputClassDirectory);
} else {
args.add(outputClassDirectory + "," + outputResourcesDirectory);
}
args.add(wiringClassesDirectory.getAbsolutePath());
args.add(new File(getBuildDir(), "transformer-cache").getAbsolutePath());
ProcessBuilder pb = new ProcessBuilder(args.toArray(new String[0]));
pb.redirectErrorStream(true);
pb.redirectInput(ProcessBuilder.Redirect.INHERIT);
pb.directory(extension.outputDirectory());
System.out.println("Starting process: ");
pb.command().forEach(System.out::println);
System.out.println("Args: ");
args.forEach(System.out::println);
Process p = pb.start();
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
p.destroy();
}
}, "Development Mode Shutdown Hook"));
try {
ExecutorService es = Executors.newSingleThreadExecutor();
es.submit(() -> copyOutputToConsole(p.getInputStream()));
p.waitFor();
} catch (Exception e) {
p.destroy();
throw e;
}
} catch (Exception e) {
throw new GradleException("Failed to run", e);
}
} | class QuarkusDev extends QuarkusTask {
private Set<File> filesIncludedInClasspath = new HashSet<>();
private String debug;
private File buildDir;
private String sourceDir;
private String jvmArgs;
private boolean preventnoverify = false;
public QuarkusDev() {
super("Development mode: enables hot deployment with background compilation");
}
@Optional
@Input
public String getDebug() {
return debug;
}
@Option(description = "If this server should be started in debug mode. " +
"The default is to start in debug mode without suspending and listen on port 5005." +
" It supports the following options:\n" +
" \"false\" - The JVM is not started in debug mode\n" +
" \"true\" - The JVM is started in debug mode and suspends until a debugger is attached to port 5005\n" +
" \"client\" - The JVM is started in client mode, and attempts to connect to localhost:5005\n" +
"\"{port}\" - The JVM is started in debug mode and suspends until a debugger is attached to {port}", option = "debug")
public void setDebug(String debug) {
this.debug = debug;
}
@InputDirectory
@Optional
public File getBuildDir() {
if (buildDir == null)
buildDir = getProject().getBuildDir();
return buildDir;
}
public void setBuildDir(File buildDir) {
this.buildDir = buildDir;
}
@Optional
@InputDirectory
public File getSourceDir() {
if (sourceDir == null)
return extension().sourceDir();
else
return new File(sourceDir);
}
@Option(description = "Set source directory", option = "source-dir")
public void setSourceDir(String sourceDir) {
this.sourceDir = sourceDir;
}
@Optional
@Input
public String getJvmArgs() {
return jvmArgs;
}
@Option(description = "Set JVM arguments", option = "jvm-args")
public void setJvmArgs(String jvmArgs) {
this.jvmArgs = jvmArgs;
}
@Optional
@Input
public boolean isPreventnoverify() {
return preventnoverify;
}
@Option(description = "value is intended to be set to true when some generated bytecode is" +
" erroneous causing the JVM to crash when the verify:none option is set " +
"(which is on by default)", option = "prevent-noverify")
public void setPreventnoverify(boolean preventnoverify) {
this.preventnoverify = preventnoverify;
}
@TaskAction
private void copyOutputToConsole(InputStream is) {
try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8);
BufferedReader br = new BufferedReader(isr)) {
String line;
while ((line = br.readLine()) != null) {
System.out.println(line);
}
} catch (Exception e) {
throw new GradleException("Failed to copy output to console", e);
}
}
private void addGradlePluginDeps(StringBuilder classPathManifest, DevModeContext context) {
Configuration conf = getProject().getBuildscript().getConfigurations().getByName("classpath");
ResolvedDependency quarkusDep = conf.getResolvedConfiguration().getFirstLevelModuleDependencies().stream()
.filter(rd -> "quarkus-gradle-plugin".equals(rd.getModuleName()))
.findFirst()
.orElseThrow(() -> new IllegalStateException("Unable to find quarkus-gradle-plugin dependency"));
quarkusDep.getAllModuleArtifacts().stream()
.map(ra -> ra.getFile())
.forEach(f -> addToClassPaths(classPathManifest, context, f));
}
private void addToClassPaths(StringBuilder classPathManifest, DevModeContext context, File file) {
if (filesIncludedInClasspath.add(file)) {
getProject().getLogger().info("Adding dependency {}", file);
URI uri = file.toPath().toAbsolutePath().toUri();
classPathManifest.append(uri.getPath());
context.getClassPath().add(toUrl(uri));
if (file.isDirectory()) {
classPathManifest.append("/");
}
classPathManifest.append(" ");
}
}
private URL toUrl(URI uri) {
try {
return uri.toURL();
} catch (MalformedURLException e) {
throw new IllegalStateException("Failed to convert URI to URL: " + uri, e);
}
}
} | class QuarkusDev extends QuarkusTask {
private Set<File> filesIncludedInClasspath = new HashSet<>();
private String debug;
private File buildDir;
private String sourceDir;
private String jvmArgs;
private boolean preventnoverify = false;
public QuarkusDev() {
super("Development mode: enables hot deployment with background compilation");
}
@Optional
@Input
public String getDebug() {
return debug;
}
@Option(description = "If this server should be started in debug mode. " +
"The default is to start in debug mode without suspending and listen on port 5005." +
" It supports the following options:\n" +
" \"false\" - The JVM is not started in debug mode\n" +
" \"true\" - The JVM is started in debug mode and suspends until a debugger is attached to port 5005\n" +
" \"client\" - The JVM is started in client mode, and attempts to connect to localhost:5005\n" +
"\"{port}\" - The JVM is started in debug mode and suspends until a debugger is attached to {port}", option = "debug")
public void setDebug(String debug) {
this.debug = debug;
}
@InputDirectory
@Optional
public File getBuildDir() {
if (buildDir == null)
buildDir = getProject().getBuildDir();
return buildDir;
}
public void setBuildDir(File buildDir) {
this.buildDir = buildDir;
}
@Optional
@InputDirectory
public File getSourceDir() {
if (sourceDir == null)
return extension().sourceDir();
else
return new File(sourceDir);
}
@Option(description = "Set source directory", option = "source-dir")
public void setSourceDir(String sourceDir) {
this.sourceDir = sourceDir;
}
@Optional
@Input
public String getJvmArgs() {
return jvmArgs;
}
@Option(description = "Set JVM arguments", option = "jvm-args")
public void setJvmArgs(String jvmArgs) {
this.jvmArgs = jvmArgs;
}
@Optional
@Input
public boolean isPreventnoverify() {
return preventnoverify;
}
@Option(description = "value is intended to be set to true when some generated bytecode is" +
" erroneous causing the JVM to crash when the verify:none option is set " +
"(which is on by default)", option = "prevent-noverify")
public void setPreventnoverify(boolean preventnoverify) {
this.preventnoverify = preventnoverify;
}
@TaskAction
private void copyOutputToConsole(InputStream is) {
try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8);
BufferedReader br = new BufferedReader(isr)) {
String line;
while ((line = br.readLine()) != null) {
System.out.println(line);
}
} catch (Exception e) {
throw new GradleException("Failed to copy output to console", e);
}
}
private void addGradlePluginDeps(StringBuilder classPathManifest, DevModeContext context) {
Configuration conf = getProject().getBuildscript().getConfigurations().getByName("classpath");
ResolvedDependency quarkusDep = conf.getResolvedConfiguration().getFirstLevelModuleDependencies().stream()
.filter(rd -> "quarkus-gradle-plugin".equals(rd.getModuleName()))
.findFirst()
.orElseThrow(() -> new IllegalStateException("Unable to find quarkus-gradle-plugin dependency"));
quarkusDep.getAllModuleArtifacts().stream()
.map(ra -> ra.getFile())
.forEach(f -> addToClassPaths(classPathManifest, context, f));
}
private void addToClassPaths(StringBuilder classPathManifest, DevModeContext context, File file) {
if (filesIncludedInClasspath.add(file)) {
getProject().getLogger().info("Adding dependency {}", file);
URI uri = file.toPath().toAbsolutePath().toUri();
classPathManifest.append(uri.getPath());
context.getClassPath().add(toUrl(uri));
if (file.isDirectory()) {
classPathManifest.append("/");
}
classPathManifest.append(" ");
}
}
private URL toUrl(URI uri) {
try {
return uri.toURL();
} catch (MalformedURLException e) {
throw new IllegalStateException("Failed to convert URI to URL: " + uri, e);
}
}
} |
It's required, I have analyzed [here](https://github.com/apache/flink/pull/19993#discussion_r901261999). > For old code, the unit-test gets stuck in the second dataFuture.get(). After the change, the unit-test worked fine. | public void testCanBeClosed() throws Exception {
// Verifies the executor can be closed (via try-with-resources) while
// unfinished write requests are still queued: start a checkpoint, then
// enqueue two write requests whose data futures never complete.
long checkpointId = 1L;
ChannelStateWriteRequestDispatcher processor =
new ChannelStateWriteRequestDispatcherImpl(
"dummy task",
0,
getStreamFactoryFactory(),
new ChannelStateSerializerImpl());
try (ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, processor)) {
worker.start();
worker.submit(
new CheckpointStartRequest(
checkpointId,
new ChannelStateWriter.ChannelStateWriteResult(),
CheckpointStorageLocationReference.getDefault()));
// Two pending requests are deliberate: close() must not get stuck on the
// second incomplete data future (see the review discussion linked above).
worker.submit(
ChannelStateWriteRequest.write(
checkpointId,
new ResultSubpartitionInfo(0, 0),
new CompletableFuture<>()));
worker.submit(
ChannelStateWriteRequest.write(
checkpointId,
new ResultSubpartitionInfo(0, 0),
new CompletableFuture<>()));
}
}
long checkpointId = 1L;
ChannelStateWriteRequestDispatcher processor =
new ChannelStateWriteRequestDispatcherImpl(
"dummy task",
0,
getStreamFactoryFactory(),
new ChannelStateSerializerImpl());
try (ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, processor)) {
worker.start();
worker.submit(
new CheckpointStartRequest(
checkpointId,
new ChannelStateWriter.ChannelStateWriteResult(),
CheckpointStorageLocationReference.getDefault()));
worker.submit(
ChannelStateWriteRequest.write(
checkpointId,
new ResultSubpartitionInfo(0, 0),
new CompletableFuture<>()));
worker.submit(
ChannelStateWriteRequest.write(
checkpointId,
new ResultSubpartitionInfo(0, 0),
new CompletableFuture<>()));
}
} | class ChannelStateWriteRequestExecutorImplTest {
private static final String TASK_NAME = "test task";
@Test(expected = IllegalStateException.class)
public void testCloseAfterSubmit() throws Exception {
testCloseAfterSubmit(ChannelStateWriteRequestExecutor::submit);
}
@Test(expected = IllegalStateException.class)
public void testCloseAfterSubmitPriority() throws Exception {
testCloseAfterSubmit(ChannelStateWriteRequestExecutor::submitPriority);
}
@Test
public void testSubmitFailure() throws Exception {
testSubmitFailure(ChannelStateWriteRequestExecutor::submit);
}
@Test
public void testSubmitPriorityFailure() throws Exception {
testSubmitFailure(ChannelStateWriteRequestExecutor::submitPriority);
}
private void testCloseAfterSubmit(
BiConsumerWithException<
ChannelStateWriteRequestExecutor, ChannelStateWriteRequest, Exception>
requestFun)
throws Exception {
WorkerClosingDeque closingDeque = new WorkerClosingDeque();
ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, NO_OP, closingDeque);
closingDeque.setWorker(worker);
TestWriteRequest request = new TestWriteRequest();
requestFun.accept(worker, request);
assertTrue(closingDeque.isEmpty());
assertFalse(request.isCancelled());
}
private void testSubmitFailure(
BiConsumerWithException<
ChannelStateWriteRequestExecutor, ChannelStateWriteRequest, Exception>
submitAction)
throws Exception {
TestWriteRequest request = new TestWriteRequest();
LinkedBlockingDeque<ChannelStateWriteRequest> deque = new LinkedBlockingDeque<>();
try {
submitAction.accept(
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, NO_OP, deque), request);
} catch (IllegalStateException e) {
return;
} finally {
assertTrue(request.cancelled);
assertTrue(deque.isEmpty());
}
throw new RuntimeException("expected exception not thrown");
}
@Test
@SuppressWarnings("CallToThreadRun")
public void testCleanup() throws IOException {
TestWriteRequest request = new TestWriteRequest();
LinkedBlockingDeque<ChannelStateWriteRequest> deque = new LinkedBlockingDeque<>();
deque.add(request);
TestRequestDispatcher requestProcessor = new TestRequestDispatcher();
ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, requestProcessor, deque);
worker.close();
worker.run();
assertTrue(requestProcessor.isStopped());
assertTrue(deque.isEmpty());
assertTrue(request.isCancelled());
}
@Test
public void testIgnoresInterruptsWhileRunning() throws Exception {
TestRequestDispatcher requestProcessor = new TestRequestDispatcher();
LinkedBlockingDeque<ChannelStateWriteRequest> deque = new LinkedBlockingDeque<>();
try (ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, requestProcessor, deque)) {
worker.start();
worker.getThread().interrupt();
worker.submit(new TestWriteRequest());
worker.getThread().interrupt();
while (!deque.isEmpty()) {
Thread.sleep(100);
}
}
}
@Test(timeout = 10000)
@Test
public void testRecordsException() throws IOException {
TestException testException = new TestException();
TestRequestDispatcher throwingRequestProcessor =
new TestRequestDispatcher() {
@Override
public void dispatch(ChannelStateWriteRequest request) {
throw testException;
}
};
LinkedBlockingDeque<ChannelStateWriteRequest> deque =
new LinkedBlockingDeque<>(Arrays.asList(new TestWriteRequest()));
ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(
TASK_NAME, throwingRequestProcessor, deque);
worker.run();
try {
worker.close();
} catch (IOException e) {
if (findThrowable(e, TestException.class)
.filter(found -> found == testException)
.isPresent()) {
return;
} else {
throw e;
}
}
fail("exception not thrown");
}
private static class TestWriteRequest implements ChannelStateWriteRequest {
private boolean cancelled = false;
@Override
public long getCheckpointId() {
return 0;
}
@Override
public void cancel(Throwable cause) {
cancelled = true;
}
public boolean isCancelled() {
return cancelled;
}
}
private static class WorkerClosingDeque extends LinkedBlockingDeque<ChannelStateWriteRequest> {
private ChannelStateWriteRequestExecutor worker;
@Override
public void put(@Nonnull ChannelStateWriteRequest request) throws InterruptedException {
super.putFirst(request);
try {
worker.close();
} catch (IOException e) {
ExceptionUtils.rethrow(e);
}
}
@Override
public void putFirst(@Nonnull ChannelStateWriteRequest request)
throws InterruptedException {
super.putFirst(request);
try {
worker.close();
} catch (IOException e) {
ExceptionUtils.rethrow(e);
}
}
public void setWorker(ChannelStateWriteRequestExecutor worker) {
this.worker = worker;
}
}
private static class TestRequestDispatcher implements ChannelStateWriteRequestDispatcher {
private boolean isStopped;
@Override
public void dispatch(ChannelStateWriteRequest request) {}
@Override
public void fail(Throwable cause) {
isStopped = true;
}
public boolean isStopped() {
return isStopped;
}
}
} | class ChannelStateWriteRequestExecutorImplTest {
private static final String TASK_NAME = "test task";
@Test(expected = IllegalStateException.class)
public void testCloseAfterSubmit() throws Exception {
testCloseAfterSubmit(ChannelStateWriteRequestExecutor::submit);
}
@Test(expected = IllegalStateException.class)
public void testCloseAfterSubmitPriority() throws Exception {
testCloseAfterSubmit(ChannelStateWriteRequestExecutor::submitPriority);
}
@Test
public void testSubmitFailure() throws Exception {
testSubmitFailure(ChannelStateWriteRequestExecutor::submit);
}
@Test
public void testSubmitPriorityFailure() throws Exception {
testSubmitFailure(ChannelStateWriteRequestExecutor::submitPriority);
}
private void testCloseAfterSubmit(
BiConsumerWithException<
ChannelStateWriteRequestExecutor, ChannelStateWriteRequest, Exception>
requestFun)
throws Exception {
WorkerClosingDeque closingDeque = new WorkerClosingDeque();
ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, NO_OP, closingDeque);
closingDeque.setWorker(worker);
TestWriteRequest request = new TestWriteRequest();
requestFun.accept(worker, request);
assertTrue(closingDeque.isEmpty());
assertFalse(request.isCancelled());
}
private void testSubmitFailure(
BiConsumerWithException<
ChannelStateWriteRequestExecutor, ChannelStateWriteRequest, Exception>
submitAction)
throws Exception {
TestWriteRequest request = new TestWriteRequest();
LinkedBlockingDeque<ChannelStateWriteRequest> deque = new LinkedBlockingDeque<>();
try {
submitAction.accept(
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, NO_OP, deque), request);
} catch (IllegalStateException e) {
return;
} finally {
assertTrue(request.cancelled);
assertTrue(deque.isEmpty());
}
throw new RuntimeException("expected exception not thrown");
}
@Test
@SuppressWarnings("CallToThreadRun")
public void testCleanup() throws IOException {
TestWriteRequest request = new TestWriteRequest();
LinkedBlockingDeque<ChannelStateWriteRequest> deque = new LinkedBlockingDeque<>();
deque.add(request);
TestRequestDispatcher requestProcessor = new TestRequestDispatcher();
ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, requestProcessor, deque);
worker.close();
worker.run();
assertTrue(requestProcessor.isStopped());
assertTrue(deque.isEmpty());
assertTrue(request.isCancelled());
}
@Test
public void testIgnoresInterruptsWhileRunning() throws Exception {
TestRequestDispatcher requestProcessor = new TestRequestDispatcher();
LinkedBlockingDeque<ChannelStateWriteRequest> deque = new LinkedBlockingDeque<>();
try (ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(TASK_NAME, requestProcessor, deque)) {
worker.start();
worker.getThread().interrupt();
worker.submit(new TestWriteRequest());
worker.getThread().interrupt();
while (!deque.isEmpty()) {
Thread.sleep(100);
}
}
}
@Test
@Test
public void testRecordsException() throws IOException {
TestException testException = new TestException();
TestRequestDispatcher throwingRequestProcessor =
new TestRequestDispatcher() {
@Override
public void dispatch(ChannelStateWriteRequest request) {
throw testException;
}
};
LinkedBlockingDeque<ChannelStateWriteRequest> deque =
new LinkedBlockingDeque<>(Arrays.asList(new TestWriteRequest()));
ChannelStateWriteRequestExecutorImpl worker =
new ChannelStateWriteRequestExecutorImpl(
TASK_NAME, throwingRequestProcessor, deque);
worker.run();
try {
worker.close();
} catch (IOException e) {
if (findThrowable(e, TestException.class)
.filter(found -> found == testException)
.isPresent()) {
return;
} else {
throw e;
}
}
fail("exception not thrown");
}
private static class TestWriteRequest implements ChannelStateWriteRequest {
private boolean cancelled = false;
@Override
public long getCheckpointId() {
return 0;
}
@Override
public void cancel(Throwable cause) {
cancelled = true;
}
public boolean isCancelled() {
return cancelled;
}
}
private static class WorkerClosingDeque extends LinkedBlockingDeque<ChannelStateWriteRequest> {
private ChannelStateWriteRequestExecutor worker;
@Override
public void put(@Nonnull ChannelStateWriteRequest request) throws InterruptedException {
super.putFirst(request);
try {
worker.close();
} catch (IOException e) {
ExceptionUtils.rethrow(e);
}
}
@Override
public void putFirst(@Nonnull ChannelStateWriteRequest request)
throws InterruptedException {
super.putFirst(request);
try {
worker.close();
} catch (IOException e) {
ExceptionUtils.rethrow(e);
}
}
public void setWorker(ChannelStateWriteRequestExecutor worker) {
this.worker = worker;
}
}
private static class TestRequestDispatcher implements ChannelStateWriteRequestDispatcher {
private boolean isStopped;
@Override
public void dispatch(ChannelStateWriteRequest request) {}
@Override
public void fail(Throwable cause) {
isStopped = true;
}
public boolean isStopped() {
return isStopped;
}
}
} |
loadAzureVmMetaData is part of init and not meant to call multiple times , why we used AtomicReference here ? | private void loadAzureVmMetaData() {
// Populates Azure VM metadata for telemetry. azureVmMetaDataSingleton is a
// *static* AtomicReference shared by every ClientTelemetry instance in the
// process: only the first client pays for the HTTP call, later instances
// reuse the cached snapshot, and compareAndSet(null, ...) keeps concurrent
// first loads from overwriting each other.
AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get();
if (metadataSnapshot != null) {
this.populateAzureVmMetaData(metadataSnapshot);
return;
}
URI targetEndpoint = null;
try {
targetEndpoint = new URI(AZURE_VM_METADATA);
} catch (URISyntaxException ex) {
logger.info("Unable to parse azure vm metadata url");
return;
}
// The "Metadata: true" header is required by the Azure instance metadata
// service (IMDS).
HashMap<String, String> headers = new HashMap<>();
headers.put("Metadata", "true");
HttpHeaders httpHeaders = new HttpHeaders(headers);
HttpRequest httpRequest = new HttpRequest(HttpMethod.GET, targetEndpoint, targetEndpoint.getPort(),
httpHeaders);
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest);
// Fire-and-forget: any failure is treated as "not running on an Azure VM".
httpResponseMono
.flatMap(response -> response.bodyAsString()).map(metadataJson -> parse(metadataJson,
AzureVMMetadata.class)).doOnSuccess(metadata -> {
azureVmMetaDataSingleton.compareAndSet(null, metadata);
this.populateAzureVmMetaData(metadata);
}).onErrorResume(throwable -> {
logger.info("Client is not on azure vm");
logger.debug("Unable to get azure vm metadata", throwable);
return Mono.empty();
}).subscribe();
}
AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get();
if (metadataSnapshot != null) {
this.populateAzureVmMetaData(metadataSnapshot);
return;
}
URI targetEndpoint = null;
try {
targetEndpoint = new URI(AZURE_VM_METADATA);
} catch (URISyntaxException ex) {
logger.info("Unable to parse azure vm metadata url");
return;
}
HashMap<String, String> headers = new HashMap<>();
headers.put("Metadata", "true");
HttpHeaders httpHeaders = new HttpHeaders(headers);
HttpRequest httpRequest = new HttpRequest(HttpMethod.GET, targetEndpoint, targetEndpoint.getPort(),
httpHeaders);
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest);
httpResponseMono
.flatMap(response -> response.bodyAsString()).map(metadataJson -> parse(metadataJson,
AzureVMMetadata.class)).doOnSuccess(metadata -> {
azureVmMetaDataSingleton.compareAndSet(null, metadata);
this.populateAzureVmMetaData(metadata);
}).onErrorResume(throwable -> {
logger.info("Client is not on azure vm");
logger.debug("Unable to get azure vm metadata", throwable);
return Mono.empty();
}).subscribe();
} | class ClientTelemetry {
public final static int ONE_KB_TO_BYTES = 1024;
public final static int REQUEST_LATENCY_MAX_MILLI_SEC = 300000;
public final static int REQUEST_LATENCY_SUCCESS_PRECISION = 4;
public final static int REQUEST_LATENCY_FAILURE_PRECISION = 2;
public final static String REQUEST_LATENCY_NAME = "RequestLatency";
public final static String REQUEST_LATENCY_UNIT = "MilliSecond";
public final static int REQUEST_CHARGE_MAX = 10000;
public final static int REQUEST_CHARGE_PRECISION = 2;
public final static String REQUEST_CHARGE_NAME = "RequestCharge";
public final static String REQUEST_CHARGE_UNIT = "RU";
public final static String TCP_NEW_CHANNEL_LATENCY_NAME = "TcpNewChannelOpenLatency";
public final static String TCP_NEW_CHANNEL_LATENCY_UNIT = "MilliSecond";
public final static int TCP_NEW_CHANNEL_LATENCY_MAX_MILLI_SEC = 300000;
public final static int TCP_NEW_CHANNEL_LATENCY_PRECISION = 2;
public final static int CPU_MAX = 100;
public final static int CPU_PRECISION = 2;
private final static String CPU_NAME = "CPU";
private final static String CPU_UNIT = "Percentage";
public final static int MEMORY_MAX_IN_MB = 102400;
public final static int MEMORY_PRECISION = 2;
private final static String MEMORY_NAME = "MemoryRemaining";
private final static String MEMORY_UNIT = "MB";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private final static AtomicLong instanceCount = new AtomicLong(0);
private final static AtomicReference<AzureVMMetadata> azureVmMetaDataSingleton =
new AtomicReference<>(null);
private ClientTelemetryInfo clientTelemetryInfo;
private final HttpClient httpClient;
private final ScheduledThreadPoolExecutor scheduledExecutorService = new ScheduledThreadPoolExecutor(1,
new CosmosDaemonThreadFactory("ClientTelemetry-" + instanceCount.incrementAndGet()));
private final Scheduler scheduler = Schedulers.fromExecutor(scheduledExecutorService);
private static final Logger logger = LoggerFactory.getLogger(ClientTelemetry.class);
private volatile boolean isClosed;
private volatile boolean isClientTelemetryEnabled;
private static String AZURE_VM_METADATA = "http:
private static final double PERCENTILE_50 = 50.0;
private static final double PERCENTILE_90 = 90.0;
private static final double PERCENTILE_95 = 95.0;
private static final double PERCENTILE_99 = 99.0;
private static final double PERCENTILE_999 = 99.9;
private final int clientTelemetrySchedulingSec;
private final IAuthorizationTokenProvider tokenProvider;
private final String globalDatabaseAccountName;
public ClientTelemetry(DiagnosticsClientContext diagnosticsClientContext,
Boolean acceleratedNetworking,
String clientId,
String processId,
String userAgent,
ConnectionMode connectionMode,
String globalDatabaseAccountName,
String applicationRegion,
String hostEnvInfo,
HttpClient httpClient,
boolean isClientTelemetryEnabled,
IAuthorizationTokenProvider tokenProvider,
List<String> preferredRegions
) {
clientTelemetryInfo = new ClientTelemetryInfo(
getMachineId(diagnosticsClientContext),
clientId,
processId,
userAgent,
connectionMode,
globalDatabaseAccountName,
applicationRegion,
hostEnvInfo,
acceleratedNetworking,
preferredRegions);
this.isClosed = false;
this.httpClient = httpClient;
this.isClientTelemetryEnabled = isClientTelemetryEnabled;
this.clientTelemetrySchedulingSec = Configs.getClientTelemetrySchedulingInSec();
this.tokenProvider = tokenProvider;
this.globalDatabaseAccountName = globalDatabaseAccountName;
}
public ClientTelemetryInfo getClientTelemetryInfo() {
return clientTelemetryInfo;
}
public static String getMachineId(DiagnosticsClientContext diagnosticsClientContext) {
AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get();
if (metadataSnapshot != null && metadataSnapshot.getVmId() != null) {
String machineId = "vmId:" + metadataSnapshot.getVmId();
if (diagnosticsClientContext != null) {
diagnosticsClientContext.getConfig().withMachineId(machineId);
}
return machineId;
}
if (diagnosticsClientContext == null) {
return "";
}
return diagnosticsClientContext.getConfig().getMachineId();
}
public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, long value) {
try {
doubleHistogram.recordValue(value);
} catch (Exception ex) {
logger.warn("Error while recording value for client telemetry. ", ex);
}
}
public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, double value) {
try {
doubleHistogram.recordValue(value);
} catch (Exception ex) {
logger.warn("Error while recording value for client telemetry. ", ex);
}
}
public boolean isClientTelemetryEnabled() {
return isClientTelemetryEnabled;
}
public void init() {
loadAzureVmMetaData();
sendClientTelemetry().subscribe();
}
public void close() {
this.isClosed = true;
this.scheduledExecutorService.shutdown();
logger.debug("GlobalEndpointManager closed.");
}
private Mono<Void> sendClientTelemetry() {
return Mono.delay(Duration.ofSeconds(clientTelemetrySchedulingSec), CosmosSchedulers.COSMOS_PARALLEL)
.flatMap(t -> {
if (this.isClosed) {
logger.warn("client already closed");
return Mono.empty();
}
if (!Configs.isClientTelemetryEnabled(this.isClientTelemetryEnabled)) {
logger.trace("client telemetry not enabled");
return Mono.empty();
}
readHistogram();
try {
String endpoint = Configs.getClientTelemetryEndpoint();
if (StringUtils.isEmpty(endpoint)) {
logger.info("ClientTelemetry {}",
OBJECT_MAPPER.writeValueAsString(this.clientTelemetryInfo));
clearDataForNextRun();
return this.sendClientTelemetry();
} else {
URI targetEndpoint = new URI(endpoint);
ByteBuffer byteBuffer =
BridgeInternal.serializeJsonToByteBuffer(this.clientTelemetryInfo,
ClientTelemetry.OBJECT_MAPPER);
Flux<byte[]> fluxBytes = Flux.just(RxDocumentServiceRequest.toByteArray(byteBuffer));
Map<String, String> headers = new HashMap<>();
String date = Utils.nowAsRFC1123();
headers.put(HttpConstants.HttpHeaders.X_DATE, date);
String authorization = this.tokenProvider.getUserAuthorizationToken(
"", ResourceType.ClientTelemetry, RequestVerb.POST, headers,
AuthorizationTokenType.PrimaryMasterKey, null);
try {
authorization = URLEncoder.encode(authorization, Constants.UrlEncodingInfo.UTF_8);
} catch (UnsupportedEncodingException e) {
logger.error("Failed to encode authToken. Exception: ", e);
this.clearDataForNextRun();
return this.sendClientTelemetry();
}
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON);
httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_ENCODING, RuntimeConstants.Encoding.GZIP);
httpHeaders.set(HttpConstants.HttpHeaders.X_DATE, date);
httpHeaders.set(HttpConstants.HttpHeaders.DATABASE_ACCOUNT_NAME,
this.globalDatabaseAccountName);
httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization);
String envName = Configs.getEnvironmentName();
if (StringUtils.isNotEmpty(envName)) {
httpHeaders.set(HttpConstants.HttpHeaders.ENVIRONMENT_NAME, envName);
}
HttpRequest httpRequest = new HttpRequest(HttpMethod.POST, targetEndpoint,
targetEndpoint.getPort(), httpHeaders, fluxBytes);
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest,
Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds()));
return httpResponseMono.flatMap(response -> {
if (response.statusCode() != HttpConstants.StatusCodes.OK) {
logger.error("Client telemetry request did not succeeded, status code {}",
response.statusCode());
}
this.clearDataForNextRun();
return this.sendClientTelemetry();
}).onErrorResume(throwable -> {
logger.error("Error while sending client telemetry request Exception: ", throwable);
this.clearDataForNextRun();
return this.sendClientTelemetry();
});
}
} catch (JsonProcessingException | URISyntaxException ex) {
logger.error("Error while preparing client telemetry. Exception: ", ex);
this.clearDataForNextRun();
return this.sendClientTelemetry();
}
}).onErrorResume(ex -> {
logger.error("sendClientTelemetry() - Unable to send client telemetry" +
". Exception: ", ex);
clearDataForNextRun();
return this.sendClientTelemetry();
}).subscribeOn(scheduler);
}
private void populateAzureVmMetaData(AzureVMMetadata azureVMMetadata) {
this.clientTelemetryInfo.setApplicationRegion(azureVMMetadata.getLocation());
this.clientTelemetryInfo.setVmId(azureVMMetadata.getVmId());
this.clientTelemetryInfo.setHostEnvInfo(azureVMMetadata.getOsType() + "|" + azureVMMetadata.getSku() +
"|" + azureVMMetadata.getVmSize() + "|" + azureVMMetadata.getAzEnvironment());
}
private static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) {
try {
return OBJECT_MAPPER.readValue(itemResponseBodyAsString, itemClassType);
} catch (IOException e) {
throw new IllegalStateException(
"Failed to parse string [" + itemResponseBodyAsString + "] to POJO.", e);
}
}
private void clearDataForNextRun() {
this.clientTelemetryInfo.getSystemInfoMap().clear();
this.clientTelemetryInfo.getOperationInfoMap().clear();
this.clientTelemetryInfo.getCacheRefreshInfoMap().clear();
for (ConcurrentDoubleHistogram histogram : this.clientTelemetryInfo.getSystemInfoMap().values()) {
histogram.reset();
}
}
private void readHistogram() {
ConcurrentDoubleHistogram cpuHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.CPU_MAX,
ClientTelemetry.CPU_PRECISION);
cpuHistogram.setAutoResize(true);
for (double val : CpuMemoryMonitor.getClientTelemetryCpuLatestList()) {
recordValue(cpuHistogram, val);
}
ReportPayload cpuReportPayload = new ReportPayload(CPU_NAME, CPU_UNIT);
clientTelemetryInfo.getSystemInfoMap().put(cpuReportPayload, cpuHistogram);
ConcurrentDoubleHistogram memoryHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.MEMORY_MAX_IN_MB,
ClientTelemetry.MEMORY_PRECISION);
memoryHistogram.setAutoResize(true);
for (double val : CpuMemoryMonitor.getClientTelemetryMemoryLatestList()) {
recordValue(memoryHistogram, val);
}
ReportPayload memoryReportPayload = new ReportPayload(MEMORY_NAME, MEMORY_UNIT);
clientTelemetryInfo.getSystemInfoMap().put(memoryReportPayload, memoryHistogram);
this.clientTelemetryInfo.setTimeStamp(Instant.now().toString());
for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry :
this.clientTelemetryInfo.getSystemInfoMap().entrySet()) {
fillMetricsInfo(entry.getKey(), entry.getValue());
}
for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry :
this.clientTelemetryInfo.getCacheRefreshInfoMap().entrySet()) {
fillMetricsInfo(entry.getKey(), entry.getValue());
}
for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry :
this.clientTelemetryInfo.getOperationInfoMap().entrySet()) {
fillMetricsInfo(entry.getKey(), entry.getValue());
}
}
private void fillMetricsInfo(ReportPayload payload, ConcurrentDoubleHistogram histogram) {
DoubleHistogram copyHistogram = histogram.copy();
payload.getMetricInfo().setCount(copyHistogram.getTotalCount());
payload.getMetricInfo().setMax(copyHistogram.getMaxValue());
payload.getMetricInfo().setMin(copyHistogram.getMinValue());
payload.getMetricInfo().setMean(copyHistogram.getMean());
Map<Double, Double> percentile = new HashMap<>();
percentile.put(PERCENTILE_50, copyHistogram.getValueAtPercentile(PERCENTILE_50));
percentile.put(PERCENTILE_90, copyHistogram.getValueAtPercentile(PERCENTILE_90));
percentile.put(PERCENTILE_95, copyHistogram.getValueAtPercentile(PERCENTILE_95));
percentile.put(PERCENTILE_99, copyHistogram.getValueAtPercentile(PERCENTILE_99));
percentile.put(PERCENTILE_999, copyHistogram.getValueAtPercentile(PERCENTILE_999));
payload.getMetricInfo().setPercentiles(percentile);
}
} | class ClientTelemetry {
public final static int ONE_KB_TO_BYTES = 1024;
public final static int REQUEST_LATENCY_MAX_MILLI_SEC = 300000;
public final static int REQUEST_LATENCY_SUCCESS_PRECISION = 4;
public final static int REQUEST_LATENCY_FAILURE_PRECISION = 2;
public final static String REQUEST_LATENCY_NAME = "RequestLatency";
public final static String REQUEST_LATENCY_UNIT = "MilliSecond";
public final static int REQUEST_CHARGE_MAX = 10000;
public final static int REQUEST_CHARGE_PRECISION = 2;
public final static String REQUEST_CHARGE_NAME = "RequestCharge";
public final static String REQUEST_CHARGE_UNIT = "RU";
public final static String TCP_NEW_CHANNEL_LATENCY_NAME = "TcpNewChannelOpenLatency";
public final static String TCP_NEW_CHANNEL_LATENCY_UNIT = "MilliSecond";
public final static int TCP_NEW_CHANNEL_LATENCY_MAX_MILLI_SEC = 300000;
public final static int TCP_NEW_CHANNEL_LATENCY_PRECISION = 2;
public final static int CPU_MAX = 100;
public final static int CPU_PRECISION = 2;
private final static String CPU_NAME = "CPU";
private final static String CPU_UNIT = "Percentage";
public final static int MEMORY_MAX_IN_MB = 102400;
public final static int MEMORY_PRECISION = 2;
private final static String MEMORY_NAME = "MemoryRemaining";
private final static String MEMORY_UNIT = "MB";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private final static AtomicLong instanceCount = new AtomicLong(0);
private final static AtomicReference<AzureVMMetadata> azureVmMetaDataSingleton =
new AtomicReference<>(null);
private ClientTelemetryInfo clientTelemetryInfo;
private final HttpClient httpClient;
private final ScheduledThreadPoolExecutor scheduledExecutorService = new ScheduledThreadPoolExecutor(1,
new CosmosDaemonThreadFactory("ClientTelemetry-" + instanceCount.incrementAndGet()));
private final Scheduler scheduler = Schedulers.fromExecutor(scheduledExecutorService);
private static final Logger logger = LoggerFactory.getLogger(ClientTelemetry.class);
private volatile boolean isClosed;
private volatile boolean isClientTelemetryEnabled;
private static String AZURE_VM_METADATA = "http:
private static final double PERCENTILE_50 = 50.0;
private static final double PERCENTILE_90 = 90.0;
private static final double PERCENTILE_95 = 95.0;
private static final double PERCENTILE_99 = 99.0;
private static final double PERCENTILE_999 = 99.9;
private final int clientTelemetrySchedulingSec;
private final IAuthorizationTokenProvider tokenProvider;
private final String globalDatabaseAccountName;
public ClientTelemetry(DiagnosticsClientContext diagnosticsClientContext,
Boolean acceleratedNetworking,
String clientId,
String processId,
String userAgent,
ConnectionMode connectionMode,
String globalDatabaseAccountName,
String applicationRegion,
String hostEnvInfo,
HttpClient httpClient,
boolean isClientTelemetryEnabled,
IAuthorizationTokenProvider tokenProvider,
List<String> preferredRegions
) {
clientTelemetryInfo = new ClientTelemetryInfo(
getMachineId(diagnosticsClientContext),
clientId,
processId,
userAgent,
connectionMode,
globalDatabaseAccountName,
applicationRegion,
hostEnvInfo,
acceleratedNetworking,
preferredRegions);
this.isClosed = false;
this.httpClient = httpClient;
this.isClientTelemetryEnabled = isClientTelemetryEnabled;
this.clientTelemetrySchedulingSec = Configs.getClientTelemetrySchedulingInSec();
this.tokenProvider = tokenProvider;
this.globalDatabaseAccountName = globalDatabaseAccountName;
}
public ClientTelemetryInfo getClientTelemetryInfo() {
return clientTelemetryInfo;
}
public static String getMachineId(DiagnosticsClientContext diagnosticsClientContext) {
AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get();
if (metadataSnapshot != null && metadataSnapshot.getVmId() != null) {
String machineId = "vmId:" + metadataSnapshot.getVmId();
if (diagnosticsClientContext != null) {
diagnosticsClientContext.getConfig().withMachineId(machineId);
}
return machineId;
}
if (diagnosticsClientContext == null) {
return "";
}
return diagnosticsClientContext.getConfig().getMachineId();
}
public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, long value) {
try {
doubleHistogram.recordValue(value);
} catch (Exception ex) {
logger.warn("Error while recording value for client telemetry. ", ex);
}
}
public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, double value) {
try {
doubleHistogram.recordValue(value);
} catch (Exception ex) {
logger.warn("Error while recording value for client telemetry. ", ex);
}
}
public boolean isClientTelemetryEnabled() {
return isClientTelemetryEnabled;
}
public void init() {
loadAzureVmMetaData();
sendClientTelemetry().subscribe();
}
public void close() {
this.isClosed = true;
this.scheduledExecutorService.shutdown();
logger.debug("GlobalEndpointManager closed.");
}
private Mono<Void> sendClientTelemetry() {
return Mono.delay(Duration.ofSeconds(clientTelemetrySchedulingSec), CosmosSchedulers.COSMOS_PARALLEL)
.flatMap(t -> {
if (this.isClosed) {
logger.warn("client already closed");
return Mono.empty();
}
if (!Configs.isClientTelemetryEnabled(this.isClientTelemetryEnabled)) {
logger.trace("client telemetry not enabled");
return Mono.empty();
}
readHistogram();
try {
String endpoint = Configs.getClientTelemetryEndpoint();
if (StringUtils.isEmpty(endpoint)) {
logger.info("ClientTelemetry {}",
OBJECT_MAPPER.writeValueAsString(this.clientTelemetryInfo));
clearDataForNextRun();
return this.sendClientTelemetry();
} else {
URI targetEndpoint = new URI(endpoint);
ByteBuffer byteBuffer =
BridgeInternal.serializeJsonToByteBuffer(this.clientTelemetryInfo,
ClientTelemetry.OBJECT_MAPPER);
Flux<byte[]> fluxBytes = Flux.just(RxDocumentServiceRequest.toByteArray(byteBuffer));
Map<String, String> headers = new HashMap<>();
String date = Utils.nowAsRFC1123();
headers.put(HttpConstants.HttpHeaders.X_DATE, date);
String authorization = this.tokenProvider.getUserAuthorizationToken(
"", ResourceType.ClientTelemetry, RequestVerb.POST, headers,
AuthorizationTokenType.PrimaryMasterKey, null);
try {
authorization = URLEncoder.encode(authorization, Constants.UrlEncodingInfo.UTF_8);
} catch (UnsupportedEncodingException e) {
logger.error("Failed to encode authToken. Exception: ", e);
this.clearDataForNextRun();
return this.sendClientTelemetry();
}
HttpHeaders httpHeaders = new HttpHeaders();
httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON);
httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_ENCODING, RuntimeConstants.Encoding.GZIP);
httpHeaders.set(HttpConstants.HttpHeaders.X_DATE, date);
httpHeaders.set(HttpConstants.HttpHeaders.DATABASE_ACCOUNT_NAME,
this.globalDatabaseAccountName);
httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization);
String envName = Configs.getEnvironmentName();
if (StringUtils.isNotEmpty(envName)) {
httpHeaders.set(HttpConstants.HttpHeaders.ENVIRONMENT_NAME, envName);
}
HttpRequest httpRequest = new HttpRequest(HttpMethod.POST, targetEndpoint,
targetEndpoint.getPort(), httpHeaders, fluxBytes);
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest,
Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds()));
return httpResponseMono.flatMap(response -> {
if (response.statusCode() != HttpConstants.StatusCodes.OK) {
logger.error("Client telemetry request did not succeeded, status code {}",
response.statusCode());
}
this.clearDataForNextRun();
return this.sendClientTelemetry();
}).onErrorResume(throwable -> {
logger.error("Error while sending client telemetry request Exception: ", throwable);
this.clearDataForNextRun();
return this.sendClientTelemetry();
});
}
} catch (JsonProcessingException | URISyntaxException ex) {
logger.error("Error while preparing client telemetry. Exception: ", ex);
this.clearDataForNextRun();
return this.sendClientTelemetry();
}
}).onErrorResume(ex -> {
logger.error("sendClientTelemetry() - Unable to send client telemetry" +
". Exception: ", ex);
clearDataForNextRun();
return this.sendClientTelemetry();
}).subscribeOn(scheduler);
}
private void populateAzureVmMetaData(AzureVMMetadata azureVMMetadata) {
this.clientTelemetryInfo.setApplicationRegion(azureVMMetadata.getLocation());
this.clientTelemetryInfo.setMachineId("vmId:" + azureVMMetadata.getVmId());
this.clientTelemetryInfo.setHostEnvInfo(azureVMMetadata.getOsType() + "|" + azureVMMetadata.getSku() +
"|" + azureVMMetadata.getVmSize() + "|" + azureVMMetadata.getAzEnvironment());
}
private static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) {
try {
return OBJECT_MAPPER.readValue(itemResponseBodyAsString, itemClassType);
} catch (IOException e) {
throw new IllegalStateException(
"Failed to parse string [" + itemResponseBodyAsString + "] to POJO.", e);
}
}
private void clearDataForNextRun() {
this.clientTelemetryInfo.getSystemInfoMap().clear();
this.clientTelemetryInfo.getOperationInfoMap().clear();
this.clientTelemetryInfo.getCacheRefreshInfoMap().clear();
for (ConcurrentDoubleHistogram histogram : this.clientTelemetryInfo.getSystemInfoMap().values()) {
histogram.reset();
}
}
private void readHistogram() {
ConcurrentDoubleHistogram cpuHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.CPU_MAX,
ClientTelemetry.CPU_PRECISION);
cpuHistogram.setAutoResize(true);
for (double val : CpuMemoryMonitor.getClientTelemetryCpuLatestList()) {
recordValue(cpuHistogram, val);
}
ReportPayload cpuReportPayload = new ReportPayload(CPU_NAME, CPU_UNIT);
clientTelemetryInfo.getSystemInfoMap().put(cpuReportPayload, cpuHistogram);
ConcurrentDoubleHistogram memoryHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.MEMORY_MAX_IN_MB,
ClientTelemetry.MEMORY_PRECISION);
memoryHistogram.setAutoResize(true);
for (double val : CpuMemoryMonitor.getClientTelemetryMemoryLatestList()) {
recordValue(memoryHistogram, val);
}
ReportPayload memoryReportPayload = new ReportPayload(MEMORY_NAME, MEMORY_UNIT);
clientTelemetryInfo.getSystemInfoMap().put(memoryReportPayload, memoryHistogram);
this.clientTelemetryInfo.setTimeStamp(Instant.now().toString());
for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry :
this.clientTelemetryInfo.getSystemInfoMap().entrySet()) {
fillMetricsInfo(entry.getKey(), entry.getValue());
}
for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry :
this.clientTelemetryInfo.getCacheRefreshInfoMap().entrySet()) {
fillMetricsInfo(entry.getKey(), entry.getValue());
}
for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry :
this.clientTelemetryInfo.getOperationInfoMap().entrySet()) {
fillMetricsInfo(entry.getKey(), entry.getValue());
}
}
private void fillMetricsInfo(ReportPayload payload, ConcurrentDoubleHistogram histogram) {
DoubleHistogram copyHistogram = histogram.copy();
payload.getMetricInfo().setCount(copyHistogram.getTotalCount());
payload.getMetricInfo().setMax(copyHistogram.getMaxValue());
payload.getMetricInfo().setMin(copyHistogram.getMinValue());
payload.getMetricInfo().setMean(copyHistogram.getMean());
Map<Double, Double> percentile = new HashMap<>();
percentile.put(PERCENTILE_50, copyHistogram.getValueAtPercentile(PERCENTILE_50));
percentile.put(PERCENTILE_90, copyHistogram.getValueAtPercentile(PERCENTILE_90));
percentile.put(PERCENTILE_95, copyHistogram.getValueAtPercentile(PERCENTILE_95));
percentile.put(PERCENTILE_99, copyHistogram.getValueAtPercentile(PERCENTILE_99));
percentile.put(PERCENTILE_999, copyHistogram.getValueAtPercentile(PERCENTILE_999));
payload.getMetricInfo().setPercentiles(percentile);
}
} |
> the only reason for collecting the retrieved Gauge instances to a Set is to validate that none of them are null Here, we are not checking for null; rather, we are examining whether there are duplicates among the five `Gauge`s to ensure that no metric has been registered multiple times. | void testWatermarkMetrics() throws Exception {
final OneInputStreamTaskTestHarness<String, String> testHarness =
new OneInputStreamTaskTestHarness<>(
OneInputStreamTask::new,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO);
OneInputStreamOperator<String, String> headOperator = new WatermarkMetricOperator();
OperatorID headOperatorId = new OperatorID();
OneInputStreamOperator<String, String> chainedOperator = new WatermarkMetricOperator();
OperatorID chainedOperatorId = new OperatorID();
testHarness
.setupOperatorChain(headOperatorId, headOperator)
.chain(
chainedOperatorId,
chainedOperator,
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new SerializerConfigImpl()))
.finish();
InterceptingOperatorMetricGroup headOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
InterceptingOperatorMetricGroup chainedOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
InterceptingTaskMetricGroup taskMetricGroup =
new InterceptingTaskMetricGroup() {
@Override
public InternalOperatorMetricGroup getOrAddOperator(
OperatorID id, String name) {
if (id.equals(headOperatorId)) {
return headOperatorMetricGroup;
} else if (id.equals(chainedOperatorId)) {
return chainedOperatorMetricGroup;
} else {
return super.getOrAddOperator(id, name);
}
}
};
StreamMockEnvironment env =
new StreamMockEnvironment(
testHarness.jobConfig,
testHarness.taskConfig,
testHarness.memorySize,
new MockInputSplitProvider(),
testHarness.bufferSize,
new TestTaskStateManager()) {
@Override
public TaskMetricGroup getMetricGroup() {
return taskMetricGroup;
}
};
testHarness.invoke(env);
testHarness.waitForTaskRunning();
Gauge<Long> taskInputWatermarkGauge =
(Gauge<Long>) taskMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> headInputWatermarkGauge =
(Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> headOutputWatermarkGauge =
(Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
Gauge<Long> chainedInputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> chainedOutputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
assertThat(
new HashSet<>(
Arrays.asList(
taskInputWatermarkGauge,
headInputWatermarkGauge,
headOutputWatermarkGauge,
chainedInputWatermarkGauge,
chainedOutputWatermarkGauge)))
.as("A metric was registered multiple times.")
.hasSize(5);
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(headInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(headOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
testHarness.processElement(new Watermark(1L));
testHarness.waitForInputProcessing();
assertThat(taskInputWatermarkGauge.getValue()).isOne();
assertThat(headInputWatermarkGauge.getValue()).isOne();
assertThat(headOutputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(4L);
testHarness.processElement(new Watermark(2L));
testHarness.waitForInputProcessing();
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(headInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(headOutputWatermarkGauge.getValue()).isEqualTo(4L);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(4L);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(8L);
testHarness.endInput();
testHarness.waitForTaskCompletion();
} | assertThat( | void testWatermarkMetrics() throws Exception {
final OneInputStreamTaskTestHarness<String, String> testHarness =
new OneInputStreamTaskTestHarness<>(
OneInputStreamTask::new,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO);
OneInputStreamOperator<String, String> headOperator = new WatermarkMetricOperator();
OperatorID headOperatorId = new OperatorID();
OneInputStreamOperator<String, String> chainedOperator = new WatermarkMetricOperator();
OperatorID chainedOperatorId = new OperatorID();
testHarness
.setupOperatorChain(headOperatorId, headOperator)
.chain(
chainedOperatorId,
chainedOperator,
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new SerializerConfigImpl()))
.finish();
InterceptingOperatorMetricGroup headOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
InterceptingOperatorMetricGroup chainedOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
InterceptingTaskMetricGroup taskMetricGroup =
new InterceptingTaskMetricGroup() {
@Override
public InternalOperatorMetricGroup getOrAddOperator(
OperatorID id, String name) {
if (id.equals(headOperatorId)) {
return headOperatorMetricGroup;
} else if (id.equals(chainedOperatorId)) {
return chainedOperatorMetricGroup;
} else {
return super.getOrAddOperator(id, name);
}
}
};
StreamMockEnvironment env =
new StreamMockEnvironment(
testHarness.jobConfig,
testHarness.taskConfig,
testHarness.memorySize,
new MockInputSplitProvider(),
testHarness.bufferSize,
new TestTaskStateManager()) {
@Override
public TaskMetricGroup getMetricGroup() {
return taskMetricGroup;
}
};
testHarness.invoke(env);
testHarness.waitForTaskRunning();
Gauge<Long> taskInputWatermarkGauge =
(Gauge<Long>) taskMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> headInputWatermarkGauge =
(Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> headOutputWatermarkGauge =
(Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
Gauge<Long> chainedInputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> chainedOutputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
assertThat(
new HashSet<>(
Arrays.asList(
taskInputWatermarkGauge,
headInputWatermarkGauge,
headOutputWatermarkGauge,
chainedInputWatermarkGauge,
chainedOutputWatermarkGauge)))
.as("A metric was registered multiple times.")
.hasSize(5);
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(headInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(headOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
testHarness.processElement(new Watermark(1L));
testHarness.waitForInputProcessing();
assertThat(taskInputWatermarkGauge.getValue()).isOne();
assertThat(headInputWatermarkGauge.getValue()).isOne();
assertThat(headOutputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(4L);
testHarness.processElement(new Watermark(2L));
testHarness.waitForInputProcessing();
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(headInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(headOutputWatermarkGauge.getValue()).isEqualTo(4L);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(4L);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(8L);
testHarness.endInput();
testHarness.waitForTaskCompletion();
} | class DuplicatingOperator extends AbstractStreamOperator<String>
implements OneInputStreamOperator<String, String> {
@Override
public void processElement(StreamRecord<String> element) {
output.collect(element);
output.collect(element);
}
} | class DuplicatingOperator extends AbstractStreamOperator<String>
implements OneInputStreamOperator<String, String> {
@Override
public void processElement(StreamRecord<String> element) {
output.collect(element);
output.collect(element);
}
} |
add comment to explain why must replace here | private static List<RewriteJob> buildAnalyzeJobs(Optional<CustomTableResolver> customTableResolver) {
return jobs(
topDown(new AnalyzeCTE()),
topDown(new EliminateLogicalSelectHint()),
bottomUp(
new BindRelation(customTableResolver),
new CheckPolicy()
),
bottomUp(new BindExpression()),
bottomUp(new BindSlotWithPaths()),
topDown(new BindSink()),
bottomUp(new CheckAfterBind()),
bottomUp(
new ProjectToGlobalAggregate(),
new EliminateDistinctConstant(),
new ProjectWithDistinctToAggregate(),
new ReplaceExpressionByChildOutput(),
new OneRowRelationExtractAggregate()
),
topDown(
new FillUpMissingSlots(),
new NormalizeRepeat()
),
bottomUp(new AdjustAggregateNullableForEmptySet()),
topDown(new VariableToLiteral()),
bottomUp(new CheckAnalysis()),
topDown(new EliminateGroupByConstant()),
topDown(new SimplifyAggGroupBy()),
topDown(new NormalizeAggregate()),
topDown(new HavingToFilter()),
bottomUp(new SemiJoinCommute()),
bottomUp(
new CollectSubQueryAlias(),
new CollectJoinConstraint()
),
topDown(new LeadingJoin()),
bottomUp(new SubqueryToApply()),
topDown(new MergeProjects())
);
} | bottomUp(new SemiJoinCommute()), | private static List<RewriteJob> buildAnalyzeJobs(Optional<CustomTableResolver> customTableResolver) {
return jobs(
topDown(new AnalyzeCTE()),
topDown(new EliminateLogicalSelectHint()),
bottomUp(
new BindRelation(customTableResolver),
new CheckPolicy()
),
bottomUp(new BindExpression()),
bottomUp(new BindSlotWithPaths()),
topDown(new BindSink()),
bottomUp(new CheckAfterBind()),
bottomUp(
new ProjectToGlobalAggregate(),
new EliminateDistinctConstant(),
new ProjectWithDistinctToAggregate(),
new ReplaceExpressionByChildOutput(),
new OneRowRelationExtractAggregate()
),
topDown(
new FillUpMissingSlots(),
new NormalizeRepeat()
),
bottomUp(new AdjustAggregateNullableForEmptySet()),
topDown(new VariableToLiteral()),
bottomUp(new CheckAnalysis()),
topDown(new EliminateGroupByConstant()),
topDown(new SimplifyAggGroupBy()),
topDown(new NormalizeAggregate()),
topDown(new HavingToFilter()),
bottomUp(new SemiJoinCommute()),
bottomUp(
new CollectSubQueryAlias(),
new CollectJoinConstraint()
),
topDown(new LeadingJoin()),
bottomUp(new SubqueryToApply()),
topDown(new MergeProjects())
);
} | class Analyzer extends AbstractBatchJobExecutor {
// Shared, precomputed job lists used when no custom table resolver is supplied.
public static final List<RewriteJob> DEFAULT_ANALYZE_JOBS = buildAnalyzeJobs(Optional.empty());
public static final List<RewriteJob> DEFAULT_ANALYZE_VIEW_JOBS = buildAnalyzeViewJobs(Optional.empty());
// The job list this analyzer instance actually executes.
private final List<RewriteJob> jobs;
/**
 * Execute the analysis job with scope.
 * @param cascadesContext planner context for execute job
 */
public Analyzer(CascadesContext cascadesContext) {
    this(cascadesContext, false);
}
// Convenience overload: no custom table resolver supplied.
public Analyzer(CascadesContext cascadesContext, boolean analyzeView) {
    this(cascadesContext, analyzeView, Optional.empty());
}
/**
 * Creates an Analyzer. For a view, only relation binding is performed here; the
 * remaining analysis steps are done by the outer Analyzer.
 *
 * @param cascadesContext current context for the analyzer
 * @param analyzeView true when analyzing a view instead of a user query
 * @param customTableResolver custom resolver for an outer catalog; must not be null
 */
public Analyzer(CascadesContext cascadesContext, boolean analyzeView,
        Optional<CustomTableResolver> customTableResolver) {
    super(cascadesContext);
    Objects.requireNonNull(customTableResolver, "customTableResolver cannot be null");
    if (customTableResolver.isPresent()) {
        // A custom resolver requires freshly built job lists.
        this.jobs = analyzeView
                ? buildAnalyzeViewJobs(customTableResolver)
                : buildAnalyzeJobs(customTableResolver);
    } else {
        // Otherwise reuse the shared, precomputed defaults.
        this.jobs = analyzeView ? DEFAULT_ANALYZE_VIEW_JOBS : DEFAULT_ANALYZE_JOBS;
    }
}
@Override
public List<RewriteJob> getJobs() {
    return jobs;
}
/**
 * nereids analyze sql.
 */
public void analyze() {
    execute();
}
// View analysis only binds relations; the outer analyzer runs the rest of the pipeline.
private static List<RewriteJob> buildAnalyzeViewJobs(Optional<CustomTableResolver> customTableResolver) {
    return jobs(
        topDown(new AnalyzeCTE()),
        topDown(new EliminateLogicalSelectHint()),
        bottomUp(
            new BindRelation(customTableResolver),
            new CheckPolicy()
        )
    );
}
} | class Analyzer extends AbstractBatchJobExecutor {
// NOTE(review): this section duplicates the Analyzer members defined earlier in the file.
// Shared job lists used when no custom table resolver is supplied.
public static final List<RewriteJob> DEFAULT_ANALYZE_JOBS = buildAnalyzeJobs(Optional.empty());
public static final List<RewriteJob> DEFAULT_ANALYZE_VIEW_JOBS = buildAnalyzeViewJobs(Optional.empty());
// The job list this analyzer instance executes.
private final List<RewriteJob> jobs;
/**
 * Execute the analysis job with scope.
 * @param cascadesContext planner context for execute job
 */
public Analyzer(CascadesContext cascadesContext) {
    this(cascadesContext, false);
}
public Analyzer(CascadesContext cascadesContext, boolean analyzeView) {
    this(cascadesContext, analyzeView, Optional.empty());
}
/**
 * constructor of Analyzer. For view, we only do bind relation since other analyze step will do by outer Analyzer.
 *
 * @param cascadesContext current context for analyzer
 * @param analyzeView analyze view or user sql. If true, analyzer is used for view.
 * @param customTableResolver custom resolver for outer catalog.
 */
public Analyzer(CascadesContext cascadesContext, boolean analyzeView,
        Optional<CustomTableResolver> customTableResolver) {
    super(cascadesContext);
    Objects.requireNonNull(customTableResolver, "customTableResolver cannot be null");
    if (analyzeView) {
        // Custom resolver forces a freshly built list; otherwise reuse the default.
        if (customTableResolver.isPresent()) {
            this.jobs = buildAnalyzeViewJobs(customTableResolver);
        } else {
            this.jobs = DEFAULT_ANALYZE_VIEW_JOBS;
        }
    } else {
        if (customTableResolver.isPresent()) {
            this.jobs = buildAnalyzeJobs(customTableResolver);
        } else {
            this.jobs = DEFAULT_ANALYZE_JOBS;
        }
    }
}
@Override
public List<RewriteJob> getJobs() {
    return jobs;
}
/**
 * nereids analyze sql.
 */
public void analyze() {
    execute();
}
// View analysis only binds relations.
private static List<RewriteJob> buildAnalyzeViewJobs(Optional<CustomTableResolver> customTableResolver) {
    return jobs(
        topDown(new AnalyzeCTE()),
        topDown(new EliminateLogicalSelectHint()),
        bottomUp(
            new BindRelation(customTableResolver),
            new CheckPolicy()
        )
    );
}
} |
Yes, but as far as I know that would require quite a bit of refactoring, which I'd prefer to do in a separate PR.
// Builds a mocked StreamTask environment whose Output blocks under the checkpoint
// lock until closingLatch fires, then verifies that close() completes while the
// emitter is still blocked. Mock setup order is significant; code kept byte-identical.
final Object lock = new Object();
ArgumentCaptor<Throwable> failureReason = ArgumentCaptor.forClass(Throwable.class);
MockEnvironment environment = createMockEnvironment();
StreamTask<?, ?> containingTask = mock(StreamTask.class);
TaskMailboxImpl mailbox = new TaskMailboxImpl();
when(containingTask.getEnvironment()).thenReturn(environment);
when(containingTask.getCheckpointLock()).thenReturn(lock);
when(containingTask.getProcessingTimeService()).thenReturn(new TestProcessingTimeService());
when(containingTask.getTaskMailboxExecutor(any())).thenReturn(new MailboxExecutorImpl(mailbox));
StreamConfig streamConfig = new MockStreamConfig();
streamConfig.setTypeSerializerIn1(IntSerializer.INSTANCE);
final OneShotLatch closingLatch = new OneShotLatch();
final OneShotLatch outputLatch = new OneShotLatch();
Output<StreamRecord<Integer>> output = mock(Output.class);
doAnswer(new Answer() {
    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
        // Emission must hold the checkpoint lock; block here until closing starts.
        assertTrue("Output should happen under the checkpoint lock.", Thread.currentThread().holdsLock(lock));
        outputLatch.trigger();
        while (!closingLatch.isTriggered()) {
            lock.wait();
        }
        return null;
    }
}).when(output).collect(any(StreamRecord.class));
AsyncWaitOperator<Integer, Integer> operator = new TestAsyncWaitOperator<>(
    new MyAsyncFunction(),
    1000L,
    1,
    AsyncDataStream.OutputMode.ORDERED,
    closingLatch);
operator.setup(
    containingTask,
    streamConfig,
    output);
mailbox.open();
operator.open();
synchronized (lock) {
    operator.processElement(new StreamRecord<>(42));
}
// Wait until the emitter is blocked inside collect() before closing the operator.
outputLatch.await();
synchronized (lock) {
    operator.close();
}
}
// Harness-based variant: runs the operator chain in a real OneInputStreamTask and
// blocks the emitter via EmitterBlockingFunction until input is ended.
JobVertex chainedVertex = createChainedVertex(new MyAsyncFunction(), new EmitterBlockingFunction());
final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(
    OneInputStreamTask::new,
    1, 1,
    BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
testHarness.setupOutputForSingletonOperatorChain();
testHarness.taskConfig = chainedVertex.getConfiguration();
final StreamConfig streamConfig = testHarness.getStreamConfig();
final StreamConfig operatorChainStreamConfig = new StreamConfig(chainedVertex.getConfiguration());
final AsyncWaitOperator<Integer, Integer> headOperator =
    operatorChainStreamConfig.getStreamOperator(AsyncWaitOperatorTest.class.getClassLoader());
streamConfig.setStreamOperator(headOperator);
testHarness.invoke();
testHarness.waitForTaskRunning();
Object checkpointLock = testHarness.getTask().getCheckpointLock();
EmitterBlockingFunction.setLock(checkpointLock);
testHarness.processElement(new StreamRecord<>(42, 1L));
// Wait until the emitter is blocked, then end input and release it.
EmitterBlockingFunction.outputLatch.await();
testHarness.endInput();
EmitterBlockingFunction.closingLatch.trigger();
testHarness.waitForTaskCompletion();
// The test asserts that no records were emitted.
assertEquals(emptyList(), new ArrayList<>(testHarness.getOutput()));
}
/**
 * Orders elements by timestamp first, then by payload value. Watermarks compare
 * as equal to everything.
 *
 * @param o1 a {@code StreamRecord<Integer>} or {@code Watermark}
 * @param o2 a {@code StreamRecord<Integer>} or {@code Watermark}
 * @return negative/zero/positive per the {@link Comparator} contract
 */
@Override
public int compare(Object o1, Object o2) {
    // Watermarks are not ordered against records (or each other) here.
    if (o1 instanceof Watermark || o2 instanceof Watermark) {
        return 0;
    }
    StreamRecord<Integer> sr0 = (StreamRecord<Integer>) o1;
    StreamRecord<Integer> sr1 = (StreamRecord<Integer>) o2;
    // Long.compare avoids the int overflow of casting a long difference, which
    // could flip the sign for far-apart timestamps.
    int byTimestamp = Long.compare(sr0.getTimestamp(), sr1.getTimestamp());
    if (byTimestamp != 0) {
        return byTimestamp;
    }
    // Tie-break on the payload. The original's extra subtraction branch only ran
    // when compareTo returned 0, where it always yielded 0, so it is subsumed here.
    return sr0.getValue().compareTo(sr1.getValue());
}
} | class StreamRecordComparator implements Comparator<Object> {
@Override
public int compare(Object o1, Object o2) {
    // Watermarks compare as equal to everything.
    if (o1 instanceof Watermark || o2 instanceof Watermark) {
        return 0;
    } else {
        StreamRecord<Integer> sr0 = (StreamRecord<Integer>) o1;
        StreamRecord<Integer> sr1 = (StreamRecord<Integer>) o2;
        // NOTE(review): casting the long difference to int can overflow and flip
        // the sign for far-apart timestamps; Long.compare would be safer.
        if (sr0.getTimestamp() != sr1.getTimestamp()) {
            return (int) (sr0.getTimestamp() - sr1.getTimestamp());
        }
        int comparison = sr0.getValue().compareTo(sr1.getValue());
        if (comparison != 0) {
            return comparison;
        } else {
            // compareTo == 0 implies equal Integer values, so this is always 0.
            return sr0.getValue() - sr1.getValue();
        }
    }
}
} |
Let's rename `exprs` to `indexExprs` for readability.
// Assigns rValue into either a map entry or an array slot, dispatching on the
// declared type of the access expression.
ArrayMapAccessExpr accessExpr = lExpr;
if (accessExpr.getType() == BTypes.typeMap) {
    // Map assignment: evaluate the target map, then the single string key.
    BMap<BString, BValue> mapVal = (BMap<BString, BValue>) accessExpr.getRExpr().execute(this);
    BString key = (BString) accessExpr.getIndexExprs()[0].execute(this);
    mapVal.put(key, rValue);
} else {
    // Array assignment: for multi-dimensional access, walk down to the innermost array.
    BArray arrayVal = (BArray) accessExpr.getRExpr().execute(this);
    Expression[] indexExprs = accessExpr.getIndexExprs();
    if (indexExprs.length > 1) {
        arrayVal = retrieveArray(arrayVal, indexExprs);
    }
    BInteger index = (BInteger) indexExprs[0].execute(this);
    arrayVal.add(index.intValue(), rValue);
}
}
// Assigns rValue to the target of an array/map access expression.
ArrayMapAccessExpr accessExpr = lExpr;
if (!(accessExpr.getType() == BTypes.typeMap)) {
    // Array case: resolve nested arrays for multi-dimensional index expressions.
    BArray arrayVal = (BArray) accessExpr.getRExpr().execute(this);
    Expression[] indexExprs = accessExpr.getIndexExprs();
    if (indexExprs.length > 1) {
        arrayVal = retrieveArray(arrayVal, indexExprs);
    }
    BInteger indexVal = (BInteger) indexExprs[0].execute(this);
    arrayVal.add(indexVal.intValue(), rValue);
} else {
    // Map case: the single index expression evaluates to the string key.
    BMap<BString, BValue> mapVal = (BMap<BString, BValue>) accessExpr.getRExpr().execute(this);
    BString indexVal = (BString) accessExpr.getIndexExprs()[0].execute(this);
    mapVal.put(indexVal, rValue);
}
}
return unaryExpr.getEvalFunc().apply(null, rValue);
}
@Override
public BValue visit(BinaryExpression binaryExpr) {
    // The right operand is evaluated before the left; preserve this order, as
    // operand expressions may have side effects when executed.
    Expression rExpr = binaryExpr.getRExpr();
    BValueType rValue = (BValueType) rExpr.execute(this);
    Expression lExpr = binaryExpr.getLExpr();
    BValueType lValue = (BValueType) lExpr.execute(this);
    // Delegate the actual operation to the expression's evaluation function.
    return binaryExpr.getEvalFunc().apply(lValue, rValue);
}
return unaryExpr.getEvalFunc().apply(null, rValue);
}
@Override
public BValue visit(BinaryExpression binaryExpr) {
    // Right operand first, then left — order preserved; operands may have side effects.
    Expression rExpr = binaryExpr.getRExpr();
    BValueType rValue = (BValueType) rExpr.execute(this);
    Expression lExpr = binaryExpr.getLExpr();
    BValueType lValue = (BValueType) lExpr.execute(this);
    return binaryExpr.getEvalFunc().apply(lValue, rValue);
}
That sounds good. Maybe link the bug, or add a comment here describing the limitations. We previously discussed treating missing progress updates as an indicator of SDK harness failure; we could consider that as an option.
// Removes the worker bound to this control client and marks it closed so that
// in-flight lookups reject it.
WorkCountingSdkWorkerHarness worker = workerMap.remove(controlClient);
if (worker != null) {
    worker.closed.set(true);
    workers.remove(worker);
}
LOG.info("Unregistered Control client {}", worker != null ? worker.getWorkerId() : null);
// Limitation: the health flag is flipped to false on ANY disconnect — even for a
// control client that was never registered — and nothing in this class resets it.
sdkHarnessesAreHealthy.set(false);
LOG.info("SDK harness {} became unhealthy", worker != null ? worker.getWorkerId() : null);
}
// Removes and closes the worker bound to this control client.
WorkCountingSdkWorkerHarness worker = workerMap.remove(controlClient);
if (worker != null) {
    worker.closed.set(true);
    workers.remove(worker);
}
LOG.info("Unregistered Control client {}", worker != null ? worker.getWorkerId() : null);
// Any disconnect permanently marks the harnesses unhealthy; never reset in this class.
sdkHarnessesAreHealthy.set(false);
LOG.info("SDK harness {} became unhealthy", worker != null ? worker.getWorkerId() : null);
}
private static final Logger LOG =
    LoggerFactory.getLogger(WorkBalancingSdkHarnessRegistry.class);
private final ApiServiceDescriptor stateApiServiceDescriptor;
private final GrpcStateService beamFnStateService;
private final BeamFnDataGrpcService beamFnDataGrpcService;
// Maps each control client to its harness wrapper for O(1) unregistration.
private final ConcurrentHashMap<FnApiControlClient, WorkCountingSdkWorkerHarness> workerMap =
    new ConcurrentHashMap<>();
private final AtomicBoolean sdkHarnessesAreHealthy = new AtomicBoolean(true);
// Queue ordered so the least-loaded worker is taken first.
private final PriorityBlockingQueue<WorkCountingSdkWorkerHarness> workers =
    new PriorityBlockingQueue<>(
        1,
        /* Prioritize the worker with least work */
        Comparator.comparingInt(o -> o.assignedWorkCount.get()));
/** Create a registry for fnapi worker. */
private WorkBalancingSdkHarnessRegistry(
    ApiServiceDescriptor stateApiServiceDescriptor,
    GrpcStateService beamFnStateService,
    BeamFnDataGrpcService beamFnDataGrpcService) {
    Preconditions.checkNotNull(beamFnStateService, "StateService can not be null.");
    Preconditions.checkNotNull(beamFnDataGrpcService, "DataService can not be null.");
    this.stateApiServiceDescriptor = stateApiServiceDescriptor;
    this.beamFnStateService = beamFnStateService;
    this.beamFnDataGrpcService = beamFnDataGrpcService;
}
// Drops a closed worker from the queue; returns true when the worker is still usable.
private boolean validateAndCleanWorker(WorkCountingSdkWorkerHarness worker) {
    if (worker.closed.get()) {
        workers.remove(worker);
        return false;
    }
    return true;
}
@Override
public void registerWorkerClient(FnApiControlClient controlClient) {
    Preconditions.checkNotNull(controlClient, "Control client can not be null.");
    WorkCountingSdkWorkerHarness sdkWorkerHarness =
        new WorkCountingSdkWorkerHarness(controlClient);
    workerMap.put(controlClient, sdkWorkerHarness);
    workers.add(sdkWorkerHarness);
    LOG.info("Registered Control client {}", sdkWorkerHarness.getWorkerId());
}
// Reports whether all registered SDK harnesses are still considered healthy.
// Fix: the annotation was duplicated (`@Override` twice), which does not compile
// since @Override is not a repeatable annotation.
@Override
public boolean sdkHarnessesAreHealthy() {
    return sdkHarnessesAreHealthy.get();
}
/* Any modification to workers has race condition with unregisterWorkerClient. To resolve this
we recheck worker state after picking a worker and clean the worker if needed.*/
@Override
public WorkCountingSdkWorkerHarness getAvailableWorkerAndAssignWork() {
    try {
        WorkCountingSdkWorkerHarness worker;
        do {
            // Take the least-loaded worker, charge it with one unit of work, and
            // requeue it so the new load is reflected in the queue ordering.
            worker = workers.take();
            worker.assignedWorkCount.incrementAndGet();
            workers.add(worker);
        } while (!validateAndCleanWorker(worker));
        return worker;
    } catch (InterruptedException e) {
        // Fix: restore the interrupt flag so callers can observe the interruption
        // instead of it being silently swallowed.
        Thread.currentThread().interrupt();
        LOG.error("Interrupted while waiting to get an available worker.");
        return null;
    }
}
@Override
public void completeWork(SdkWorkerHarness worker) {
    // Only harnesses produced by this registry can be completed here.
    if (!(worker instanceof WorkCountingSdkWorkerHarness)) {
        throw new IllegalArgumentException(
            String.format(
                "Worker should be of type %s. Found worker type %s",
                WorkCountingSdkWorkerHarness.class, worker.getClass()));
    }
    WorkCountingSdkWorkerHarness actualWorker = (WorkCountingSdkWorkerHarness) worker;
    // Remove/re-add so the priority queue reorders on the decremented work count.
    if (workers.remove(actualWorker)) {
        actualWorker.assignedWorkCount.decrementAndGet();
        workers.add(actualWorker);
        validateAndCleanWorker(actualWorker);
    }
}
@Override
@Nullable
public ApiServiceDescriptor beamFnStateApiServiceDescriptor() {
    return stateApiServiceDescriptor;
}
@Override
@Nullable
public ApiServiceDescriptor beamFnDataApiServiceDescriptor() {
    return beamFnDataGrpcService.getApiServiceDescriptor();
}
/** Class to keep client and associated data */
public class WorkCountingSdkWorkerHarness implements SdkWorkerHarness {
    private final FnApiControlClient controlClientHandler;
    // Number of work items currently assigned; drives the priority-queue ordering.
    private final AtomicInteger assignedWorkCount;
    // Set once the control client disconnects; closed workers are rejected.
    private final AtomicBoolean closed;
    private WorkCountingSdkWorkerHarness(FnApiControlClient controlClientHandler) {
        this.controlClientHandler = controlClientHandler;
        this.assignedWorkCount = new AtomicInteger(0);
        this.closed = new AtomicBoolean(false);
    }
    @Override
    @Nullable
    public FnApiControlClient getControlClientHandler() {
        return controlClientHandler;
    }
    @Override
    @Nullable
    public String getWorkerId() {
        return controlClientHandler.getWorkerId();
    }
    @Override
    @Nullable
    public GrpcFnServer<GrpcDataService> getGrpcDataFnServer() {
        return GrpcFnServer.create(
            beamFnDataGrpcService.getDataService(getWorkerId()), beamFnDataApiServiceDescriptor());
    }
    @Override
    @Nullable
    public GrpcFnServer<GrpcStateService> getGrpcStateFnServer() {
        // NOTE(review): this pairs the *state* service with the *data* service
        // descriptor; it looks like it should use beamFnStateApiServiceDescriptor() — confirm.
        return GrpcFnServer.create(beamFnStateService, beamFnDataApiServiceDescriptor());
    }
}
} | class WorkBalancingSdkHarnessRegistry implements SdkHarnessRegistry {
// NOTE(review): this section duplicates the WorkBalancingSdkHarnessRegistry members
// defined earlier in the file.
private static final Logger LOG =
    LoggerFactory.getLogger(WorkBalancingSdkHarnessRegistry.class);
private final ApiServiceDescriptor stateApiServiceDescriptor;
private final GrpcStateService beamFnStateService;
private final BeamFnDataGrpcService beamFnDataGrpcService;
// Maps each control client to its harness wrapper for O(1) unregistration.
private final ConcurrentHashMap<FnApiControlClient, WorkCountingSdkWorkerHarness> workerMap =
    new ConcurrentHashMap<>();
private final AtomicBoolean sdkHarnessesAreHealthy = new AtomicBoolean(true);
private final PriorityBlockingQueue<WorkCountingSdkWorkerHarness> workers =
    new PriorityBlockingQueue<>(
        1,
        /* Prioritize the worker with least work */
        Comparator.comparingInt(o -> o.assignedWorkCount.get()));
/** Create a registry for fnapi worker. */
private WorkBalancingSdkHarnessRegistry(
    ApiServiceDescriptor stateApiServiceDescriptor,
    GrpcStateService beamFnStateService,
    BeamFnDataGrpcService beamFnDataGrpcService) {
    Preconditions.checkNotNull(beamFnStateService, "StateService can not be null.");
    Preconditions.checkNotNull(beamFnDataGrpcService, "DataService can not be null.");
    this.stateApiServiceDescriptor = stateApiServiceDescriptor;
    this.beamFnStateService = beamFnStateService;
    this.beamFnDataGrpcService = beamFnDataGrpcService;
}
// Drops a closed worker from the queue; returns true when still usable.
private boolean validateAndCleanWorker(WorkCountingSdkWorkerHarness worker) {
    if (worker.closed.get()) {
        workers.remove(worker);
        return false;
    }
    return true;
}
@Override
public void registerWorkerClient(FnApiControlClient controlClient) {
    Preconditions.checkNotNull(controlClient, "Control client can not be null.");
    WorkCountingSdkWorkerHarness sdkWorkerHarness =
        new WorkCountingSdkWorkerHarness(controlClient);
    workerMap.put(controlClient, sdkWorkerHarness);
    workers.add(sdkWorkerHarness);
    LOG.info("Registered Control client {}", sdkWorkerHarness.getWorkerId());
}
// NOTE(review): @Override appears twice here; the duplicate annotation does not compile.
@Override
@Override
public boolean sdkHarnessesAreHealthy() {
    return sdkHarnessesAreHealthy.get();
}
/* Any modification to workers has race condition with unregisterWorkerClient. To resolve this
we recheck worker state after picking a worker and clean the worker if needed.*/
@Override
public WorkCountingSdkWorkerHarness getAvailableWorkerAndAssignWork() {
    try {
        WorkCountingSdkWorkerHarness worker;
        do {
            // Take the least-loaded worker, charge it, and requeue it.
            worker = workers.take();
            worker.assignedWorkCount.incrementAndGet();
            workers.add(worker);
        } while (!validateAndCleanWorker(worker));
        return worker;
    } catch (InterruptedException e) {
        // NOTE(review): the interrupt flag is swallowed here; consider re-interrupting.
        LOG.error("Interrupted while waiting to get an available worker.");
        return null;
    }
}
@Override
public void completeWork(SdkWorkerHarness worker) {
    if (!(worker instanceof WorkCountingSdkWorkerHarness)) {
        throw new IllegalArgumentException(
            String.format(
                "Worker should be of type %s. Found worker type %s",
                WorkCountingSdkWorkerHarness.class, worker.getClass()));
    }
    WorkCountingSdkWorkerHarness actualWorker = (WorkCountingSdkWorkerHarness) worker;
    // Remove/re-add so the priority queue reorders on the decremented work count.
    if (workers.remove(actualWorker)) {
        actualWorker.assignedWorkCount.decrementAndGet();
        workers.add(actualWorker);
        validateAndCleanWorker(actualWorker);
    }
}
@Override
@Nullable
public ApiServiceDescriptor beamFnStateApiServiceDescriptor() {
    return stateApiServiceDescriptor;
}
@Override
@Nullable
public ApiServiceDescriptor beamFnDataApiServiceDescriptor() {
    return beamFnDataGrpcService.getApiServiceDescriptor();
}
/** Class to keep client and associated data */
public class WorkCountingSdkWorkerHarness implements SdkWorkerHarness {
    private final FnApiControlClient controlClientHandler;
    private final AtomicInteger assignedWorkCount;
    private final AtomicBoolean closed;
    private WorkCountingSdkWorkerHarness(FnApiControlClient controlClientHandler) {
        this.controlClientHandler = controlClientHandler;
        this.assignedWorkCount = new AtomicInteger(0);
        this.closed = new AtomicBoolean(false);
    }
    @Override
    @Nullable
    public FnApiControlClient getControlClientHandler() {
        return controlClientHandler;
    }
    @Override
    @Nullable
    public String getWorkerId() {
        return controlClientHandler.getWorkerId();
    }
    @Override
    @Nullable
    public GrpcFnServer<GrpcDataService> getGrpcDataFnServer() {
        return GrpcFnServer.create(
            beamFnDataGrpcService.getDataService(getWorkerId()), beamFnDataApiServiceDescriptor());
    }
    @Override
    @Nullable
    public GrpcFnServer<GrpcStateService> getGrpcStateFnServer() {
        // NOTE(review): state service paired with the data descriptor — confirm intent.
        return GrpcFnServer.create(beamFnStateService, beamFnDataApiServiceDescriptor());
    }
}
} |
Even if clustering is disabled, shouldn't we still apply the event bus options?
// Translates the Quarkus VertxConfiguration into Vert.x VertxOptions.
VertxOptions options = new VertxOptions();
if (allowClustering) {
    // NOTE(review): event bus options are only applied when clustering is allowed;
    // confirm whether they should also be set for non-clustered instances.
    setEventBusOptions(conf, options);
    initializeClusterOptions(conf, options);
}
String fileCacheDir = System.getProperty(CACHE_DIR_BASE_PROP_NAME,
    System.getProperty("java.io.tmpdir", ".") + File.separator + "vertx-cache");
options.setFileSystemOptions(new FileSystemOptions()
    .setFileCachingEnabled(conf.caching)
    .setFileCacheDir(fileCacheDir)
    .setClassPathResolvingEnabled(conf.classpathResolving));
options.setWorkerPoolSize(conf.workerPoolSize);
options.setInternalBlockingPoolSize(conf.internalBlockingPoolSize);
options.setBlockedThreadCheckInterval(conf.warningExceptionTime.toMillis());
// Fall back to a heap/CPU-derived default when no explicit event loop count is given.
if (conf.eventLoopsPoolSize.isPresent()) {
    options.setEventLoopPoolSize(conf.eventLoopsPoolSize.getAsInt());
} else {
    options.setEventLoopPoolSize(calculateDefaultIOThreads());
}
Optional<Duration> maxEventLoopExecuteTime = conf.maxEventLoopExecuteTime;
if (maxEventLoopExecuteTime.isPresent()) {
    options.setMaxEventLoopExecuteTime(maxEventLoopExecuteTime.get().toMillis());
    options.setMaxEventLoopExecuteTimeUnit(TimeUnit.MILLISECONDS);
}
Optional<Duration> maxWorkerExecuteTime = conf.maxWorkerExecuteTime;
if (maxWorkerExecuteTime.isPresent()) {
    options.setMaxWorkerExecuteTime(maxWorkerExecuteTime.get().toMillis());
    options.setMaxWorkerExecuteTimeUnit(TimeUnit.MILLISECONDS);
}
options.setWarningExceptionTime(conf.warningExceptionTime.toNanos());
return options;
}
// Duplicate of the convertToVertxOptions body above; comments only.
VertxOptions options = new VertxOptions();
if (allowClustering) {
    // Event bus + cluster options applied only when clustering is allowed.
    setEventBusOptions(conf, options);
    initializeClusterOptions(conf, options);
}
String fileCacheDir = System.getProperty(CACHE_DIR_BASE_PROP_NAME,
    System.getProperty("java.io.tmpdir", ".") + File.separator + "vertx-cache");
options.setFileSystemOptions(new FileSystemOptions()
    .setFileCachingEnabled(conf.caching)
    .setFileCacheDir(fileCacheDir)
    .setClassPathResolvingEnabled(conf.classpathResolving));
options.setWorkerPoolSize(conf.workerPoolSize);
options.setInternalBlockingPoolSize(conf.internalBlockingPoolSize);
options.setBlockedThreadCheckInterval(conf.warningExceptionTime.toMillis());
if (conf.eventLoopsPoolSize.isPresent()) {
    options.setEventLoopPoolSize(conf.eventLoopsPoolSize.getAsInt());
} else {
    options.setEventLoopPoolSize(calculateDefaultIOThreads());
}
Optional<Duration> maxEventLoopExecuteTime = conf.maxEventLoopExecuteTime;
if (maxEventLoopExecuteTime.isPresent()) {
    options.setMaxEventLoopExecuteTime(maxEventLoopExecuteTime.get().toMillis());
    options.setMaxEventLoopExecuteTimeUnit(TimeUnit.MILLISECONDS);
}
Optional<Duration> maxWorkerExecuteTime = conf.maxWorkerExecuteTime;
if (maxWorkerExecuteTime.isPresent()) {
    options.setMaxWorkerExecuteTime(maxWorkerExecuteTime.get().toMillis());
    options.setMaxWorkerExecuteTimeUnit(TimeUnit.MILLISECONDS);
}
options.setWarningExceptionTime(conf.warningExceptionTime.toNanos());
return options;
}
private static final Pattern COMMA_PATTERN = Pattern.compile(",");
// Lazily-initialized core Vert.x instance and the separate web instance.
static volatile VertxSupplier vertx;
static volatile Vertx webVertx;
// Wires the lazy Vert.x supplier into the bean container and registers a shutdown
// hook (except in dev mode, where instances survive restarts).
public Supplier<Vertx> configureVertx(BeanContainer container, VertxConfiguration config,
    LaunchMode launchMode, ShutdownContext shutdown) {
    vertx = new VertxSupplier(config);
    VertxCoreProducer producer = container.instance(VertxCoreProducer.class);
    producer.initialize(vertx);
    if (launchMode != LaunchMode.DEVELOPMENT) {
        shutdown.addLastShutdownTask(new Runnable() {
            @Override
            public void run() {
                destroy();
            }
        });
    }
    return vertx;
}
// Detector reporting whether the current thread is a Vert.x event loop thread.
public IOThreadDetector detector() {
    return new IOThreadDetector() {
        @Override
        public boolean isInIOThread() {
            return Context.isOnEventLoopThread();
        }
    };
}
public static Supplier<Vertx> getVertx() {
    return vertx;
}
public static Vertx getWebVertx() {
    return webVertx;
}
// Initializes the web Vert.x instance and registers its shutdown hook.
public RuntimeValue<Vertx> initializeWeb(VertxConfiguration conf, ShutdownContext shutdown, LaunchMode launchMode) {
    initializeWeb(conf);
    if (launchMode != LaunchMode.DEVELOPMENT) {
        shutdown.addShutdownTask(new Runnable() {
            @Override
            public void run() {
                destroyWeb();
            }
        });
    }
    return new RuntimeValue<>(webVertx);
}
// Initializes the web Vert.x instance exactly once; subsequent calls are no-ops.
public static void initializeWeb(VertxConfiguration conf) {
    if (webVertx != null) {
        return;
    }
    if (conf == null) {
        webVertx = Vertx.vertx();
    } else {
        // Web instance never participates in clustering.
        webVertx = Vertx.vertx(convertToVertxOptions(conf, false));
    }
}
// Creates the core Vert.x instance, clustered or not depending on configuration.
public static Vertx initialize(VertxConfiguration conf) {
    if (conf == null) {
        return Vertx.vertx();
    }
    VertxOptions options = convertToVertxOptions(conf, true);
    // Property must be set before the Vert.x instance is created below.
    if (!conf.useAsyncDNS) {
        System.setProperty("vertx.disableDnsResolver", "true");
    }
    if (options.getEventBusOptions().isClustered()) {
        // Clustered creation is async; block until it completes or fails.
        CompletableFuture<Vertx> latch = new CompletableFuture<>();
        Vertx.clusteredVertx(options, ar -> {
            if (ar.failed()) {
                latch.completeExceptionally(ar.cause());
            } else {
                latch.complete(ar.result());
            }
        });
        return latch.join();
    } else {
        return Vertx.vertx(options);
    }
}
// Default IO threads: 2x cores, capped at one thread per 10 MB of max heap, min 2.
private static int calculateDefaultIOThreads() {
    int recommended = ProcessorInfo.availableProcessors() * 2;
    long mem = Runtime.getRuntime().maxMemory();
    long memInMb = mem / (1024 * 1024);
    long maxAllowed = memInMb / 10;
    return (int) Math.max(2, Math.min(maxAllowed, recommended));
}
// Synchronously closes the core Vert.x instance, propagating any close failure.
void destroy() {
    if (vertx != null && vertx.v != null) {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<Throwable> problem = new AtomicReference<>();
        vertx.v.close(ar -> {
            if (ar.failed()) {
                problem.set(ar.cause());
            }
            latch.countDown();
        });
        try {
            latch.await();
            if (problem.get() != null) {
                throw new IllegalStateException("Error when closing Vert.x instance", problem.get());
            }
        } catch (InterruptedException e) {
            // Interrupt flag restored before failing, per convention.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted when closing Vert.x instance", e);
        }
        vertx = null;
    }
}
// Synchronously closes the web Vert.x instance; same pattern as destroy().
void destroyWeb() {
    if (webVertx != null) {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<Throwable> problem = new AtomicReference<>();
        webVertx.close(ar -> {
            if (ar.failed()) {
                problem.set(ar.cause());
            }
            latch.countDown();
        });
        try {
            latch.await();
            if (problem.get() != null) {
                throw new IllegalStateException("Error when closing Vert.x instance", problem.get());
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted when closing Vert.x instance", e);
        }
        webVertx = null;
    }
}
// Copies cluster-related settings from the Quarkus config onto the event bus options.
private static void initializeClusterOptions(VertxConfiguration conf, VertxOptions options) {
    ClusterConfiguration cluster = conf.cluster;
    options.getEventBusOptions().setClustered(cluster.clustered);
    options.getEventBusOptions().setClusterPingReplyInterval(cluster.pingReplyInterval.toMillis());
    options.getEventBusOptions().setClusterPingInterval(cluster.pingInterval.toMillis());
    if (cluster.host != null) {
        options.getEventBusOptions().setHost(cluster.host);
    }
    if (cluster.port.isPresent()) {
        options.getEventBusOptions().setPort(cluster.port.getAsInt());
    }
    cluster.publicHost.ifPresent(options.getEventBusOptions()::setClusterPublicHost);
    if (cluster.publicPort.isPresent()) {
        // Fix: the public port belongs in the cluster *public* port; previously it
        // was written via setPort(), clobbering the regular port set just above.
        options.getEventBusOptions().setClusterPublicPort(cluster.publicPort.getAsInt());
    }
}
// Builds EventBusOptions from the Quarkus event bus configuration: TCP/socket
// tuning, SSL flags, and key/trust material in PEM, JKS, or PFX form.
private static void setEventBusOptions(VertxConfiguration conf, VertxOptions options) {
    EventBusConfiguration eb = conf.eventbus;
    EventBusOptions opts = new EventBusOptions();
    // Absent numeric options map to -1 (Vert.x "unset"); absent idle timeout maps to 0.
    opts.setAcceptBacklog(eb.acceptBacklog.orElse(-1));
    opts.setClientAuth(ClientAuth.valueOf(eb.clientAuth.toUpperCase()));
    opts.setConnectTimeout((int) (Math.min(Integer.MAX_VALUE, eb.connectTimeout.toMillis())));
    opts.setIdleTimeout(
        eb.idleTimeout.isPresent() ? (int) Math.max(1, Math.min(Integer.MAX_VALUE, eb.idleTimeout.get().getSeconds()))
            : 0);
    opts.setSendBufferSize(eb.sendBufferSize.orElse(-1));
    opts.setSoLinger(eb.soLinger.orElse(-1));
    opts.setSsl(eb.ssl);
    opts.setReceiveBufferSize(eb.receiveBufferSize.orElse(-1));
    opts.setReconnectAttempts(eb.reconnectAttempts);
    opts.setReconnectInterval(eb.reconnectInterval.toMillis());
    opts.setReuseAddress(eb.reuseAddress);
    opts.setReusePort(eb.reusePort);
    opts.setTrafficClass(eb.trafficClass.orElse(-1));
    opts.setTcpKeepAlive(eb.tcpKeepAlive);
    opts.setTcpNoDelay(eb.tcpNoDelay);
    opts.setTrustAll(eb.trustAll);
    // Key material: PEM cert/key path lists are comma-separated in the config.
    if (eb.keyCertificatePem != null) {
        List<String> certs = new ArrayList<>();
        List<String> keys = new ArrayList<>();
        eb.keyCertificatePem.certs.ifPresent(
            s -> certs.addAll(COMMA_PATTERN.splitAsStream(s).map(String::trim).collect(Collectors.toList())));
        eb.keyCertificatePem.keys.ifPresent(
            s -> keys.addAll(COMMA_PATTERN.splitAsStream(s).map(String::trim).collect(Collectors.toList())));
        PemKeyCertOptions o = new PemKeyCertOptions()
            .setCertPaths(certs)
            .setKeyPaths(keys);
        opts.setPemKeyCertOptions(o);
    }
    if (eb.keyCertificateJks != null) {
        JksOptions o = new JksOptions();
        eb.keyCertificateJks.path.ifPresent(o::setPath);
        eb.keyCertificateJks.password.ifPresent(o::setPassword);
        opts.setKeyStoreOptions(o);
    }
    if (eb.keyCertificatePfx != null) {
        PfxOptions o = new PfxOptions();
        eb.keyCertificatePfx.path.ifPresent(o::setPath);
        eb.keyCertificatePfx.password.ifPresent(o::setPassword);
        opts.setPfxKeyCertOptions(o);
    }
    // Trust material mirrors the key material handling.
    if (eb.trustCertificatePem != null) {
        eb.trustCertificatePem.certs.ifPresent(s -> {
            PemTrustOptions o = new PemTrustOptions();
            COMMA_PATTERN.splitAsStream(s).map(String::trim).forEach(o::addCertPath);
            opts.setPemTrustOptions(o);
        });
    }
    if (eb.trustCertificateJks != null) {
        JksOptions o = new JksOptions();
        eb.trustCertificateJks.path.ifPresent(o::setPath);
        eb.trustCertificateJks.password.ifPresent(o::setPassword);
        opts.setTrustStoreOptions(o);
    }
    if (eb.trustCertificatePfx != null) {
        PfxOptions o = new PfxOptions();
        eb.trustCertificatePfx.path.ifPresent(o::setPath);
        eb.trustCertificatePfx.password.ifPresent(o::setPassword);
        opts.setPfxTrustOptions(o);
    }
    options.setEventBusOptions(opts);
}
// Supplier of the acceptor (boss) event loop group of the core Vert.x instance.
public Supplier<EventLoopGroup> bossSupplier() {
    return new Supplier<EventLoopGroup>() {
        @Override
        public EventLoopGroup get() {
            return ((VertxImpl) vertx.get()).getAcceptorEventLoopGroup();
        }
    };
}
// Supplier of the main Netty event loop group.
public Supplier<EventLoopGroup> mainSupplier() {
    return new Supplier<EventLoopGroup>() {
        @Override
        public EventLoopGroup get() {
            return vertx.get().nettyEventLoopGroup();
        }
    };
}
// Resolves the event loop thread count once, eagerly, and returns a constant supplier.
public Supplier<Integer> calculateEventLoopThreads(VertxConfiguration conf) {
    int threads;
    if (conf.eventLoopsPoolSize.isPresent()) {
        threads = conf.eventLoopsPoolSize.getAsInt();
    } else {
        threads = calculateDefaultIOThreads();
    }
    return new Supplier<Integer>() {
        @Override
        public Integer get() {
            return threads;
        }
    };
}
// Lazily creates the core Vert.x instance on first get(); synchronized for safety.
static class VertxSupplier implements Supplier<Vertx> {
    final VertxConfiguration config;
    Vertx v;
    VertxSupplier(VertxConfiguration config) {
        this.config = config;
    }
    @Override
    public synchronized Vertx get() {
        if (v == null) {
            v = initialize(config);
        }
        return v;
    }
}
} | class VertxCoreRecorder {
// NOTE(review): this section duplicates the VertxCoreRecorder members defined
// earlier in the file.
private static final Pattern COMMA_PATTERN = Pattern.compile(",");
static volatile VertxSupplier vertx;
static volatile Vertx webVertx;
// Wires the lazy Vert.x supplier into the bean container; shutdown hook outside dev mode.
public Supplier<Vertx> configureVertx(BeanContainer container, VertxConfiguration config,
    LaunchMode launchMode, ShutdownContext shutdown) {
    vertx = new VertxSupplier(config);
    VertxCoreProducer producer = container.instance(VertxCoreProducer.class);
    producer.initialize(vertx);
    if (launchMode != LaunchMode.DEVELOPMENT) {
        shutdown.addLastShutdownTask(new Runnable() {
            @Override
            public void run() {
                destroy();
            }
        });
    }
    return vertx;
}
// Reports whether the current thread is a Vert.x event loop thread.
public IOThreadDetector detector() {
    return new IOThreadDetector() {
        @Override
        public boolean isInIOThread() {
            return Context.isOnEventLoopThread();
        }
    };
}
public static Supplier<Vertx> getVertx() {
    return vertx;
}
public static Vertx getWebVertx() {
    return webVertx;
}
public RuntimeValue<Vertx> initializeWeb(VertxConfiguration conf, ShutdownContext shutdown, LaunchMode launchMode) {
    initializeWeb(conf);
    if (launchMode != LaunchMode.DEVELOPMENT) {
        shutdown.addShutdownTask(new Runnable() {
            @Override
            public void run() {
                destroyWeb();
            }
        });
    }
    return new RuntimeValue<>(webVertx);
}
// Initializes the web Vert.x instance once; never clustered.
public static void initializeWeb(VertxConfiguration conf) {
    if (webVertx != null) {
        // Already initialized; nothing to do.
    } else if (conf == null) {
        webVertx = Vertx.vertx();
    } else {
        VertxOptions options = convertToVertxOptions(conf, false);
        webVertx = Vertx.vertx(options);
    }
}
// Creates the core Vert.x instance, clustered or not depending on configuration.
public static Vertx initialize(VertxConfiguration conf) {
    if (conf == null) {
        return Vertx.vertx();
    }
    VertxOptions options = convertToVertxOptions(conf, true);
    if (!conf.useAsyncDNS) {
        System.setProperty("vertx.disableDnsResolver", "true");
    }
    if (options.getEventBusOptions().isClustered()) {
        CompletableFuture<Vertx> latch = new CompletableFuture<>();
        Vertx.clusteredVertx(options, ar -> {
            if (ar.failed()) {
                latch.completeExceptionally(ar.cause());
            } else {
                latch.complete(ar.result());
            }
        });
        return latch.join();
    } else {
        return Vertx.vertx(options);
    }
}
// Default IO threads: 2x cores, capped at one thread per 10 MB of max heap, min 2.
private static int calculateDefaultIOThreads() {
    int recommended = ProcessorInfo.availableProcessors() * 2;
    long mem = Runtime.getRuntime().maxMemory();
    long memInMb = mem / (1024 * 1024);
    long maxAllowed = memInMb / 10;
    return (int) Math.max(2, Math.min(maxAllowed, recommended));
}
// Synchronously closes the core Vert.x instance, propagating close failures.
void destroy() {
    if (vertx != null && vertx.v != null) {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<Throwable> problem = new AtomicReference<>();
        vertx.v.close(ar -> {
            if (ar.failed()) {
                problem.set(ar.cause());
            }
            latch.countDown();
        });
        try {
            latch.await();
            if (problem.get() != null) {
                throw new IllegalStateException("Error when closing Vert.x instance", problem.get());
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted when closing Vert.x instance", e);
        }
        vertx = null;
    }
}
void destroyWeb() {
if (webVertx != null) {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Throwable> problem = new AtomicReference<>();
webVertx.close(ar -> {
if (ar.failed()) {
problem.set(ar.cause());
}
latch.countDown();
});
try {
latch.await();
if (problem.get() != null) {
throw new IllegalStateException("Error when closing Vert.x instance", problem.get());
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IllegalStateException("Interrupted when closing Vert.x instance", e);
}
webVertx = null;
}
}
private static void initializeClusterOptions(VertxConfiguration conf, VertxOptions options) {
ClusterConfiguration cluster = conf.cluster;
options.getEventBusOptions().setClustered(cluster.clustered);
options.getEventBusOptions().setClusterPingReplyInterval(cluster.pingReplyInterval.toMillis());
options.getEventBusOptions().setClusterPingInterval(cluster.pingInterval.toMillis());
if (cluster.host != null) {
options.getEventBusOptions().setHost(cluster.host);
}
if (cluster.port.isPresent()) {
options.getEventBusOptions().setPort(cluster.port.getAsInt());
}
cluster.publicHost.ifPresent(options.getEventBusOptions()::setClusterPublicHost);
if (cluster.publicPort.isPresent()) {
options.getEventBusOptions().setPort(cluster.publicPort.getAsInt());
}
}
private static void setEventBusOptions(VertxConfiguration conf, VertxOptions options) {
EventBusConfiguration eb = conf.eventbus;
EventBusOptions opts = new EventBusOptions();
opts.setAcceptBacklog(eb.acceptBacklog.orElse(-1));
opts.setClientAuth(ClientAuth.valueOf(eb.clientAuth.toUpperCase()));
opts.setConnectTimeout((int) (Math.min(Integer.MAX_VALUE, eb.connectTimeout.toMillis())));
opts.setIdleTimeout(
eb.idleTimeout.isPresent() ? (int) Math.max(1, Math.min(Integer.MAX_VALUE, eb.idleTimeout.get().getSeconds()))
: 0);
opts.setSendBufferSize(eb.sendBufferSize.orElse(-1));
opts.setSoLinger(eb.soLinger.orElse(-1));
opts.setSsl(eb.ssl);
opts.setReceiveBufferSize(eb.receiveBufferSize.orElse(-1));
opts.setReconnectAttempts(eb.reconnectAttempts);
opts.setReconnectInterval(eb.reconnectInterval.toMillis());
opts.setReuseAddress(eb.reuseAddress);
opts.setReusePort(eb.reusePort);
opts.setTrafficClass(eb.trafficClass.orElse(-1));
opts.setTcpKeepAlive(eb.tcpKeepAlive);
opts.setTcpNoDelay(eb.tcpNoDelay);
opts.setTrustAll(eb.trustAll);
if (eb.keyCertificatePem != null) {
List<String> certs = new ArrayList<>();
List<String> keys = new ArrayList<>();
eb.keyCertificatePem.certs.ifPresent(
s -> certs.addAll(COMMA_PATTERN.splitAsStream(s).map(String::trim).collect(Collectors.toList())));
eb.keyCertificatePem.keys.ifPresent(
s -> keys.addAll(COMMA_PATTERN.splitAsStream(s).map(String::trim).collect(Collectors.toList())));
PemKeyCertOptions o = new PemKeyCertOptions()
.setCertPaths(certs)
.setKeyPaths(keys);
opts.setPemKeyCertOptions(o);
}
if (eb.keyCertificateJks != null) {
JksOptions o = new JksOptions();
eb.keyCertificateJks.path.ifPresent(o::setPath);
eb.keyCertificateJks.password.ifPresent(o::setPassword);
opts.setKeyStoreOptions(o);
}
if (eb.keyCertificatePfx != null) {
PfxOptions o = new PfxOptions();
eb.keyCertificatePfx.path.ifPresent(o::setPath);
eb.keyCertificatePfx.password.ifPresent(o::setPassword);
opts.setPfxKeyCertOptions(o);
}
if (eb.trustCertificatePem != null) {
eb.trustCertificatePem.certs.ifPresent(s -> {
PemTrustOptions o = new PemTrustOptions();
COMMA_PATTERN.splitAsStream(s).map(String::trim).forEach(o::addCertPath);
opts.setPemTrustOptions(o);
});
}
if (eb.trustCertificateJks != null) {
JksOptions o = new JksOptions();
eb.trustCertificateJks.path.ifPresent(o::setPath);
eb.trustCertificateJks.password.ifPresent(o::setPassword);
opts.setTrustStoreOptions(o);
}
if (eb.trustCertificatePfx != null) {
PfxOptions o = new PfxOptions();
eb.trustCertificatePfx.path.ifPresent(o::setPath);
eb.trustCertificatePfx.password.ifPresent(o::setPassword);
opts.setPfxTrustOptions(o);
}
options.setEventBusOptions(opts);
}
public Supplier<EventLoopGroup> bossSupplier() {
return new Supplier<EventLoopGroup>() {
@Override
public EventLoopGroup get() {
return ((VertxImpl) vertx.get()).getAcceptorEventLoopGroup();
}
};
}
public Supplier<EventLoopGroup> mainSupplier() {
return new Supplier<EventLoopGroup>() {
@Override
public EventLoopGroup get() {
return vertx.get().nettyEventLoopGroup();
}
};
}
public Supplier<Integer> calculateEventLoopThreads(VertxConfiguration conf) {
int threads;
if (conf.eventLoopsPoolSize.isPresent()) {
threads = conf.eventLoopsPoolSize.getAsInt();
} else {
threads = calculateDefaultIOThreads();
}
return new Supplier<Integer>() {
@Override
public Integer get() {
return threads;
}
};
}
static class VertxSupplier implements Supplier<Vertx> {
final VertxConfiguration config;
Vertx v;
VertxSupplier(VertxConfiguration config) {
this.config = config;
}
@Override
public synchronized Vertx get() {
if (v == null) {
v = initialize(config);
}
return v;
}
}
} |
+1 changed it to emit results in processElement as well. Could not measure a noticeable difference in my test though. | public void finishBundle() {
try {
remoteBundle.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
KV<String, OutputT> result;
while ((result = outputQueue.poll()) != null) {
outputManager.output(outputMap.get(result.getKey()), (WindowedValue) result.getValue());
}
} | public void finishBundle() {
try {
remoteBundle.close();
emitResults();
} catch (Exception e) {
throw new RuntimeException("Failed to finish remote bundle", e);
}
} | class SdkHarnessDoFnRunner implements DoFnRunner<InputT, OutputT> {
@Override
public void startBundle() {
checkState(
stageBundleFactory != null, "%s not yet prepared", StageBundleFactory.class.getName());
checkState(
stateRequestHandler != null, "%s not yet prepared", StateRequestHandler.class.getName());
OutputReceiverFactory receiverFactory =
new OutputReceiverFactory() {
@Override
public FnDataReceiver<OutputT> create(String pCollectionId) {
return (receivedElement) -> {
outputQueue.put(KV.of(pCollectionId, receivedElement));
};
}
};
try {
remoteBundle =
stageBundleFactory.getBundle(receiverFactory, stateRequestHandler, progressHandler);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public void processElement(WindowedValue<InputT> element) {
try {
checkState(remoteBundle != null, "%s not yet prepared", RemoteBundle.class.getName());
LOG.debug(String.format("Sending value: %s", element));
Iterables.getOnlyElement(remoteBundle.getInputReceivers().values()).accept(element);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public void onTimer(
String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {}
@Override
@Override
public DoFn<InputT, OutputT> getFn() {
throw new UnsupportedOperationException();
}
} | class SdkHarnessDoFnRunner implements DoFnRunner<InputT, OutputT> {
@Override
public void startBundle() {
checkState(
stageBundleFactory != null, "%s not yet prepared", StageBundleFactory.class.getName());
checkState(
stateRequestHandler != null, "%s not yet prepared", StateRequestHandler.class.getName());
OutputReceiverFactory receiverFactory =
new OutputReceiverFactory() {
@Override
public FnDataReceiver<OutputT> create(String pCollectionId) {
return (receivedElement) -> {
outputQueue.put(KV.of(pCollectionId, receivedElement));
};
}
};
try {
remoteBundle =
stageBundleFactory.getBundle(receiverFactory, stateRequestHandler, progressHandler);
} catch (Exception e) {
throw new RuntimeException("Failed to start remote bundle", e);
}
}
@Override
public void processElement(WindowedValue<InputT> element) {
checkState(remoteBundle != null, "%s not yet prepared", RemoteBundle.class.getName());
try {
LOG.debug(String.format("Sending value: %s", element));
Iterables.getOnlyElement(remoteBundle.getInputReceivers().values()).accept(element);
} catch (Exception e) {
throw new RuntimeException("Failed to process element with SDK harness.", e);
}
emitResults();
}
@Override
public void onTimer(
String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {}
@Override
private void emitResults() {
KV<String, OutputT> result;
while ((result = outputQueue.poll()) != null) {
outputManager.output(outputMap.get(result.getKey()), (WindowedValue) result.getValue());
}
}
@Override
public DoFn<InputT, OutputT> getFn() {
throw new UnsupportedOperationException();
}
} | |
Can't we return here? Or we have to unnecessarily check the tag in L3701 and return? | private void addAsRecordTypeDefinition(BType type, Location pos) {
if (type.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
addAsRecordTypeDefinition(memberType, pos);
}
}
if (type.tag != TypeTags.RECORD) {
return;
}
BRecordType recordType = (BRecordType) type;
if (isRecordTypeDefExist(recordType.tsymbol, env)) {
return;
}
BLangRecordTypeNode recordTypeNode = new BLangRecordTypeNode();
recordTypeNode.pos = pos;
recordTypeNode.type = recordType;
List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
for (BField field : recordType.fields.values()) {
typeDefFields.add(ASTBuilderUtil.createVariable(field.pos, field.name.value, field.type, null,
field.symbol));
}
recordTypeNode.fields = typeDefFields;
recordTypeNode.symbol = recordType.tsymbol;
recordTypeNode.isAnonymous = true;
recordTypeNode.isLocal = true;
recordTypeNode.type.tsymbol.scope = new Scope(recordTypeNode.type.tsymbol);
recordTypeNode.initFunction =
rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
env);
TypeDefBuilderHelper.addTypeDefinition(recordType, recordType.tsymbol, recordTypeNode, env);
} | BRecordType recordType = (BRecordType) type; | private void addAsRecordTypeDefinition(BType type, Location pos) {
if (type.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
addAsRecordTypeDefinition(memberType, pos);
}
return;
}
if (type.tag != TypeTags.RECORD) {
return;
}
BRecordType recordType = (BRecordType) type;
if (isRecordTypeDefExist(recordType.tsymbol, env)) {
return;
}
BLangRecordTypeNode recordTypeNode = new BLangRecordTypeNode();
recordTypeNode.pos = pos;
recordTypeNode.type = recordType;
List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
for (BField field : recordType.fields.values()) {
typeDefFields.add(ASTBuilderUtil.createVariable(field.pos, field.name.value, field.type, null,
field.symbol));
}
recordTypeNode.fields = typeDefFields;
recordTypeNode.symbol = recordType.tsymbol;
recordTypeNode.isAnonymous = true;
recordTypeNode.isLocal = true;
recordTypeNode.type.tsymbol.scope = new Scope(recordTypeNode.type.tsymbol);
recordTypeNode.initFunction =
rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
env);
TypeDefBuilderHelper.addTypeDefinition(recordType, recordType.tsymbol, recordTypeNode, env);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} |
This anonymous class is used twice, can you extract it as a private static class and remove the duplication? | public void testWriteQueue() throws Exception {
final int maxNumRecords = 1000;
List<RabbitMqMessage> data =
IntStream.range(0, maxNumRecords)
.mapToObj(i -> new RabbitMqMessage(("Test " + i).getBytes(StandardCharsets.UTF_8)))
.collect(Collectors.toList());
p.apply(Create.of(data))
.apply(
RabbitMqIO.write().withUri("amqp:
final List<String> received = new ArrayList<>();
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = connectionFactory.newConnection();
Channel channel = connection.createChannel();
channel.queueDeclare("TEST", true, false, false, null);
Consumer consumer =
new DefaultConsumer(channel) {
@Override
public void handleDelivery(
String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)
throws IOException {
String message = new String(body, "UTF-8");
received.add(message);
}
};
channel.basicConsume("TEST", true, consumer);
p.run();
while (received.size() < maxNumRecords) {
Thread.sleep(500);
}
assertEquals(maxNumRecords, received.size());
for (int i = 0; i < maxNumRecords; i++) {
assertTrue(received.contains("Test " + i));
}
channel.close();
connection.close();
} | new DefaultConsumer(channel) { | public void testWriteQueue() throws Exception {
final int maxNumRecords = 1000;
List<RabbitMqMessage> data =
generateRecords(maxNumRecords)
.stream()
.map(bytes -> new RabbitMqMessage(bytes))
.collect(Collectors.toList());
p.apply(Create.of(data))
.apply(
RabbitMqIO.write().withUri("amqp:
final List<String> received = new ArrayList<>();
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = null;
Channel channel = null;
try {
connection = connectionFactory.newConnection();
channel = connection.createChannel();
channel.queueDeclare("TEST", true, false, false, null);
Consumer consumer = new TestConsumer(channel, received);
channel.basicConsume("TEST", true, consumer);
p.run();
while (received.size() < maxNumRecords) {
Thread.sleep(500);
}
assertEquals(maxNumRecords, received.size());
for (int i = 0; i < maxNumRecords; i++) {
assertTrue(received.contains("Test " + i));
}
} finally {
if (channel != null) {
channel.close();
}
if (connection != null) {
connection.close();
}
}
} | class RabbitMqIOTest implements Serializable {
private static final Logger LOG = LoggerFactory.getLogger(RabbitMqIOTest.class);
private static int port;
@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule public transient TestPipeline p = TestPipeline.create();
private static transient Broker broker;
@BeforeClass
public static void startBroker() throws Exception {
try (ServerSocket serverSocket = new ServerSocket(0)) {
port = serverSocket.getLocalPort();
}
System.setProperty("derby.stream.error.field", "MyApp.DEV_NULL");
broker = new Broker();
BrokerOptions options = new BrokerOptions();
options.setConfigProperty(BrokerOptions.QPID_AMQP_PORT, String.valueOf(port));
options.setConfigProperty(BrokerOptions.QPID_WORK_DIR, temporaryFolder.newFolder().toString());
options.setConfigProperty(BrokerOptions.QPID_HOME_DIR, "src/test/qpid");
broker.startup(options);
}
@AfterClass
public static void stopBroker() {
broker.shutdown();
}
@Test
public void testReadQueue() throws Exception {
final int maxNumRecords = 10;
PCollection<RabbitMqMessage> raw =
p.apply(
RabbitMqIO.read()
.withUri("amqp:
.withQueue("READ")
.withMaxNumRecords(maxNumRecords));
PCollection<byte[]> output = raw.apply(ParDo.of(new ConverterFn()));
List<byte[]> records = generateRecords(maxNumRecords);
PAssert.that(output).containsInAnyOrder(records);
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = connectionFactory.newConnection();
Channel channel = connection.createChannel();
channel.queueDeclare("READ", false, false, false, null);
for (byte[] record : records) {
channel.basicPublish("", "READ", null, record);
}
p.run();
channel.close();
connection.close();
}
@Test(timeout = 60 * 1000)
public void testReadExchange() throws Exception {
final int maxNumRecords = 10;
PCollection<RabbitMqMessage> raw =
p.apply(
RabbitMqIO.read()
.withUri("amqp:
.withExchange("READEXCHANGE", "fanout", "test")
.withMaxNumRecords(maxNumRecords));
PCollection<byte[]> output = raw.apply(ParDo.of(new ConverterFn()));
List<byte[]> records = generateRecords(maxNumRecords);
PAssert.that(output).containsInAnyOrder(records);
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = connectionFactory.newConnection();
final Channel channel = connection.createChannel();
channel.exchangeDeclare("READEXCHANGE", "fanout");
Thread publisher =
new Thread(
() -> {
try {
Thread.sleep(5000);
} catch (Exception e) {
LOG.error(e.getMessage(), e);
}
for (int i = 0; i < 10; i++) {
try {
channel.basicPublish(
"READEXCHANGE", "test", null, ("Test " + i).getBytes(StandardCharsets.UTF_8));
} catch (Exception e) {
LOG.error(e.getMessage(), e);
}
}
});
publisher.start();
p.run();
publisher.join();
channel.close();
connection.close();
}
@Test
@Test
public void testWriteExchange() throws Exception {
final int maxNumRecords = 1000;
List<RabbitMqMessage> data =
IntStream.range(0, maxNumRecords)
.mapToObj(i -> new RabbitMqMessage(("Test " + i).getBytes(StandardCharsets.UTF_8)))
.collect(Collectors.toList());
p.apply(Create.of(data))
.apply(
RabbitMqIO.write()
.withUri("amqp:
.withExchange("WRITE", "fanout"));
final List<String> received = new ArrayList<>();
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = connectionFactory.newConnection();
Channel channel = connection.createChannel();
channel.exchangeDeclare("WRITE", "fanout");
String queueName = channel.queueDeclare().getQueue();
channel.queueBind(queueName, "WRITE", "");
Consumer consumer =
new DefaultConsumer(channel) {
@Override
public void handleDelivery(
String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)
throws IOException {
String message = new String(body, "UTF-8");
received.add(message);
}
};
channel.basicConsume(queueName, true, consumer);
p.run();
while (received.size() < maxNumRecords) {
Thread.sleep(500);
}
assertEquals(maxNumRecords, received.size());
for (int i = 0; i < maxNumRecords; i++) {
assertTrue(received.contains("Test " + i));
}
channel.close();
connection.close();
}
private static class ConverterFn extends DoFn<RabbitMqMessage, byte[]> {
ConverterFn() {}
@ProcessElement
public void processElement(ProcessContext c) {
RabbitMqMessage message = c.element();
c.output(message.getBody());
}
}
private static List<byte[]> generateRecords(int maxNumRecords) {
return IntStream.range(0, maxNumRecords)
.mapToObj(i -> ("Test " + i).getBytes(StandardCharsets.UTF_8))
.collect(Collectors.toList());
}
} | class RabbitMqIOTest implements Serializable {
private static final Logger LOG = LoggerFactory.getLogger(RabbitMqIOTest.class);
private static int port;
@ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule public transient TestPipeline p = TestPipeline.create();
private static transient Broker broker;
@BeforeClass
public static void startBroker() throws Exception {
try (ServerSocket serverSocket = new ServerSocket(0)) {
port = serverSocket.getLocalPort();
}
System.setProperty("derby.stream.error.field", "MyApp.DEV_NULL");
broker = new Broker();
BrokerOptions options = new BrokerOptions();
options.setConfigProperty(BrokerOptions.QPID_AMQP_PORT, String.valueOf(port));
options.setConfigProperty(BrokerOptions.QPID_WORK_DIR, temporaryFolder.newFolder().toString());
options.setConfigProperty(BrokerOptions.QPID_HOME_DIR, "src/test/qpid");
broker.startup(options);
}
@AfterClass
public static void stopBroker() {
broker.shutdown();
}
@Test
public void testReadQueue() throws Exception {
final int maxNumRecords = 10;
PCollection<RabbitMqMessage> raw =
p.apply(
RabbitMqIO.read()
.withUri("amqp:
.withQueue("READ")
.withMaxNumRecords(maxNumRecords));
PCollection<String> output =
raw.apply(
MapElements.into(TypeDescriptors.strings())
.via(
(RabbitMqMessage message) ->
new String(message.getBody(), StandardCharsets.UTF_8)));
List<String> records =
generateRecords(maxNumRecords)
.stream()
.map(record -> new String(record, StandardCharsets.UTF_8))
.collect(Collectors.toList());
PAssert.that(output).containsInAnyOrder(records);
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = null;
Channel channel = null;
try {
connection = connectionFactory.newConnection();
channel = connection.createChannel();
channel.queueDeclare("READ", false, false, false, null);
for (String record : records) {
channel.basicPublish("", "READ", null, record.getBytes(StandardCharsets.UTF_8));
}
p.run();
} finally {
if (channel != null) {
channel.close();
}
if (connection != null) {
connection.close();
}
}
}
@Test(timeout = 60 * 1000)
public void testReadExchange() throws Exception {
final int maxNumRecords = 10;
PCollection<RabbitMqMessage> raw =
p.apply(
RabbitMqIO.read()
.withUri("amqp:
.withExchange("READEXCHANGE", "fanout", "test")
.withMaxNumRecords(maxNumRecords));
PCollection<String> output =
raw.apply(
MapElements.into(TypeDescriptors.strings())
.via(
(RabbitMqMessage message) ->
new String(message.getBody(), StandardCharsets.UTF_8)));
List<String> records =
generateRecords(maxNumRecords)
.stream()
.map(record -> new String(record, StandardCharsets.UTF_8))
.collect(Collectors.toList());
PAssert.that(output).containsInAnyOrder(records);
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = null;
Channel channel = null;
try {
connection = connectionFactory.newConnection();
channel = connection.createChannel();
channel.exchangeDeclare("READEXCHANGE", "fanout");
Channel finalChannel = channel;
Thread publisher =
new Thread(
() -> {
try {
Thread.sleep(5000);
} catch (Exception e) {
LOG.error(e.getMessage(), e);
}
for (int i = 0; i < maxNumRecords; i++) {
try {
finalChannel.basicPublish(
"READEXCHANGE",
"test",
null,
("Test " + i).getBytes(StandardCharsets.UTF_8));
} catch (Exception e) {
LOG.error(e.getMessage(), e);
}
}
});
publisher.start();
p.run();
publisher.join();
} finally {
if (channel != null) {
channel.close();
}
if (connection != null) {
connection.close();
}
}
}
@Test
@Test
public void testWriteExchange() throws Exception {
final int maxNumRecords = 1000;
List<RabbitMqMessage> data =
generateRecords(maxNumRecords)
.stream()
.map(bytes -> new RabbitMqMessage(bytes))
.collect(Collectors.toList());
p.apply(Create.of(data))
.apply(
RabbitMqIO.write()
.withUri("amqp:
.withExchange("WRITE", "fanout"));
final List<String> received = new ArrayList<>();
ConnectionFactory connectionFactory = new ConnectionFactory();
connectionFactory.setUri("amqp:
Connection connection = null;
Channel channel = null;
try {
connection = connectionFactory.newConnection();
channel = connection.createChannel();
channel.exchangeDeclare("WRITE", "fanout");
String queueName = channel.queueDeclare().getQueue();
channel.queueBind(queueName, "WRITE", "");
Consumer consumer = new TestConsumer(channel, received);
channel.basicConsume(queueName, true, consumer);
p.run();
while (received.size() < maxNumRecords) {
Thread.sleep(500);
}
assertEquals(maxNumRecords, received.size());
for (int i = 0; i < maxNumRecords; i++) {
assertTrue(received.contains("Test " + i));
}
} finally {
if (channel != null) {
channel.close();
}
if (connection != null) {
connection.close();
}
}
}
private static List<byte[]> generateRecords(int maxNumRecords) {
return IntStream.range(0, maxNumRecords)
.mapToObj(i -> ("Test " + i).getBytes(StandardCharsets.UTF_8))
.collect(Collectors.toList());
}
private static class TestConsumer extends DefaultConsumer {
private final List<String> received;
public TestConsumer(Channel channel, List<String> received) {
super(channel);
this.received = received;
}
@Override
public void handleDelivery(
String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)
throws IOException {
String message = new String(body, "UTF-8");
received.add(message);
}
}
} |
run `./gradlew spotlessApply` to fix style issues. | public void testAnyValueFunction() throws Exception {
pipeline.enableAbandonedNodeEnforcement(false);
Schema schema =
Schema.builder().addInt32Field("key").addInt32Field("col").build();
PCollection<Row> inputRows =
pipeline
.apply(
Create.of(
TestUtils.rowsBuilderOf(schema)
.addRows(
0, 1,
0, 2,
1, 3,
2, 4,
2, 5)
.getRows()))
.setRowSchema(schema);
String sql = "SELECT key, any_value(col) as any_value FROM PCOLLECTION GROUP BY key";
PCollection<Row> result = inputRows.apply("sql", SqlTransform.query(sql));
Map<Integer, List<Integer>> allowedTuples = new HashMap<>();
allowedTuples.put(0, Arrays.asList(1,2));
allowedTuples.put(1, Arrays.asList(3));
allowedTuples.put(2, Arrays.asList(4,5));
PAssert.that(result).satisfies(input -> {
Iterator<Row> iter = input.iterator();
while (iter.hasNext()){
Row row = iter.next();
List<Schema.Field> fields = row.getSchema().getFields();
List<Integer> values= allowedTuples.remove(row.getInt32("key"));
assertTrue(values !=null);
assertTrue(values.contains(row.getInt32("any_value")));
}
assertTrue(allowedTuples.isEmpty());
return null;
});
pipeline.run();
} | TestUtils.rowsBuilderOf(schema) | public void testAnyValueFunction() throws Exception {
pipeline.enableAbandonedNodeEnforcement(false);
Schema schema = Schema.builder().addInt32Field("key").addInt32Field("col").build();
PCollection<Row> inputRows =
pipeline
.apply(
Create.of(
TestUtils.rowsBuilderOf(schema)
.addRows(
0, 1,
0, 2,
1, 3,
2, 4,
2, 5)
.getRows()))
.setRowSchema(schema);
String sql = "SELECT key, any_value(col) as any_value FROM PCOLLECTION GROUP BY key";
PCollection<Row> result = inputRows.apply("sql", SqlTransform.query(sql));
Map<Integer, List<Integer>> allowedTuples = new HashMap<>();
allowedTuples.put(0, Arrays.asList(1, 2));
allowedTuples.put(1, Arrays.asList(3));
allowedTuples.put(2, Arrays.asList(4, 5));
PAssert.that(result)
.satisfies(
input -> {
Iterator<Row> iter = input.iterator();
while (iter.hasNext()) {
Row row = iter.next();
List<Integer> values = allowedTuples.remove(row.getInt32("key"));
assertTrue(values != null);
assertTrue(values.contains(row.getInt32("any_value")));
}
assertTrue(allowedTuples.isEmpty());
return null;
});
pipeline.run();
} | class BeamSqlDslAggregationTest extends BeamSqlDslBase {
public PCollection<Row> boundedInput3;
@Before
public void setUp() {
Schema schemaInTableB =
Schema.builder()
.addInt32Field("f_int")
.addDoubleField("f_double")
.addInt32Field("f_int2")
.addDecimalField("f_decimal")
.build();
List<Row> rowsInTableB =
TestUtils.RowsBuilder.of(schemaInTableB)
.addRows(
1,
1.0,
0,
new BigDecimal(1),
4,
4.0,
0,
new BigDecimal(4),
7,
7.0,
0,
new BigDecimal(7),
13,
13.0,
0,
new BigDecimal(13),
5,
5.0,
0,
new BigDecimal(5),
10,
10.0,
0,
new BigDecimal(10),
17,
17.0,
0,
new BigDecimal(17))
.getRows();
boundedInput3 =
pipeline.apply("boundedInput3", Create.of(rowsInTableB).withRowSchema(schemaInTableB));
}
/** GROUP-BY with single aggregation function with bounded PCollection. */
@Test
public void testAggregationWithoutWindowWithBounded() throws Exception {
runAggregationWithoutWindow(boundedInput1);
}
/** GROUP-BY with single aggregation function with unbounded PCollection. */
@Test
public void testAggregationWithoutWindowWithUnbounded() throws Exception {
runAggregationWithoutWindow(unboundedInput1);
}
private void runAggregationWithoutWindow(PCollection<Row> input) throws Exception {
String sql = "SELECT f_int2, COUNT(*) AS `getFieldCount` FROM PCOLLECTION GROUP BY f_int2";
PCollection<Row> result = input.apply("testAggregationWithoutWindow", SqlTransform.query(sql));
Schema resultType = Schema.builder().addInt32Field("f_int2").addInt64Field("size").build();
Row row = Row.withSchema(resultType).addValues(0, 4L).build();
PAssert.that(result).containsInAnyOrder(row);
pipeline.run().waitUntilFinish();
}
/** GROUP-BY with multiple aggregation functions with bounded PCollection. */
@Test
public void testAggregationFunctionsWithBounded() throws Exception {
  runAggregationFunctions(boundedInput1);
}

/** GROUP-BY with multiple aggregation functions with unbounded PCollection. */
@Test
public void testAggregationFunctionsWithUnbounded() throws Exception {
  runAggregationFunctions(unboundedInput1);
}

/**
 * Shared body: exercises COUNT/SUM/AVG/MAX/MIN over every numeric column type, MAX/MIN over
 * timestamp and string columns, and VAR_POP/VAR_SAMP, all grouped by f_int2 (a single group of
 * four rows). Integer-typed AVG/VAR results are expected truncated (e.g. varsamp2 is 1 while
 * varsamp1 is 1.666666667).
 *
 * @param input bounded or unbounded fixture table (declared in the test base class)
 */
private void runAggregationFunctions(PCollection<Row> input) throws Exception {
  String sql =
      "select f_int2, count(*) as getFieldCount, "
          + "sum(f_long) as sum1, avg(f_long) as avg1, "
          + "max(f_long) as max1, min(f_long) as min1, "
          + "sum(f_short) as sum2, avg(f_short) as avg2, "
          + "max(f_short) as max2, min(f_short) as min2, "
          + "sum(f_byte) as sum3, avg(f_byte) as avg3, "
          + "max(f_byte) as max3, min(f_byte) as min3, "
          + "sum(f_float) as sum4, avg(f_float) as avg4, "
          + "max(f_float) as max4, min(f_float) as min4, "
          + "sum(f_double) as sum5, avg(f_double) as avg5, "
          + "max(f_double) as max5, min(f_double) as min5, "
          + "max(f_timestamp) as max6, min(f_timestamp) as min6, "
          + "max(f_string) as max7, min(f_string) as min7, "
          + "var_pop(f_double) as varpop1, var_samp(f_double) as varsamp1, "
          + "var_pop(f_int) as varpop2, var_samp(f_int) as varsamp2 "
          + "FROM TABLE_A group by f_int2";
  PCollection<Row> result =
      PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
          .apply("testAggregationFunctions", SqlTransform.query(sql));
  // Result schema mirrors the SELECT list; each aggregate keeps its input column's type.
  Schema resultType =
      Schema.builder()
          .addInt32Field("f_int2")
          .addInt64Field("size")
          .addInt64Field("sum1")
          .addInt64Field("avg1")
          .addInt64Field("max1")
          .addInt64Field("min1")
          .addInt16Field("sum2")
          .addInt16Field("avg2")
          .addInt16Field("max2")
          .addInt16Field("min2")
          .addByteField("sum3")
          .addByteField("avg3")
          .addByteField("max3")
          .addByteField("min3")
          .addFloatField("sum4")
          .addFloatField("avg4")
          .addFloatField("max4")
          .addFloatField("min4")
          .addDoubleField("sum5")
          .addDoubleField("avg5")
          .addDoubleField("max5")
          .addDoubleField("min5")
          .addDateTimeField("max6")
          .addDateTimeField("min6")
          .addStringField("max7")
          .addStringField("min7")
          .addDoubleField("varpop1")
          .addDoubleField("varsamp1")
          .addInt32Field("varpop2")
          .addInt32Field("varsamp2")
          .build();
  // Expected aggregates over the four fixture rows (values 1..4, f_long 1000..4000).
  Row row =
      Row.withSchema(resultType)
          .addValues(
              0,
              4L,
              10000L,
              2500L,
              4000L,
              1000L,
              (short) 10,
              (short) 2,
              (short) 4,
              (short) 1,
              (byte) 10,
              (byte) 2,
              (byte) 4,
              (byte) 1,
              10.0F,
              2.5F,
              4.0F,
              1.0F,
              10.0,
              2.5,
              4.0,
              1.0,
              parseTimestampWithoutTimeZone("2017-01-01 02:04:03"),
              parseTimestampWithoutTimeZone("2017-01-01 01:01:03"),
              "第四行",
              "string_row1",
              1.25,
              1.666666667,
              1,
              1)
          .build();
  PAssert.that(result).containsInAnyOrder(row);
  pipeline.run().waitUntilFinish();
}
/**
 * Verifies the single result row produced by
 * {@link #testAggregationFunctionsWithBoundedOnBigDecimalDivide()}: AVG / VAR_POP / VAR_SAMP over
 * the seven values {1, 4, 7, 13, 5, 10, 17} in {@code boundedInput3}, computed both as doubles
 * (tolerance comparison) and as ints (truncated).
 *
 * <p>Note: the stray {@code @Test} annotation that previously preceded this class has been
 * removed — JUnit 4's {@code @Test} targets methods only, so it is invalid on a class — along
 * with the misattached "any_value" javadoc.
 */
private static class CheckerBigDecimalDivide
    implements SerializableFunction<Iterable<Row>, Void> {
  @Override
  public Void apply(Iterable<Row> input) {
    Iterator<Row> iter = input.iterator();
    assertTrue(iter.hasNext());
    Row row = iter.next();
    assertEquals(row.getDouble("avg1"), 8.142857143, 1e-7);
    // Integer aggregates are truncated toward zero; use assertEquals for clearer failures.
    assertEquals(8, (int) row.getInt32("avg2"));
    assertEquals(row.getDouble("varpop1"), 26.40816326, 1e-7);
    assertEquals(26, (int) row.getInt32("varpop2"));
    assertEquals(row.getDouble("varsamp1"), 30.80952381, 1e-7);
    assertEquals(30, (int) row.getInt32("varsamp2"));
    // All rows share f_int2 == 0, so exactly one group and one result row.
    assertFalse(iter.hasNext());
    return null;
  }
}
/** GROUP-BY with aggregation functions with BigDecimal Calculation (Avg, Var_Pop, etc). */
@Test
public void testAggregationFunctionsWithBoundedOnBigDecimalDivide() throws Exception {
  String sql =
      "SELECT AVG(f_double) as avg1, AVG(f_int) as avg2, "
          + "VAR_POP(f_double) as varpop1, VAR_POP(f_int) as varpop2, "
          + "VAR_SAMP(f_double) as varsamp1, VAR_SAMP(f_int) as varsamp2 "
          + "FROM PCOLLECTION GROUP BY f_int2";
  PCollection<Row> result =
      boundedInput3.apply("testAggregationWithDecimalValue", SqlTransform.query(sql));
  // Result values cannot be stated exactly as literals (repeating decimals), so a
  // SerializableFunction checker with tolerance-based assertions is used instead of PAssert rows.
  PAssert.that(result).satisfies(new CheckerBigDecimalDivide());
  pipeline.run().waitUntilFinish();
}
/** Implicit GROUP-BY with DISTINCT with bounded PCollection. */
@Test
public void testDistinctWithBounded() throws Exception {
  runDistinct(boundedInput1);
}

/** Implicit GROUP-BY with DISTINCT with unbounded PCollection. */
@Test
public void testDistinctWithUnbounded() throws Exception {
  runDistinct(unboundedInput1);
}

/**
 * Shared body: SELECT DISTINCT over two columns. The four fixture rows are already distinct on
 * (f_int, f_long), so all four pairs are expected back.
 *
 * @param input bounded or unbounded fixture table (declared in the test base class)
 */
private void runDistinct(PCollection<Row> input) throws Exception {
  String sql = "SELECT distinct f_int, f_long FROM PCOLLECTION ";
  PCollection<Row> result = input.apply("testDistinct", SqlTransform.query(sql));
  Schema resultType = Schema.builder().addInt32Field("f_int").addInt64Field("f_long").build();
  List<Row> expectedRows =
      TestUtils.RowsBuilder.of(resultType)
          .addRows(
              1, 1000L,
              2, 2000L,
              3, 3000L,
              4, 4000L)
          .getRows();
  PAssert.that(result).containsInAnyOrder(expectedRows);
  pipeline.run().waitUntilFinish();
}
/** GROUP-BY with TUMBLE window(aka fix_time_window) with bounded PCollection. */
@Test
public void testTumbleWindowWithBounded() throws Exception {
  runTumbleWindow(boundedInput1);
}

/** GROUP-BY with TUMBLE window(aka fix_time_window) with unbounded PCollection. */
@Test
public void testTumbleWindowWithUnbounded() throws Exception {
  runTumbleWindow(unboundedInput1);
}

@Test
public void testTumbleWindowWith31DaysBounded() throws Exception {
  runTumbleWindowFor31Days(boundedInputMonthly);
}

/**
 * TUMBLE with a 31-day window size over the monthly fixture: each of the three input rows lands
 * in its own window. The asserted boundaries (2016-12-08, 2017-01-08, ...) show where the 31-day
 * grid falls for this data set.
 *
 * @param input monthly bounded fixture table (declared in the test base class)
 */
private void runTumbleWindowFor31Days(PCollection<Row> input) throws Exception {
  String sql =
      "SELECT f_int2, COUNT(*) AS `getFieldCount`,"
          + " TUMBLE_START(f_timestamp, INTERVAL '31' DAY) AS `window_start`, "
          + " TUMBLE_END(f_timestamp, INTERVAL '31' DAY) AS `window_end` "
          + " FROM TABLE_A"
          + " GROUP BY f_int2, TUMBLE(f_timestamp, INTERVAL '31' DAY)";
  PCollection<Row> result =
      PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
          .apply("testTumbleWindow", SqlTransform.query(sql));
  Schema resultType =
      Schema.builder()
          .addInt32Field("f_int2")
          .addInt64Field("size")
          .addDateTimeField("window_start")
          .addDateTimeField("window_end")
          .build();
  List<Row> expectedRows =
      TestUtils.RowsBuilder.of(resultType)
          .addRows(
              0,
              1L,
              parseTimestampWithUTCTimeZone("2016-12-08 00:00:00"),
              parseTimestampWithUTCTimeZone("2017-01-08 00:00:00"),
              0,
              1L,
              parseTimestampWithUTCTimeZone("2017-01-08 00:00:00"),
              parseTimestampWithUTCTimeZone("2017-02-08 00:00:00"),
              0,
              1L,
              parseTimestampWithUTCTimeZone("2017-02-08 00:00:00"),
              parseTimestampWithUTCTimeZone("2017-03-11 00:00:00"))
          .getRows();
  PAssert.that(result).containsInAnyOrder(expectedRows);
  pipeline.run().waitUntilFinish();
}
/**
 * Shared body for the 1-hour TUMBLE tests: three fixture rows fall into the 01:00–02:00 window
 * and one into 02:00–03:00, so two result rows with counts 3 and 1 are expected.
 *
 * @param input bounded or unbounded fixture table (declared in the test base class)
 */
private void runTumbleWindow(PCollection<Row> input) throws Exception {
  String sql =
      "SELECT f_int2, COUNT(*) AS `getFieldCount`,"
          + " TUMBLE_START(f_timestamp, INTERVAL '1' HOUR) AS `window_start`, "
          + " TUMBLE_END(f_timestamp, INTERVAL '1' HOUR) AS `window_end` "
          + " FROM TABLE_A"
          + " GROUP BY f_int2, TUMBLE(f_timestamp, INTERVAL '1' HOUR)";
  PCollection<Row> result =
      PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
          .apply("testTumbleWindow", SqlTransform.query(sql));
  Schema resultType =
      Schema.builder()
          .addInt32Field("f_int2")
          .addInt64Field("size")
          .addDateTimeField("window_start")
          .addDateTimeField("window_end")
          .build();
  List<Row> expectedRows =
      TestUtils.RowsBuilder.of(resultType)
          .addRows(
              0,
              3L,
              parseTimestampWithoutTimeZone("2017-01-01 01:00:00"),
              parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
              0,
              1L,
              parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
              parseTimestampWithoutTimeZone("2017-01-01 03:00:00"))
          .getRows();
  PAssert.that(result).containsInAnyOrder(expectedRows);
  pipeline.run().waitUntilFinish();
}
/**
 * Tests that a trigger set up prior to a SQL statement still is effective within the SQL
 * statement.
 */
@Test
@Category(UsesTestStream.class)
public void testTriggeredTumble() throws Exception {
  Schema inputSchema =
      Schema.builder().addInt32Field("f_int").addDateTimeField("f_timestamp").build();
  // Three TestStream bundles — (1, 2), (3), (4) — all with the same event time, so all four
  // elements land in the same 1-hour tumbling window.
  PCollection<Row> input =
      pipeline.apply(
          TestStream.create(inputSchema)
              .addElements(
                  Row.withSchema(inputSchema)
                      .addValues(1, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
                      .build(),
                  Row.withSchema(inputSchema)
                      .addValues(2, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
                      .build())
              .addElements(
                  Row.withSchema(inputSchema)
                      .addValues(3, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
                      .build())
              .addElements(
                  Row.withSchema(inputSchema)
                      .addValues(4, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
                      .build())
              .advanceWatermarkToInfinity());
  String sql =
      "SELECT SUM(f_int) AS f_int_sum FROM PCOLLECTION"
          + " GROUP BY TUMBLE(f_timestamp, INTERVAL '1' HOUR)";
  // NOTE(review): schema field "fn_int_sum" differs from the SQL alias `f_int_sum`; the
  // comparison evidently tolerates the name difference — confirm.
  Schema outputSchema = Schema.builder().addInt32Field("fn_int_sum").build();
  PCollection<Row> result =
      input
          .apply(
              "Triggering",
              Window.<Row>configure()
                  .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))
                  .withAllowedLateness(Duration.ZERO)
                  .withOnTimeBehavior(Window.OnTimeBehavior.FIRE_IF_NON_EMPTY)
                  .accumulatingFiredPanes())
          .apply("Windowed Query", SqlTransform.query(sql));
  // Accumulating panes fire once per bundle: 1+2=3, +3=6, +4=10.
  PAssert.that(result)
      .containsInAnyOrder(
          TestUtils.RowsBuilder.of(outputSchema)
              .addRows(3)
              .addRows(6)
              .addRows(10)
              .getRows());
  pipeline.run().waitUntilFinish();
}
/** GROUP-BY with HOP window(aka sliding_window) with bounded PCollection. */
@Test
public void testHopWindowWithBounded() throws Exception {
  runHopWindow(boundedInput1);
}

/** GROUP-BY with HOP window(aka sliding_window) with unbounded PCollection. */
@Test
public void testHopWindowWithUnbounded() throws Exception {
  runHopWindow(unboundedInput1);
}

/**
 * Shared body: HOP with a 30-minute slide and 1-hour size. Each element belongs to two
 * overlapping windows, hence four result rows for the two element clusters.
 *
 * @param input bounded or unbounded fixture table (declared in the test base class)
 */
private void runHopWindow(PCollection<Row> input) throws Exception {
  String sql =
      "SELECT f_int2, COUNT(*) AS `getFieldCount`,"
          + " HOP_START(f_timestamp, INTERVAL '30' MINUTE, INTERVAL '1' HOUR) AS `window_start`, "
          + " HOP_END(f_timestamp, INTERVAL '30' MINUTE, INTERVAL '1' HOUR) AS `window_end` "
          + " FROM PCOLLECTION"
          + " GROUP BY f_int2, HOP(f_timestamp, INTERVAL '30' MINUTE, INTERVAL '1' HOUR)";
  PCollection<Row> result = input.apply("testHopWindow", SqlTransform.query(sql));
  Schema resultType =
      Schema.builder()
          .addInt32Field("f_int2")
          .addInt64Field("size")
          .addDateTimeField("window_start")
          .addDateTimeField("window_end")
          .build();
  List<Row> expectedRows =
      TestUtils.RowsBuilder.of(resultType)
          .addRows(
              0,
              3L,
              parseTimestampWithoutTimeZone("2017-01-01 00:30:00"),
              parseTimestampWithoutTimeZone("2017-01-01 01:30:00"),
              0,
              3L,
              parseTimestampWithoutTimeZone("2017-01-01 01:00:00"),
              parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
              0,
              1L,
              parseTimestampWithoutTimeZone("2017-01-01 01:30:00"),
              parseTimestampWithoutTimeZone("2017-01-01 02:30:00"),
              0,
              1L,
              parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
              parseTimestampWithoutTimeZone("2017-01-01 03:00:00"))
          .getRows();
  PAssert.that(result).containsInAnyOrder(expectedRows);
  pipeline.run().waitUntilFinish();
}
/** GROUP-BY with SESSION window with bounded PCollection. */
@Test
public void testSessionWindowWithBounded() throws Exception {
  runSessionWindow(boundedInput1);
}

/** GROUP-BY with SESSION window with unbounded PCollection. */
@Test
public void testSessionWindowWithUnbounded() throws Exception {
  runSessionWindow(unboundedInput1);
}

/**
 * Shared body: SESSION with a 5-minute gap. The fixture clusters into two sessions — one of
 * three rows around 01:01:03 and one single row at 02:04:03.
 *
 * @param input bounded or unbounded fixture table (declared in the test base class)
 */
private void runSessionWindow(PCollection<Row> input) throws Exception {
  String sql =
      "SELECT f_int2, COUNT(*) AS `getFieldCount`,"
          + " SESSION_START(f_timestamp, INTERVAL '5' MINUTE) AS `window_start`, "
          + " SESSION_END(f_timestamp, INTERVAL '5' MINUTE) AS `window_end` "
          + " FROM TABLE_A"
          + " GROUP BY f_int2, SESSION(f_timestamp, INTERVAL '5' MINUTE)";
  PCollection<Row> result =
      PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
          .apply("testSessionWindow", SqlTransform.query(sql));
  Schema resultType =
      Schema.builder()
          .addInt32Field("f_int2")
          .addInt64Field("size")
          .addDateTimeField("window_start")
          .addDateTimeField("window_end")
          .build();
  List<Row> expectedRows =
      TestUtils.RowsBuilder.of(resultType)
          .addRows(
              0,
              3L,
              parseTimestampWithoutTimeZone("2017-01-01 01:01:03"),
              parseTimestampWithoutTimeZone("2017-01-01 01:01:03"),
              0,
              1L,
              parseTimestampWithoutTimeZone("2017-01-01 02:04:03"),
              parseTimestampWithoutTimeZone("2017-01-01 02:04:03"))
          .getRows();
  PAssert.that(result).containsInAnyOrder(expectedRows);
  pipeline.run().waitUntilFinish();
}
/**
 * TUMBLE over a non-timestamp (BIGINT) column must be rejected during SQL validation with a
 * {@link ParseException} whose cause names the bad operand types.
 */
@Test
public void testWindowOnNonTimestampField() throws Exception {
  exceptions.expect(ParseException.class);
  exceptions.expectCause(
      hasMessage(
          containsString(
              "Cannot apply 'TUMBLE' to arguments of type 'TUMBLE(<BIGINT>, <INTERVAL HOUR>)'")));
  // The pipeline never runs to completion, so abandoned-node enforcement must be off.
  pipeline.enableAbandonedNodeEnforcement(false);
  String sql =
      "SELECT f_int2, COUNT(*) AS `getFieldCount` FROM TABLE_A "
          + "GROUP BY f_int2, TUMBLE(f_long, INTERVAL '1' HOUR)";
  // apply() throws during query planning; the result is never used, so the unused local the
  // original bound here has been dropped.
  PCollectionTuple.of(new TupleTag<>("TABLE_A"), boundedInput1)
      .apply("testWindowOnNonTimestampField", SqlTransform.query(sql));
  pipeline.run().waitUntilFinish();
}
/**
 * {@code COUNT(DISTINCT *)} is not valid SQL and must fail to parse; the cause message reports
 * the unexpected {@code *} token.
 */
@Test
public void testUnsupportedDistinct() throws Exception {
  exceptions.expect(ParseException.class);
  exceptions.expectCause(hasMessage(containsString("Encountered \"*\"")));
  // The pipeline never runs to completion, so abandoned-node enforcement must be off.
  pipeline.enableAbandonedNodeEnforcement(false);
  String sql = "SELECT f_int2, COUNT(DISTINCT *) AS `size` " + "FROM PCOLLECTION GROUP BY f_int2";
  // apply() throws during parsing; the result is never used, so the unused local the original
  // bound here has been dropped (matching testUnsupportedDistinct2's style).
  boundedInput1.apply("testUnsupportedDistinct", SqlTransform.query(sql));
  pipeline.run().waitUntilFinish();
}
/**
 * {@code COUNT(DISTINCT col)} parses but is not implemented; an
 * {@link UnsupportedOperationException} mentioning "COUNT DISTINCT" is expected.
 */
@Test
public void testUnsupportedDistinct2() throws Exception {
  exceptions.expect(UnsupportedOperationException.class);
  exceptions.expectMessage(containsString("COUNT DISTINCT"));
  pipeline.enableAbandonedNodeEnforcement(false);
  String sql = "SELECT f_int2, COUNT(DISTINCT f_long)" + "FROM PCOLLECTION GROUP BY f_int2";
  // NOTE(review): the apply label duplicates testUnsupportedDistinct's; harmless here since
  // each test builds its own pipeline, but worth renaming for log clarity.
  boundedInput1.apply("testUnsupportedDistinct", SqlTransform.query(sql));
  pipeline.run().waitUntilFinish();
}
/**
 * Aggregating an unbounded input in the global window with the default trigger can never emit,
 * so SqlTransform is expected to reject it with {@link UnsupportedOperationException}.
 */
@Test
public void testUnsupportedGlobalWindowWithDefaultTrigger() {
  exceptions.expect(UnsupportedOperationException.class);
  pipeline.enableAbandonedNodeEnforcement(false);
  PCollection<Row> input =
      unboundedInput1.apply(
          "unboundedInput1.globalWindow",
          Window.<Row>into(new GlobalWindows()).triggering(DefaultTrigger.of()));
  String sql = "SELECT f_int2, COUNT(*) AS `size` FROM PCOLLECTION GROUP BY f_int2";
  input.apply("testUnsupportedGlobalWindows", SqlTransform.query(sql));
}
/**
 * Global-window aggregation IS supported when a non-default trigger is installed: with
 * element-count-2 firings and discarding panes, the six values emit as pane sums 3, 7, 11.
 */
@Test
public void testSupportsGlobalWindowWithCustomTrigger() throws Exception {
  pipeline.enableAbandonedNodeEnforcement(false);
  DateTime startTime = parseTimestampWithoutTimeZone("2017-1-1 0:0:0");
  Schema type =
      Schema.builder()
          .addInt32Field("f_intGroupingKey")
          .addInt32Field("f_intValue")
          .addDateTimeField("f_timestamp")
          .build();
  // Flattened rows: (key, value, timestamp) triples; all share grouping key 0.
  Object[] rows =
      new Object[] {
        0, 1, startTime.plusSeconds(0),
        0, 2, startTime.plusSeconds(1),
        0, 3, startTime.plusSeconds(2),
        0, 4, startTime.plusSeconds(3),
        0, 5, startTime.plusSeconds(4),
        0, 6, startTime.plusSeconds(6)
      };
  PCollection<Row> input =
      createTestPCollection(type, rows, "f_timestamp")
          .apply(
              Window.<Row>into(new GlobalWindows())
                  .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(2)))
                  .discardingFiredPanes()
                  .withOnTimeBehavior(Window.OnTimeBehavior.FIRE_IF_NON_EMPTY));
  String sql = "SELECT SUM(f_intValue) AS `sum` FROM PCOLLECTION GROUP BY f_intGroupingKey";
  PCollection<Row> result = input.apply("sql", SqlTransform.query(sql));
  // The SQL transform must preserve the upstream global windowing, not re-window.
  assertEquals(new GlobalWindows(), result.getWindowingStrategy().getWindowFn());
  // Discarding panes of two elements each: 1+2=3, 3+4=7, 5+6=11.
  PAssert.that(result).containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 7, 11)));
  pipeline.run();
}
/** Query has all the input fields, so no projection is added. */
@Test
public void testSupportsAggregationWithoutProjection() throws Exception {
  pipeline.enableAbandonedNodeEnforcement(false);
  Schema schema =
      Schema.builder().addInt32Field("f_intGroupingKey").addInt32Field("f_intValue").build();
  // Flattened (key, value) pairs; keys 0, 1, 2.
  PCollection<Row> inputRows =
      pipeline
          .apply(
              Create.of(
                  TestUtils.rowsBuilderOf(schema)
                      .addRows(
                          0, 1,
                          0, 2,
                          1, 3,
                          2, 4,
                          2, 5)
                      .getRows()))
          .setRowSchema(schema);
  String sql = "SELECT SUM(f_intValue) FROM PCOLLECTION GROUP BY f_intGroupingKey";
  PCollection<Row> result = inputRows.apply("sql", SqlTransform.query(sql));
  // Per-key sums: key 0 -> 3, key 1 -> 3, key 2 -> 9.
  PAssert.that(result).containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 3, 9)));
  pipeline.run();
}

/** Same shape as above but with a WHERE filter: the value 5 is excluded before aggregation. */
@Test
public void testSupportsAggregationWithFilterWithoutProjection() throws Exception {
  pipeline.enableAbandonedNodeEnforcement(false);
  Schema schema =
      Schema.builder().addInt32Field("f_intGroupingKey").addInt32Field("f_intValue").build();
  PCollection<Row> inputRows =
      pipeline
          .apply(
              Create.of(
                  TestUtils.rowsBuilderOf(schema)
                      .addRows(
                          0, 1,
                          0, 2,
                          1, 3,
                          2, 4,
                          2, 5)
                      .getRows()))
          .setRowSchema(schema);
  String sql =
      "SELECT SUM(f_intValue) FROM PCOLLECTION WHERE f_intValue < 5 GROUP BY f_intGroupingKey";
  PCollection<Row> result = inputRows.apply("sql", SqlTransform.query(sql));
  // With 5 filtered out, key 2 sums to just 4.
  PAssert.that(result).containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 3, 4)));
  pipeline.run();
}
/**
 * Fixed 3-second windows with an element-count-2 trigger and discarding panes: expected pane
 * sums are 3 and 3 (first window: pane 1+2, then on-time pane with 3), 9 (second window 4+5),
 * and 6 (third window).
 */
@Test
public void testSupportsNonGlobalWindowWithCustomTrigger() {
  DateTime startTime = parseTimestampWithoutTimeZone("2017-1-1 0:0:0");
  Schema type =
      Schema.builder()
          .addInt32Field("f_intGroupingKey")
          .addInt32Field("f_intValue")
          .addDateTimeField("f_timestamp")
          .build();
  // Flattened (key, value, timestamp) triples; timestamps at 0..4 and 6 seconds.
  Object[] rows =
      new Object[] {
        0, 1, startTime.plusSeconds(0),
        0, 2, startTime.plusSeconds(1),
        0, 3, startTime.plusSeconds(2),
        0, 4, startTime.plusSeconds(3),
        0, 5, startTime.plusSeconds(4),
        0, 6, startTime.plusSeconds(6)
      };
  PCollection<Row> input =
      createTestPCollection(type, rows, "f_timestamp")
          .apply(
              Window.<Row>into(FixedWindows.of(Duration.standardSeconds(3)))
                  .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(2)))
                  .discardingFiredPanes()
                  .withAllowedLateness(Duration.ZERO)
                  .withOnTimeBehavior(Window.OnTimeBehavior.FIRE_IF_NON_EMPTY));
  String sql = "SELECT SUM(f_intValue) AS `sum` FROM PCOLLECTION GROUP BY f_intGroupingKey";
  PCollection<Row> result = input.apply("sql", SqlTransform.query(sql));
  // The SQL transform must preserve the upstream fixed windowing.
  assertEquals(
      FixedWindows.of(Duration.standardSeconds(3)), result.getWindowingStrategy().getWindowFn());
  PAssert.that(result)
      .containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 3, 9, 6)));
  pipeline.run();
}
/**
 * Wraps each integer in {@code values} into a one-field {@link Row} whose single INT32 field is
 * named {@code fieldName} — used to build expected aggregation results.
 */
private List<Row> rowsWithSingleIntField(String fieldName, List<Integer> values) {
  Schema singleFieldSchema = Schema.builder().addInt32Field(fieldName).build();
  return TestUtils.rowsBuilderOf(singleFieldSchema).addRows(values).getRows();
}
/**
 * Builds an unbounded test PCollection from flattened row values, assigning each element's
 * event time from the named timestamp field.
 *
 * @param type schema of the rows to build
 * @param rows flattened row values, one schema-width group per row
 * @param timestampField name of the DATETIME field used as the element timestamp
 */
private PCollection<Row> createTestPCollection(
    Schema type, Object[] rows, String timestampField) {
  return TestUtils.rowsBuilderOf(type)
      .addRows(rows)
      .getPCollectionBuilder()
      .inPipeline(pipeline)
      .withTimestampField(timestampField)
      .buildUnbounded();
}
} | class BeamSqlDslAggregationTest extends BeamSqlDslBase {
public PCollection<Row> boundedInput3;
@Before
public void setUp() {
Schema schemaInTableB =
Schema.builder()
.addInt32Field("f_int")
.addDoubleField("f_double")
.addInt32Field("f_int2")
.addDecimalField("f_decimal")
.build();
List<Row> rowsInTableB =
TestUtils.RowsBuilder.of(schemaInTableB)
.addRows(
1,
1.0,
0,
new BigDecimal(1),
4,
4.0,
0,
new BigDecimal(4),
7,
7.0,
0,
new BigDecimal(7),
13,
13.0,
0,
new BigDecimal(13),
5,
5.0,
0,
new BigDecimal(5),
10,
10.0,
0,
new BigDecimal(10),
17,
17.0,
0,
new BigDecimal(17))
.getRows();
boundedInput3 =
pipeline.apply("boundedInput3", Create.of(rowsInTableB).withRowSchema(schemaInTableB));
}
/** GROUP-BY with single aggregation function with bounded PCollection. */
@Test
public void testAggregationWithoutWindowWithBounded() throws Exception {
runAggregationWithoutWindow(boundedInput1);
}
/** GROUP-BY with single aggregation function with unbounded PCollection. */
@Test
public void testAggregationWithoutWindowWithUnbounded() throws Exception {
runAggregationWithoutWindow(unboundedInput1);
}
private void runAggregationWithoutWindow(PCollection<Row> input) throws Exception {
String sql = "SELECT f_int2, COUNT(*) AS `getFieldCount` FROM PCOLLECTION GROUP BY f_int2";
PCollection<Row> result = input.apply("testAggregationWithoutWindow", SqlTransform.query(sql));
Schema resultType = Schema.builder().addInt32Field("f_int2").addInt64Field("size").build();
Row row = Row.withSchema(resultType).addValues(0, 4L).build();
PAssert.that(result).containsInAnyOrder(row);
pipeline.run().waitUntilFinish();
}
/** GROUP-BY with multiple aggregation functions with bounded PCollection. */
@Test
public void testAggregationFunctionsWithBounded() throws Exception {
runAggregationFunctions(boundedInput1);
}
/** GROUP-BY with multiple aggregation functions with unbounded PCollection. */
@Test
public void testAggregationFunctionsWithUnbounded() throws Exception {
runAggregationFunctions(unboundedInput1);
}
private void runAggregationFunctions(PCollection<Row> input) throws Exception {
String sql =
"select f_int2, count(*) as getFieldCount, "
+ "sum(f_long) as sum1, avg(f_long) as avg1, "
+ "max(f_long) as max1, min(f_long) as min1, "
+ "sum(f_short) as sum2, avg(f_short) as avg2, "
+ "max(f_short) as max2, min(f_short) as min2, "
+ "sum(f_byte) as sum3, avg(f_byte) as avg3, "
+ "max(f_byte) as max3, min(f_byte) as min3, "
+ "sum(f_float) as sum4, avg(f_float) as avg4, "
+ "max(f_float) as max4, min(f_float) as min4, "
+ "sum(f_double) as sum5, avg(f_double) as avg5, "
+ "max(f_double) as max5, min(f_double) as min5, "
+ "max(f_timestamp) as max6, min(f_timestamp) as min6, "
+ "max(f_string) as max7, min(f_string) as min7, "
+ "var_pop(f_double) as varpop1, var_samp(f_double) as varsamp1, "
+ "var_pop(f_int) as varpop2, var_samp(f_int) as varsamp2 "
+ "FROM TABLE_A group by f_int2";
PCollection<Row> result =
PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
.apply("testAggregationFunctions", SqlTransform.query(sql));
Schema resultType =
Schema.builder()
.addInt32Field("f_int2")
.addInt64Field("size")
.addInt64Field("sum1")
.addInt64Field("avg1")
.addInt64Field("max1")
.addInt64Field("min1")
.addInt16Field("sum2")
.addInt16Field("avg2")
.addInt16Field("max2")
.addInt16Field("min2")
.addByteField("sum3")
.addByteField("avg3")
.addByteField("max3")
.addByteField("min3")
.addFloatField("sum4")
.addFloatField("avg4")
.addFloatField("max4")
.addFloatField("min4")
.addDoubleField("sum5")
.addDoubleField("avg5")
.addDoubleField("max5")
.addDoubleField("min5")
.addDateTimeField("max6")
.addDateTimeField("min6")
.addStringField("max7")
.addStringField("min7")
.addDoubleField("varpop1")
.addDoubleField("varsamp1")
.addInt32Field("varpop2")
.addInt32Field("varsamp2")
.build();
Row row =
Row.withSchema(resultType)
.addValues(
0,
4L,
10000L,
2500L,
4000L,
1000L,
(short) 10,
(short) 2,
(short) 4,
(short) 1,
(byte) 10,
(byte) 2,
(byte) 4,
(byte) 1,
10.0F,
2.5F,
4.0F,
1.0F,
10.0,
2.5,
4.0,
1.0,
parseTimestampWithoutTimeZone("2017-01-01 02:04:03"),
parseTimestampWithoutTimeZone("2017-01-01 01:01:03"),
"第四行",
"string_row1",
1.25,
1.666666667,
1,
1)
.build();
PAssert.that(result).containsInAnyOrder(row);
pipeline.run().waitUntilFinish();
}
/** GROUP-BY with the any_value aggregation function. */
@Test
private static class CheckerBigDecimalDivide
implements SerializableFunction<Iterable<Row>, Void> {
@Override
public Void apply(Iterable<Row> input) {
Iterator<Row> iter = input.iterator();
assertTrue(iter.hasNext());
Row row = iter.next();
assertEquals(row.getDouble("avg1"), 8.142857143, 1e-7);
assertTrue(row.getInt32("avg2") == 8);
assertEquals(row.getDouble("varpop1"), 26.40816326, 1e-7);
assertTrue(row.getInt32("varpop2") == 26);
assertEquals(row.getDouble("varsamp1"), 30.80952381, 1e-7);
assertTrue(row.getInt32("varsamp2") == 30);
assertFalse(iter.hasNext());
return null;
}
}
/** GROUP-BY with aggregation functions with BigDeciaml Calculation (Avg, Var_Pop, etc). */
@Test
public void testAggregationFunctionsWithBoundedOnBigDecimalDivide() throws Exception {
String sql =
"SELECT AVG(f_double) as avg1, AVG(f_int) as avg2, "
+ "VAR_POP(f_double) as varpop1, VAR_POP(f_int) as varpop2, "
+ "VAR_SAMP(f_double) as varsamp1, VAR_SAMP(f_int) as varsamp2 "
+ "FROM PCOLLECTION GROUP BY f_int2";
PCollection<Row> result =
boundedInput3.apply("testAggregationWithDecimalValue", SqlTransform.query(sql));
PAssert.that(result).satisfies(new CheckerBigDecimalDivide());
pipeline.run().waitUntilFinish();
}
/** Implicit GROUP-BY with DISTINCT with bounded PCollection. */
@Test
public void testDistinctWithBounded() throws Exception {
runDistinct(boundedInput1);
}
/** Implicit GROUP-BY with DISTINCT with unbounded PCollection. */
@Test
public void testDistinctWithUnbounded() throws Exception {
runDistinct(unboundedInput1);
}
private void runDistinct(PCollection<Row> input) throws Exception {
String sql = "SELECT distinct f_int, f_long FROM PCOLLECTION ";
PCollection<Row> result = input.apply("testDistinct", SqlTransform.query(sql));
Schema resultType = Schema.builder().addInt32Field("f_int").addInt64Field("f_long").build();
List<Row> expectedRows =
TestUtils.RowsBuilder.of(resultType)
.addRows(
1, 1000L,
2, 2000L,
3, 3000L,
4, 4000L)
.getRows();
PAssert.that(result).containsInAnyOrder(expectedRows);
pipeline.run().waitUntilFinish();
}
/** GROUP-BY with TUMBLE window(aka fix_time_window) with bounded PCollection. */
@Test
public void testTumbleWindowWithBounded() throws Exception {
runTumbleWindow(boundedInput1);
}
/** GROUP-BY with TUMBLE window(aka fix_time_window) with unbounded PCollection. */
@Test
public void testTumbleWindowWithUnbounded() throws Exception {
runTumbleWindow(unboundedInput1);
}
@Test
public void testTumbleWindowWith31DaysBounded() throws Exception {
runTumbleWindowFor31Days(boundedInputMonthly);
}
private void runTumbleWindowFor31Days(PCollection<Row> input) throws Exception {
String sql =
"SELECT f_int2, COUNT(*) AS `getFieldCount`,"
+ " TUMBLE_START(f_timestamp, INTERVAL '31' DAY) AS `window_start`, "
+ " TUMBLE_END(f_timestamp, INTERVAL '31' DAY) AS `window_end` "
+ " FROM TABLE_A"
+ " GROUP BY f_int2, TUMBLE(f_timestamp, INTERVAL '31' DAY)";
PCollection<Row> result =
PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
.apply("testTumbleWindow", SqlTransform.query(sql));
Schema resultType =
Schema.builder()
.addInt32Field("f_int2")
.addInt64Field("size")
.addDateTimeField("window_start")
.addDateTimeField("window_end")
.build();
List<Row> expectedRows =
TestUtils.RowsBuilder.of(resultType)
.addRows(
0,
1L,
parseTimestampWithUTCTimeZone("2016-12-08 00:00:00"),
parseTimestampWithUTCTimeZone("2017-01-08 00:00:00"),
0,
1L,
parseTimestampWithUTCTimeZone("2017-01-08 00:00:00"),
parseTimestampWithUTCTimeZone("2017-02-08 00:00:00"),
0,
1L,
parseTimestampWithUTCTimeZone("2017-02-08 00:00:00"),
parseTimestampWithUTCTimeZone("2017-03-11 00:00:00"))
.getRows();
PAssert.that(result).containsInAnyOrder(expectedRows);
pipeline.run().waitUntilFinish();
}
private void runTumbleWindow(PCollection<Row> input) throws Exception {
String sql =
"SELECT f_int2, COUNT(*) AS `getFieldCount`,"
+ " TUMBLE_START(f_timestamp, INTERVAL '1' HOUR) AS `window_start`, "
+ " TUMBLE_END(f_timestamp, INTERVAL '1' HOUR) AS `window_end` "
+ " FROM TABLE_A"
+ " GROUP BY f_int2, TUMBLE(f_timestamp, INTERVAL '1' HOUR)";
PCollection<Row> result =
PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
.apply("testTumbleWindow", SqlTransform.query(sql));
Schema resultType =
Schema.builder()
.addInt32Field("f_int2")
.addInt64Field("size")
.addDateTimeField("window_start")
.addDateTimeField("window_end")
.build();
List<Row> expectedRows =
TestUtils.RowsBuilder.of(resultType)
.addRows(
0,
3L,
parseTimestampWithoutTimeZone("2017-01-01 01:00:00"),
parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
0,
1L,
parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
parseTimestampWithoutTimeZone("2017-01-01 03:00:00"))
.getRows();
PAssert.that(result).containsInAnyOrder(expectedRows);
pipeline.run().waitUntilFinish();
}
/**
* Tests that a trigger set up prior to a SQL statement still is effective within the SQL
* statement.
*/
@Test
@Category(UsesTestStream.class)
public void testTriggeredTumble() throws Exception {
Schema inputSchema =
Schema.builder().addInt32Field("f_int").addDateTimeField("f_timestamp").build();
PCollection<Row> input =
pipeline.apply(
TestStream.create(inputSchema)
.addElements(
Row.withSchema(inputSchema)
.addValues(1, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
.build(),
Row.withSchema(inputSchema)
.addValues(2, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
.build())
.addElements(
Row.withSchema(inputSchema)
.addValues(3, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
.build())
.addElements(
Row.withSchema(inputSchema)
.addValues(4, parseTimestampWithoutTimeZone("2017-01-01 01:01:01"))
.build())
.advanceWatermarkToInfinity());
String sql =
"SELECT SUM(f_int) AS f_int_sum FROM PCOLLECTION"
+ " GROUP BY TUMBLE(f_timestamp, INTERVAL '1' HOUR)";
Schema outputSchema = Schema.builder().addInt32Field("fn_int_sum").build();
PCollection<Row> result =
input
.apply(
"Triggering",
Window.<Row>configure()
.triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))
.withAllowedLateness(Duration.ZERO)
.withOnTimeBehavior(Window.OnTimeBehavior.FIRE_IF_NON_EMPTY)
.accumulatingFiredPanes())
.apply("Windowed Query", SqlTransform.query(sql));
PAssert.that(result)
.containsInAnyOrder(
TestUtils.RowsBuilder.of(outputSchema)
.addRows(3)
.addRows(6)
.addRows(10)
.getRows());
pipeline.run().waitUntilFinish();
}
/** GROUP-BY with HOP window(aka sliding_window) with bounded PCollection. */
@Test
public void testHopWindowWithBounded() throws Exception {
runHopWindow(boundedInput1);
}
/** GROUP-BY with HOP window(aka sliding_window) with unbounded PCollection. */
@Test
public void testHopWindowWithUnbounded() throws Exception {
runHopWindow(unboundedInput1);
}
private void runHopWindow(PCollection<Row> input) throws Exception {
String sql =
"SELECT f_int2, COUNT(*) AS `getFieldCount`,"
+ " HOP_START(f_timestamp, INTERVAL '30' MINUTE, INTERVAL '1' HOUR) AS `window_start`, "
+ " HOP_END(f_timestamp, INTERVAL '30' MINUTE, INTERVAL '1' HOUR) AS `window_end` "
+ " FROM PCOLLECTION"
+ " GROUP BY f_int2, HOP(f_timestamp, INTERVAL '30' MINUTE, INTERVAL '1' HOUR)";
PCollection<Row> result = input.apply("testHopWindow", SqlTransform.query(sql));
Schema resultType =
Schema.builder()
.addInt32Field("f_int2")
.addInt64Field("size")
.addDateTimeField("window_start")
.addDateTimeField("window_end")
.build();
List<Row> expectedRows =
TestUtils.RowsBuilder.of(resultType)
.addRows(
0,
3L,
parseTimestampWithoutTimeZone("2017-01-01 00:30:00"),
parseTimestampWithoutTimeZone("2017-01-01 01:30:00"),
0,
3L,
parseTimestampWithoutTimeZone("2017-01-01 01:00:00"),
parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
0,
1L,
parseTimestampWithoutTimeZone("2017-01-01 01:30:00"),
parseTimestampWithoutTimeZone("2017-01-01 02:30:00"),
0,
1L,
parseTimestampWithoutTimeZone("2017-01-01 02:00:00"),
parseTimestampWithoutTimeZone("2017-01-01 03:00:00"))
.getRows();
PAssert.that(result).containsInAnyOrder(expectedRows);
pipeline.run().waitUntilFinish();
}
/** GROUP-BY with SESSION window with bounded PCollection. */
@Test
public void testSessionWindowWithBounded() throws Exception {
runSessionWindow(boundedInput1);
}
/** GROUP-BY with SESSION window with unbounded PCollection. */
@Test
public void testSessionWindowWithUnbounded() throws Exception {
runSessionWindow(unboundedInput1);
}
private void runSessionWindow(PCollection<Row> input) throws Exception {
String sql =
"SELECT f_int2, COUNT(*) AS `getFieldCount`,"
+ " SESSION_START(f_timestamp, INTERVAL '5' MINUTE) AS `window_start`, "
+ " SESSION_END(f_timestamp, INTERVAL '5' MINUTE) AS `window_end` "
+ " FROM TABLE_A"
+ " GROUP BY f_int2, SESSION(f_timestamp, INTERVAL '5' MINUTE)";
PCollection<Row> result =
PCollectionTuple.of(new TupleTag<>("TABLE_A"), input)
.apply("testSessionWindow", SqlTransform.query(sql));
Schema resultType =
Schema.builder()
.addInt32Field("f_int2")
.addInt64Field("size")
.addDateTimeField("window_start")
.addDateTimeField("window_end")
.build();
List<Row> expectedRows =
TestUtils.RowsBuilder.of(resultType)
.addRows(
0,
3L,
parseTimestampWithoutTimeZone("2017-01-01 01:01:03"),
parseTimestampWithoutTimeZone("2017-01-01 01:01:03"),
0,
1L,
parseTimestampWithoutTimeZone("2017-01-01 02:04:03"),
parseTimestampWithoutTimeZone("2017-01-01 02:04:03"))
.getRows();
PAssert.that(result).containsInAnyOrder(expectedRows);
pipeline.run().waitUntilFinish();
}
@Test
public void testWindowOnNonTimestampField() throws Exception {
exceptions.expect(ParseException.class);
exceptions.expectCause(
hasMessage(
containsString(
"Cannot apply 'TUMBLE' to arguments of type 'TUMBLE(<BIGINT>, <INTERVAL HOUR>)'")));
pipeline.enableAbandonedNodeEnforcement(false);
String sql =
"SELECT f_int2, COUNT(*) AS `getFieldCount` FROM TABLE_A "
+ "GROUP BY f_int2, TUMBLE(f_long, INTERVAL '1' HOUR)";
PCollection<Row> result =
PCollectionTuple.of(new TupleTag<>("TABLE_A"), boundedInput1)
.apply("testWindowOnNonTimestampField", SqlTransform.query(sql));
pipeline.run().waitUntilFinish();
}
@Test
public void testUnsupportedDistinct() throws Exception {
exceptions.expect(ParseException.class);
exceptions.expectCause(hasMessage(containsString("Encountered \"*\"")));
pipeline.enableAbandonedNodeEnforcement(false);
String sql = "SELECT f_int2, COUNT(DISTINCT *) AS `size` " + "FROM PCOLLECTION GROUP BY f_int2";
PCollection<Row> result =
boundedInput1.apply("testUnsupportedDistinct", SqlTransform.query(sql));
pipeline.run().waitUntilFinish();
}
@Test
public void testUnsupportedDistinct2() throws Exception {
exceptions.expect(UnsupportedOperationException.class);
exceptions.expectMessage(containsString("COUNT DISTINCT"));
pipeline.enableAbandonedNodeEnforcement(false);
String sql = "SELECT f_int2, COUNT(DISTINCT f_long)" + "FROM PCOLLECTION GROUP BY f_int2";
boundedInput1.apply("testUnsupportedDistinct", SqlTransform.query(sql));
pipeline.run().waitUntilFinish();
}
@Test
public void testUnsupportedGlobalWindowWithDefaultTrigger() {
exceptions.expect(UnsupportedOperationException.class);
pipeline.enableAbandonedNodeEnforcement(false);
PCollection<Row> input =
unboundedInput1.apply(
"unboundedInput1.globalWindow",
Window.<Row>into(new GlobalWindows()).triggering(DefaultTrigger.of()));
String sql = "SELECT f_int2, COUNT(*) AS `size` FROM PCOLLECTION GROUP BY f_int2";
input.apply("testUnsupportedGlobalWindows", SqlTransform.query(sql));
}
@Test
public void testSupportsGlobalWindowWithCustomTrigger() throws Exception {
pipeline.enableAbandonedNodeEnforcement(false);
DateTime startTime = parseTimestampWithoutTimeZone("2017-1-1 0:0:0");
Schema type =
Schema.builder()
.addInt32Field("f_intGroupingKey")
.addInt32Field("f_intValue")
.addDateTimeField("f_timestamp")
.build();
Object[] rows =
new Object[] {
0, 1, startTime.plusSeconds(0),
0, 2, startTime.plusSeconds(1),
0, 3, startTime.plusSeconds(2),
0, 4, startTime.plusSeconds(3),
0, 5, startTime.plusSeconds(4),
0, 6, startTime.plusSeconds(6)
};
PCollection<Row> input =
createTestPCollection(type, rows, "f_timestamp")
.apply(
Window.<Row>into(new GlobalWindows())
.triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(2)))
.discardingFiredPanes()
.withOnTimeBehavior(Window.OnTimeBehavior.FIRE_IF_NON_EMPTY));
String sql = "SELECT SUM(f_intValue) AS `sum` FROM PCOLLECTION GROUP BY f_intGroupingKey";
PCollection<Row> result = input.apply("sql", SqlTransform.query(sql));
assertEquals(new GlobalWindows(), result.getWindowingStrategy().getWindowFn());
PAssert.that(result).containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 7, 11)));
pipeline.run();
}
/** Query has all the input fields, so no projection is added. */
@Test
public void testSupportsAggregationWithoutProjection() throws Exception {
pipeline.enableAbandonedNodeEnforcement(false);
Schema schema =
Schema.builder().addInt32Field("f_intGroupingKey").addInt32Field("f_intValue").build();
PCollection<Row> inputRows =
pipeline
.apply(
Create.of(
TestUtils.rowsBuilderOf(schema)
.addRows(
0, 1,
0, 2,
1, 3,
2, 4,
2, 5)
.getRows()))
.setRowSchema(schema);
String sql = "SELECT SUM(f_intValue) FROM PCOLLECTION GROUP BY f_intGroupingKey";
PCollection<Row> result = inputRows.apply("sql", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 3, 9)));
pipeline.run();
}
@Test
public void testSupportsAggregationWithFilterWithoutProjection() throws Exception {
pipeline.enableAbandonedNodeEnforcement(false);
Schema schema =
Schema.builder().addInt32Field("f_intGroupingKey").addInt32Field("f_intValue").build();
PCollection<Row> inputRows =
pipeline
.apply(
Create.of(
TestUtils.rowsBuilderOf(schema)
.addRows(
0, 1,
0, 2,
1, 3,
2, 4,
2, 5)
.getRows()))
.setRowSchema(schema);
String sql =
"SELECT SUM(f_intValue) FROM PCOLLECTION WHERE f_intValue < 5 GROUP BY f_intGroupingKey";
PCollection<Row> result = inputRows.apply("sql", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 3, 4)));
pipeline.run();
}
@Test
public void testSupportsNonGlobalWindowWithCustomTrigger() {
DateTime startTime = parseTimestampWithoutTimeZone("2017-1-1 0:0:0");
Schema type =
Schema.builder()
.addInt32Field("f_intGroupingKey")
.addInt32Field("f_intValue")
.addDateTimeField("f_timestamp")
.build();
Object[] rows =
new Object[] {
0, 1, startTime.plusSeconds(0),
0, 2, startTime.plusSeconds(1),
0, 3, startTime.plusSeconds(2),
0, 4, startTime.plusSeconds(3),
0, 5, startTime.plusSeconds(4),
0, 6, startTime.plusSeconds(6)
};
PCollection<Row> input =
createTestPCollection(type, rows, "f_timestamp")
.apply(
Window.<Row>into(FixedWindows.of(Duration.standardSeconds(3)))
.triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(2)))
.discardingFiredPanes()
.withAllowedLateness(Duration.ZERO)
.withOnTimeBehavior(Window.OnTimeBehavior.FIRE_IF_NON_EMPTY));
String sql = "SELECT SUM(f_intValue) AS `sum` FROM PCOLLECTION GROUP BY f_intGroupingKey";
PCollection<Row> result = input.apply("sql", SqlTransform.query(sql));
assertEquals(
FixedWindows.of(Duration.standardSeconds(3)), result.getWindowingStrategy().getWindowFn());
PAssert.that(result)
.containsInAnyOrder(rowsWithSingleIntField("sum", Arrays.asList(3, 3, 9, 6)));
pipeline.run();
}
private List<Row> rowsWithSingleIntField(String fieldName, List<Integer> values) {
return TestUtils.rowsBuilderOf(Schema.builder().addInt32Field(fieldName).build())
.addRows(values)
.getRows();
}
private PCollection<Row> createTestPCollection(
Schema type, Object[] rows, String timestampField) {
return TestUtils.rowsBuilderOf(type)
.addRows(rows)
.getPCollectionBuilder()
.inPipeline(pipeline)
.withTimestampField(timestampField)
.buildUnbounded();
}
} |
Aren't these all `validateTransform()` calls? | public void testReadValidationFailsMissingConfiguration() {
HadoopInputFormatIO.Read<String, String> read = HadoopInputFormatIO.<String, String>read();
thrown.expect(NullPointerException.class);
read.validate(PipelineOptionsFactory.create());
} | read.validate(PipelineOptionsFactory.create()); | public void testReadValidationFailsMissingConfiguration() {
HadoopInputFormatIO.Read<String, String> read = HadoopInputFormatIO.<String, String>read();
thrown.expect(NullPointerException.class);
read.validateTransform();
} | class HadoopInputFormatIOTest {
static SerializableConfiguration serConf;
static SimpleFunction<Text, String> myKeyTranslate;
static SimpleFunction<Employee, String> myValueTranslate;
@Rule public final transient TestPipeline p = TestPipeline.create();
@Rule public ExpectedException thrown = ExpectedException.none();
private PBegin input = PBegin.in(p);
@BeforeClass
public static void setUp() throws IOException, InterruptedException {
serConf = loadTestConfiguration(
EmployeeInputFormat.class,
Text.class,
Employee.class);
myKeyTranslate = new SimpleFunction<Text, String>() {
@Override
public String apply(Text input) {
return input.toString();
}
};
myValueTranslate = new SimpleFunction<Employee, String>() {
@Override
public String apply(Employee input) {
return input.getEmpName() + "_" + input.getEmpAddress();
}
};
}
@Test
public void testReadBuildsCorrectly() {
HadoopInputFormatIO.Read<String, String> read = HadoopInputFormatIO.<String, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate)
.withValueTranslation(myValueTranslate);
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(myValueTranslate, read.getValueTranslationFunction());
assertEquals(myValueTranslate.getOutputTypeDescriptor(), read.getValueTypeDescriptor());
assertEquals(myKeyTranslate.getOutputTypeDescriptor(), read.getKeyTypeDescriptor());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} builds correctly in different order
* of with configuration/key translation/value translation. This test also validates output
* PCollection key/value classes are set correctly even if Hadoop configuration is set after
* setting key/value translation.
*/
@Test
public void testReadBuildsCorrectlyInDifferentOrder() {
HadoopInputFormatIO.Read<String, String> read =
HadoopInputFormatIO.<String, String>read()
.withValueTranslation(myValueTranslate)
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate);
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(myValueTranslate, read.getValueTranslationFunction());
assertEquals(myKeyTranslate.getOutputTypeDescriptor(), read.getKeyTypeDescriptor());
assertEquals(myValueTranslate.getOutputTypeDescriptor(), read.getValueTypeDescriptor());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} object creation if
* {@link HadoopInputFormatIO.Read
* once.
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testReadBuildsCorrectlyIfWithConfigurationIsCalledMoreThanOneTime()
throws IOException, InterruptedException {
SerializableConfiguration diffConf =
loadTestConfiguration(
EmployeeInputFormat.class,
Employee.class,
Text.class);
HadoopInputFormatIO.Read<String, String> read = HadoopInputFormatIO.<String, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate)
.withConfiguration(diffConf.getHadoopConfiguration());
assertEquals(diffConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(null, read.getValueTranslationFunction());
assertEquals(myKeyTranslate.getOutputTypeDescriptor(), read.getKeyTypeDescriptor());
assertEquals(diffConf.getHadoopConfiguration().getClass("value.class", Object.class), read
.getValueTypeDescriptor().getRawType());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation fails with
* null configuration. {@link HadoopInputFormatIO.Read
* method checks configuration is null and throws exception if it is null.
*/
@Test
public void testReadObjectCreationFailsIfConfigurationIsNull() {
thrown.expect(NullPointerException.class);
HadoopInputFormatIO.<Text, Employee>read()
.withConfiguration(null);
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation with only
* configuration.
*/
@Test
public void testReadObjectCreationWithConfiguration() {
HadoopInputFormatIO.Read<Text, Employee> read = HadoopInputFormatIO.<Text, Employee>read()
.withConfiguration(serConf.getHadoopConfiguration());
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(null, read.getKeyTranslationFunction());
assertEquals(null, read.getValueTranslationFunction());
assertEquals(serConf.getHadoopConfiguration().getClass("key.class", Object.class), read
.getKeyTypeDescriptor().getRawType());
assertEquals(serConf.getHadoopConfiguration().getClass("value.class", Object.class), read
.getValueTypeDescriptor().getRawType());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation fails with
* configuration and null key translation. {@link HadoopInputFormatIO.Read
* withKeyTranslation()} checks keyTranslation is null and throws exception if it null value is
* passed.
*/
@Test
public void testReadObjectCreationFailsIfKeyTranslationFunctionIsNull() {
thrown.expect(NullPointerException.class);
HadoopInputFormatIO.<String, Employee>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(null);
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation with
* configuration and key translation.
*/
@Test
public void testReadObjectCreationWithConfigurationKeyTranslation() {
HadoopInputFormatIO.Read<String, Employee> read = HadoopInputFormatIO.<String, Employee>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate);
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(null, read.getValueTranslationFunction());
assertEquals(myKeyTranslate.getOutputTypeDescriptor().getRawType(),
read.getKeyTypeDescriptor().getRawType());
assertEquals(serConf.getHadoopConfiguration().getClass("value.class", Object.class),
read.getValueTypeDescriptor().getRawType());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation fails with
* configuration and null value translation.
* {@link HadoopInputFormatIO.Read
* valueTranslation is null and throws exception if null value is passed.
*/
@Test
public void testReadObjectCreationFailsIfValueTranslationFunctionIsNull() {
thrown.expect(NullPointerException.class);
HadoopInputFormatIO.<Text, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withValueTranslation(null);
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation with
* configuration and value translation.
*/
@Test
public void testReadObjectCreationWithConfigurationValueTranslation() {
HadoopInputFormatIO.Read<Text, String> read = HadoopInputFormatIO.<Text, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withValueTranslation(myValueTranslate);
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(null, read.getKeyTranslationFunction());
assertEquals(myValueTranslate, read.getValueTranslationFunction());
assertEquals(serConf.getHadoopConfiguration().getClass("key.class", Object.class),
read.getKeyTypeDescriptor().getRawType());
assertEquals(myValueTranslate.getOutputTypeDescriptor().getRawType(),
read.getValueTypeDescriptor().getRawType());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation with
* configuration, key translation and value translation.
*/
@Test
public void testReadObjectCreationWithConfigurationKeyTranslationValueTranslation() {
HadoopInputFormatIO.Read<String, String> read = HadoopInputFormatIO.<String, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate)
.withValueTranslation(myValueTranslate);
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(myValueTranslate, read.getValueTranslationFunction());
assertEquals(myKeyTranslate.getOutputTypeDescriptor().getRawType(),
read.getKeyTypeDescriptor().getRawType());
assertEquals(myValueTranslate.getOutputTypeDescriptor().getRawType(),
read.getValueTypeDescriptor().getRawType());
}
/**
* This test validates functionality of {@link HadoopInputFormatIO.Read
* Read.validate()} function when Read transform is created without calling
* {@link HadoopInputFormatIO.Read
*/
@Test
/**
* This test validates functionality of {@link HadoopInputFormatIO.Read
* withConfiguration()} function when Hadoop InputFormat class is not provided by the user in
* configuration.
*/
@Test
public void testReadValidationFailsMissingInputFormatInConf() {
Configuration configuration = new Configuration();
configuration.setClass("key.class", Text.class, Object.class);
configuration.setClass("value.class", Employee.class, Object.class);
thrown.expect(NullPointerException.class);
HadoopInputFormatIO.<Text, Employee>read()
.withConfiguration(configuration);
}
/**
* This test validates functionality of {@link HadoopInputFormatIO.Read
* withConfiguration()} function when key class is not provided by the user in configuration.
*/
@Test
public void testReadValidationFailsMissingKeyClassInConf() {
Configuration configuration = new Configuration();
configuration.setClass("mapreduce.job.inputformat.class", EmployeeInputFormat.class,
InputFormat.class);
configuration.setClass("value.class", Employee.class, Object.class);
thrown.expect(NullPointerException.class);
HadoopInputFormatIO.<Text, Employee>read()
.withConfiguration(configuration);
}
/**
* This test validates functionality of {@link HadoopInputFormatIO.Read
* withConfiguration()} function when value class is not provided by the user in configuration.
*/
@Test
public void testReadValidationFailsMissingValueClassInConf() {
Configuration configuration = new Configuration();
configuration.setClass("mapreduce.job.inputformat.class", EmployeeInputFormat.class,
InputFormat.class);
configuration.setClass("key.class", Text.class, Object.class);
thrown.expect(NullPointerException.class);
HadoopInputFormatIO.<Text, Employee>read().withConfiguration(configuration);
}
/**
* This test validates functionality of {@link HadoopInputFormatIO.Read
* Read.validate()} function when myKeyTranslate's (simple function provided by user for key
* translation) input type is not same as Hadoop InputFormat's keyClass(Which is property set in
* configuration as "key.class").
*/
@Test
public void testReadValidationFailsWithWrongInputTypeKeyTranslationFunction() {
SimpleFunction<LongWritable, String> myKeyTranslateWithWrongInputType =
new SimpleFunction<LongWritable, String>() {
@Override
public String apply(LongWritable input) {
return input.toString();
}
};
HadoopInputFormatIO.Read<String, Employee> read = HadoopInputFormatIO.<String, Employee>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslateWithWrongInputType);
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage(String.format(
"Key translation's input type is not same as hadoop InputFormat : %s key " + "class : %s",
serConf.getHadoopConfiguration().getClass("mapreduce.job.inputformat.class",
InputFormat.class), serConf.getHadoopConfiguration()
.getClass("key.class", Object.class)));
read.validate(PipelineOptionsFactory.create());
}
/**
* This test validates functionality of {@link HadoopInputFormatIO.Read
* Read.validate()} function when myValueTranslate's (simple function provided by user for value
* translation) input type is not same as Hadoop InputFormat's valueClass(Which is property set in
* configuration as "value.class").
*/
@Test
public void testReadValidationFailsWithWrongInputTypeValueTranslationFunction() {
SimpleFunction<LongWritable, String> myValueTranslateWithWrongInputType =
new SimpleFunction<LongWritable, String>() {
@Override
public String apply(LongWritable input) {
return input.toString();
}
};
HadoopInputFormatIO.Read<Text, String> read =
HadoopInputFormatIO.<Text, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withValueTranslation(myValueTranslateWithWrongInputType);
String expectedMessage =
String.format(
"Value translation's input type is not same as hadoop InputFormat : "
+ "%s value class : %s",
serConf.getHadoopConfiguration().getClass("mapreduce.job.inputformat.class",
InputFormat.class),
serConf.getHadoopConfiguration().getClass("value.class", Object.class));
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage(expectedMessage);
read.validate(PipelineOptionsFactory.create());
}
@Test
public void testReadingData() throws Exception {
HadoopInputFormatIO.Read<Text, Employee> read = HadoopInputFormatIO.<Text, Employee>read()
.withConfiguration(serConf.getHadoopConfiguration());
List<KV<Text, Employee>> expected = TestEmployeeDataSet.getEmployeeData();
PCollection<KV<Text, Employee>> actual = p.apply("ReadTest", read);
PAssert.that(actual).containsInAnyOrder(expected);
p.run();
}
/**
* This test validates functionality of
* {@link HadoopInputFormatIO.HadoopInputFormatBoundedSource
* populateDisplayData()}.
*/
@Test
public void testReadDisplayData() {
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit());
DisplayData displayData = DisplayData.from(boundedSource);
assertThat(
displayData,
hasDisplayItem("mapreduce.job.inputformat.class",
serConf.getHadoopConfiguration().get("mapreduce.job.inputformat.class")));
assertThat(displayData,
hasDisplayItem("key.class", serConf.getHadoopConfiguration().get("key.class")));
assertThat(displayData,
hasDisplayItem("value.class", serConf.getHadoopConfiguration().get("value.class")));
}
/**
* This test validates behavior of {@link HadoopInputFormatBoundedSource} if RecordReader object
* creation fails.
*/
@Test
public void testReadIfCreateRecordReaderFails() throws Exception {
thrown.expect(Exception.class);
thrown.expectMessage("Exception in creating RecordReader");
InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenThrow(
new IOException("Exception in creating RecordReader"));
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit());
boundedSource.setInputFormatObj(mockInputFormat);
SourceTestUtils.readFromSource(boundedSource, p.getOptions());
}
/**
* This test validates behavior of HadoopInputFormatSource if
* {@link InputFormat
*/
@Test
public void testReadWithNullCreateRecordReader() throws Exception {
InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
thrown.expect(IOException.class);
thrown.expectMessage(String.format("Null RecordReader object returned by %s",
mockInputFormat.getClass()));
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenReturn(null);
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit());
boundedSource.setInputFormatObj(mockInputFormat);
SourceTestUtils.readFromSource(boundedSource, p.getOptions());
}
/**
* This test validates behavior of
* {@link HadoopInputFormatBoundedSource.HadoopInputFormatReader
* InputFormat's {@link InputFormat
* records.
*/
@Test
public void testReadersStartWhenZeroRecords() throws Exception {
InputFormat mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
EmployeeRecordReader mockReader = Mockito.mock(EmployeeRecordReader.class);
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenReturn(mockReader);
Mockito.when(mockReader.nextKeyValue()).thenReturn(false);
InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit(mockInputSplit));
boundedSource.setInputFormatObj(mockInputFormat);
BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
assertEquals(false, reader.start());
assertEquals(Double.valueOf(1), reader.getFractionConsumed());
reader.close();
}
/**
* This test validates the method getFractionConsumed()- which indicates the progress of the read
* in range of 0 to 1.
*/
@Test
public void testReadersGetFractionConsumed() throws Exception {
List<KV<Text, Employee>> referenceRecords = TestEmployeeDataSet.getEmployeeData();
HadoopInputFormatBoundedSource<Text, Employee> hifSource = getTestHIFSource(
EmployeeInputFormat.class,
Text.class,
Employee.class,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class));
long estimatedSize = hifSource.getEstimatedSizeBytes(p.getOptions());
assertEquals(referenceRecords.size(), estimatedSize);
List<BoundedSource<KV<Text, Employee>>> boundedSourceList =
hifSource.split(0, p.getOptions());
assertEquals(TestEmployeeDataSet.NUMBER_OF_SPLITS, boundedSourceList.size());
List<KV<Text, Employee>> bundleRecords = new ArrayList<>();
for (BoundedSource<KV<Text, Employee>> source : boundedSourceList) {
List<KV<Text, Employee>> elements = new ArrayList<KV<Text, Employee>>();
BoundedReader<KV<Text, Employee>> reader = source.createReader(p.getOptions());
float recordsRead = 0;
assertEquals(Double.valueOf(0), reader.getFractionConsumed());
boolean start = reader.start();
assertEquals(true, start);
if (start) {
elements.add(reader.getCurrent());
boolean advance = reader.advance();
assertEquals(
Double.valueOf(++recordsRead / TestEmployeeDataSet.NUMBER_OF_RECORDS_IN_EACH_SPLIT),
reader.getFractionConsumed());
assertEquals(true, advance);
while (advance) {
elements.add(reader.getCurrent());
advance = reader.advance();
assertEquals(
Double.valueOf(++recordsRead / TestEmployeeDataSet.NUMBER_OF_RECORDS_IN_EACH_SPLIT),
reader.getFractionConsumed());
}
bundleRecords.addAll(elements);
}
assertEquals(Double.valueOf(1), reader.getFractionConsumed());
reader.close();
}
assertThat(bundleRecords, containsInAnyOrder(referenceRecords.toArray()));
}
/**
* This test validates the method getFractionConsumed()- when a bad progress value is returned by
* the inputformat.
*/
@Test
public void testGetFractionConsumedForBadProgressValue() throws Exception {
InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
EmployeeRecordReader mockReader = Mockito.mock(EmployeeRecordReader.class);
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenReturn(mockReader);
Mockito.when(mockReader.nextKeyValue()).thenReturn(true);
Mockito.when(mockReader.getProgress()).thenReturn(2.0F);
InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit(mockInputSplit));
boundedSource.setInputFormatObj(mockInputFormat);
BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
assertEquals(Double.valueOf(0), reader.getFractionConsumed());
boolean start = reader.start();
assertEquals(true, start);
if (start) {
boolean advance = reader.advance();
assertEquals(null, reader.getFractionConsumed());
assertEquals(true, advance);
if (advance) {
advance = reader.advance();
assertEquals(null, reader.getFractionConsumed());
}
}
assertEquals(null, reader.getFractionConsumed());
reader.close();
}
/**
* This test validates that reader and its parent source reads the same records.
*/
@Test
public void testReaderAndParentSourceReadsSameData() throws Exception {
InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit(mockInputSplit));
BoundedReader<KV<Text, Employee>> reader = boundedSource
.createReader(p.getOptions());
SourceTestUtils.assertUnstartedReaderReadsSameAsItsSource(reader, p.getOptions());
}
/**
* This test verifies that the method
* {@link HadoopInputFormatBoundedSource.HadoopInputFormatReader
* getCurrentSource()} returns correct source object.
*/
@Test
public void testGetCurrentSourceFunction() throws Exception {
SerializableSplit split = new SerializableSplit();
BoundedSource<KV<Text, Employee>> source =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
split);
BoundedReader<KV<Text, Employee>> hifReader = source.createReader(p.getOptions());
BoundedSource<KV<Text, Employee>> hifSource = hifReader.getCurrentSource();
assertEquals(hifSource, source);
}
/**
* This test validates behavior of
* {@link HadoopInputFormatBoundedSource
* createReader()} | class HadoopInputFormatIOTest {
static SerializableConfiguration serConf;
static SimpleFunction<Text, String> myKeyTranslate;
static SimpleFunction<Employee, String> myValueTranslate;
@Rule public final transient TestPipeline p = TestPipeline.create();
@Rule public ExpectedException thrown = ExpectedException.none();
private PBegin input = PBegin.in(p);
@BeforeClass
public static void setUp() throws IOException, InterruptedException {
serConf = loadTestConfiguration(
EmployeeInputFormat.class,
Text.class,
Employee.class);
myKeyTranslate = new SimpleFunction<Text, String>() {
@Override
public String apply(Text input) {
return input.toString();
}
};
myValueTranslate = new SimpleFunction<Employee, String>() {
@Override
public String apply(Employee input) {
return input.getEmpName() + "_" + input.getEmpAddress();
}
};
}
@Test
public void testReadBuildsCorrectly() {
HadoopInputFormatIO.Read<String, String> read = HadoopInputFormatIO.<String, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate)
.withValueTranslation(myValueTranslate);
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(myValueTranslate, read.getValueTranslationFunction());
assertEquals(myValueTranslate.getOutputTypeDescriptor(), read.getValueTypeDescriptor());
assertEquals(myKeyTranslate.getOutputTypeDescriptor(), read.getKeyTypeDescriptor());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} builds correctly in different order
* of with configuration/key translation/value translation. This test also validates output
* PCollection key/value classes are set correctly even if Hadoop configuration is set after
* setting key/value translation.
*/
@Test
public void testReadBuildsCorrectlyInDifferentOrder() {
HadoopInputFormatIO.Read<String, String> read =
HadoopInputFormatIO.<String, String>read()
.withValueTranslation(myValueTranslate)
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate);
assertEquals(serConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(myValueTranslate, read.getValueTranslationFunction());
assertEquals(myKeyTranslate.getOutputTypeDescriptor(), read.getKeyTypeDescriptor());
assertEquals(myValueTranslate.getOutputTypeDescriptor(), read.getValueTypeDescriptor());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} object creation if
* {@link HadoopInputFormatIO.Read
* once.
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testReadBuildsCorrectlyIfWithConfigurationIsCalledMoreThanOneTime()
throws IOException, InterruptedException {
SerializableConfiguration diffConf =
loadTestConfiguration(
EmployeeInputFormat.class,
Employee.class,
Text.class);
HadoopInputFormatIO.Read<String, String> read = HadoopInputFormatIO.<String, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslate)
.withConfiguration(diffConf.getHadoopConfiguration());
assertEquals(diffConf.getHadoopConfiguration(),
read.getConfiguration().getHadoopConfiguration());
assertEquals(myKeyTranslate, read.getKeyTranslationFunction());
assertEquals(null, read.getValueTranslationFunction());
assertEquals(myKeyTranslate.getOutputTypeDescriptor(), read.getKeyTypeDescriptor());
assertEquals(diffConf.getHadoopConfiguration().getClass("value.class", Object.class), read
.getValueTypeDescriptor().getRawType());
}
/**
* This test validates {@link HadoopInputFormatIO.Read Read} transform object creation fails with
* null configuration. {@link HadoopInputFormatIO.Read
* method checks configuration is null and throws exception if it is null.
*/
@Test
public void testReadObjectCreationFailsIfConfigurationIsNull() {
thrown.expect(NullPointerException.class);
HadoopInputFormatIO.<Text, Employee>read()
.withConfiguration(null);
}
/**
 * Validates {@link HadoopInputFormatIO.Read Read} creation when only a configuration is
 * supplied: translation functions stay unset and key/value types are derived from the
 * configuration's "key.class" and "value.class" entries.
 */
@Test
public void testReadObjectCreationWithConfiguration() {
  Configuration hadoopConf = serConf.getHadoopConfiguration();
  HadoopInputFormatIO.Read<Text, Employee> transform =
      HadoopInputFormatIO.<Text, Employee>read().withConfiguration(hadoopConf);
  assertEquals(hadoopConf, transform.getConfiguration().getHadoopConfiguration());
  // No translation functions were supplied.
  assertEquals(null, transform.getKeyTranslationFunction());
  assertEquals(null, transform.getValueTranslationFunction());
  // Types default to the configuration entries.
  assertEquals(hadoopConf.getClass("key.class", Object.class),
      transform.getKeyTypeDescriptor().getRawType());
  assertEquals(hadoopConf.getClass("value.class", Object.class),
      transform.getValueTypeDescriptor().getRawType());
}
/**
 * Validates that {@code withKeyTranslation(null)} throws a {@link NullPointerException} even
 * after a valid configuration has already been set.
 */
@Test
public void testReadObjectCreationFailsIfKeyTranslationFunctionIsNull() {
  // Build a valid partial transform first; only the null key translation should fail.
  HadoopInputFormatIO.Read<String, Employee> partial =
      HadoopInputFormatIO.<String, Employee>read()
          .withConfiguration(serConf.getHadoopConfiguration());
  thrown.expect(NullPointerException.class);
  partial.withKeyTranslation(null);
}
/**
 * Validates {@link HadoopInputFormatIO.Read Read} creation with a configuration plus a key
 * translation: the key type comes from the translation function's output type, while the value
 * type still comes from the configuration's "value.class" entry.
 */
@Test
public void testReadObjectCreationWithConfigurationKeyTranslation() {
  Configuration hadoopConf = serConf.getHadoopConfiguration();
  HadoopInputFormatIO.Read<String, Employee> transform =
      HadoopInputFormatIO.<String, Employee>read()
          .withConfiguration(hadoopConf)
          .withKeyTranslation(myKeyTranslate);
  assertEquals(hadoopConf, transform.getConfiguration().getHadoopConfiguration());
  assertEquals(myKeyTranslate, transform.getKeyTranslationFunction());
  assertEquals(null, transform.getValueTranslationFunction());
  // Key type is taken from the translation function, not from "key.class".
  assertEquals(myKeyTranslate.getOutputTypeDescriptor().getRawType(),
      transform.getKeyTypeDescriptor().getRawType());
  assertEquals(hadoopConf.getClass("value.class", Object.class),
      transform.getValueTypeDescriptor().getRawType());
}
/**
 * Validates that {@code withValueTranslation(null)} throws a {@link NullPointerException} even
 * after a valid configuration has already been set.
 */
@Test
public void testReadObjectCreationFailsIfValueTranslationFunctionIsNull() {
  // Build a valid partial transform first; only the null value translation should fail.
  HadoopInputFormatIO.Read<Text, String> partial =
      HadoopInputFormatIO.<Text, String>read()
          .withConfiguration(serConf.getHadoopConfiguration());
  thrown.expect(NullPointerException.class);
  partial.withValueTranslation(null);
}
/**
 * Validates {@link HadoopInputFormatIO.Read Read} creation with a configuration plus a value
 * translation: the value type comes from the translation function's output type, while the key
 * type still comes from the configuration's "key.class" entry.
 */
@Test
public void testReadObjectCreationWithConfigurationValueTranslation() {
  Configuration hadoopConf = serConf.getHadoopConfiguration();
  HadoopInputFormatIO.Read<Text, String> transform =
      HadoopInputFormatIO.<Text, String>read()
          .withConfiguration(hadoopConf)
          .withValueTranslation(myValueTranslate);
  assertEquals(hadoopConf, transform.getConfiguration().getHadoopConfiguration());
  assertEquals(null, transform.getKeyTranslationFunction());
  assertEquals(myValueTranslate, transform.getValueTranslationFunction());
  assertEquals(hadoopConf.getClass("key.class", Object.class),
      transform.getKeyTypeDescriptor().getRawType());
  // Value type is taken from the translation function, not from "value.class".
  assertEquals(myValueTranslate.getOutputTypeDescriptor().getRawType(),
      transform.getValueTypeDescriptor().getRawType());
}
/**
 * Validates {@link HadoopInputFormatIO.Read Read} creation when configuration, key translation,
 * and value translation are all supplied: both output types come from the respective
 * translation functions.
 */
@Test
public void testReadObjectCreationWithConfigurationKeyTranslationValueTranslation() {
  HadoopInputFormatIO.Read<String, String> transform =
      HadoopInputFormatIO.<String, String>read()
          .withConfiguration(serConf.getHadoopConfiguration())
          .withKeyTranslation(myKeyTranslate)
          .withValueTranslation(myValueTranslate);
  assertEquals(serConf.getHadoopConfiguration(),
      transform.getConfiguration().getHadoopConfiguration());
  assertEquals(myKeyTranslate, transform.getKeyTranslationFunction());
  assertEquals(myValueTranslate, transform.getValueTranslationFunction());
  // Both descriptors are derived from the translation functions' output types.
  assertEquals(myKeyTranslate.getOutputTypeDescriptor().getRawType(),
      transform.getKeyTypeDescriptor().getRawType());
  assertEquals(myValueTranslate.getOutputTypeDescriptor().getRawType(),
      transform.getValueTypeDescriptor().getRawType());
}
// NOTE(review): a test for Read.validateTransform() without withConfiguration() appears to have
// been removed here, leaving behind its orphaned Javadoc and a stray @Test annotation. Two @Test
// annotations on one method do not compile (duplicate annotation), so the stray one was dropped.
// TODO: restore the missing validateTransform() test.
/**
 * This test validates functionality of {@link HadoopInputFormatIO.Read
 * withConfiguration()} function when Hadoop InputFormat class is not provided by the user in
 * configuration.
 */
@Test
public void testReadValidationFailsMissingInputFormatInConf() {
  Configuration configuration = new Configuration();
  configuration.setClass("key.class", Text.class, Object.class);
  configuration.setClass("value.class", Employee.class, Object.class);
  // "mapreduce.job.inputformat.class" is deliberately absent, so construction must fail.
  thrown.expect(NullPointerException.class);
  HadoopInputFormatIO.<Text, Employee>read()
      .withConfiguration(configuration);
}
/**
 * Validates that {@code withConfiguration()} fails when the configuration does not define
 * the "key.class" entry.
 */
@Test
public void testReadValidationFailsMissingKeyClassInConf() {
  Configuration conf = new Configuration();
  conf.setClass("mapreduce.job.inputformat.class", EmployeeInputFormat.class,
      InputFormat.class);
  conf.setClass("value.class", Employee.class, Object.class);
  // "key.class" is intentionally left unset.
  thrown.expect(NullPointerException.class);
  HadoopInputFormatIO.<Text, Employee>read().withConfiguration(conf);
}
/**
 * Validates that {@code withConfiguration()} fails when the configuration does not define
 * the "value.class" entry.
 */
@Test
public void testReadValidationFailsMissingValueClassInConf() {
  Configuration conf = new Configuration();
  conf.setClass("mapreduce.job.inputformat.class", EmployeeInputFormat.class,
      InputFormat.class);
  conf.setClass("key.class", Text.class, Object.class);
  // "value.class" is intentionally left unset.
  thrown.expect(NullPointerException.class);
  HadoopInputFormatIO.<Text, Employee>read().withConfiguration(conf);
}
/**
 * This test validates functionality of {@link HadoopInputFormatIO.Read
 * Read.validateTransform()} function when myKeyTranslate's (simple function provided by user for
 * key translation) input type is not same as Hadoop InputFormat's keyClass(Which is property set
 * in configuration as "key.class").
 */
@Test
public void testReadValidationFailsWithWrongInputTypeKeyTranslationFunction() {
// Key translation whose input is LongWritable, deliberately mismatching the configured key class.
SimpleFunction<LongWritable, String> myKeyTranslateWithWrongInputType =
new SimpleFunction<LongWritable, String>() {
@Override
public String apply(LongWritable input) {
return input.toString();
}
};
HadoopInputFormatIO.Read<String, Employee> read = HadoopInputFormatIO.<String, Employee>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withKeyTranslation(myKeyTranslateWithWrongInputType);
// Expectations must be registered before validateTransform() triggers the failure.
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage(String.format(
"Key translation's input type is not same as hadoop InputFormat : %s key " + "class : %s",
serConf.getHadoopConfiguration().getClass("mapreduce.job.inputformat.class",
InputFormat.class), serConf.getHadoopConfiguration()
.getClass("key.class", Object.class)));
read.validateTransform();
}
/**
 * This test validates functionality of {@link HadoopInputFormatIO.Read
 * Read.validateTransform()} function when myValueTranslate's (simple function provided by user
 * for value translation) input type is not same as Hadoop InputFormat's valueClass(Which is
 * property set in configuration as "value.class").
 */
@Test
public void testReadValidationFailsWithWrongInputTypeValueTranslationFunction() {
// Value translation whose input is LongWritable, deliberately mismatching the configured value class.
SimpleFunction<LongWritable, String> myValueTranslateWithWrongInputType =
new SimpleFunction<LongWritable, String>() {
@Override
public String apply(LongWritable input) {
return input.toString();
}
};
HadoopInputFormatIO.Read<Text, String> read =
HadoopInputFormatIO.<Text, String>read()
.withConfiguration(serConf.getHadoopConfiguration())
.withValueTranslation(myValueTranslateWithWrongInputType);
// Expected failure message must match the format produced by validateTransform().
String expectedMessage =
String.format(
"Value translation's input type is not same as hadoop InputFormat : "
+ "%s value class : %s",
serConf.getHadoopConfiguration().getClass("mapreduce.job.inputformat.class",
InputFormat.class),
serConf.getHadoopConfiguration().getClass("value.class", Object.class));
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage(expectedMessage);
read.validateTransform();
}
/**
 * End-to-end read test: running the pipeline with the configured transform must yield exactly
 * the employee records of the test data set, in any order.
 */
@Test
public void testReadingData() throws Exception {
  HadoopInputFormatIO.Read<Text, Employee> readTransform =
      HadoopInputFormatIO.<Text, Employee>read()
          .withConfiguration(serConf.getHadoopConfiguration());
  List<KV<Text, Employee>> expectedRecords = TestEmployeeDataSet.getEmployeeData();
  PCollection<KV<Text, Employee>> result = p.apply("ReadTest", readTransform);
  PAssert.that(result).containsInAnyOrder(expectedRecords);
  p.run();
}
/**
 * Validates that {@link HadoopInputFormatIO.HadoopInputFormatBoundedSource
 * populateDisplayData()} surfaces the InputFormat class and the key/value classes taken from
 * the Hadoop configuration.
 */
@Test
public void testReadDisplayData() {
  HadoopInputFormatBoundedSource<Text, Employee> source =
      new HadoopInputFormatBoundedSource<Text, Employee>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null,
          null,
          new SerializableSplit());
  DisplayData displayData = DisplayData.from(source);
  Configuration hadoopConf = serConf.getHadoopConfiguration();
  // Each display item must mirror the corresponding configuration entry.
  assertThat(
      displayData,
      hasDisplayItem("mapreduce.job.inputformat.class",
          hadoopConf.get("mapreduce.job.inputformat.class")));
  assertThat(displayData, hasDisplayItem("key.class", hadoopConf.get("key.class")));
  assertThat(displayData, hasDisplayItem("value.class", hadoopConf.get("value.class")));
}
/**
 * This test validates behavior of {@link HadoopInputFormatBoundedSource} if RecordReader object
 * creation fails.
 */
@Test
public void testReadIfCreateRecordReaderFails() throws Exception {
// Expectations are registered up front; the read below must surface the reader-creation failure.
thrown.expect(Exception.class);
thrown.expectMessage("Exception in creating RecordReader");
// Mock InputFormat whose createRecordReader() always throws.
InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenThrow(
new IOException("Exception in creating RecordReader"));
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit());
// Inject the failing InputFormat so the source uses it instead of instantiating its own.
boundedSource.setInputFormatObj(mockInputFormat);
SourceTestUtils.readFromSource(boundedSource, p.getOptions());
}
/**
 * This test validates behavior of HadoopInputFormatSource if
 * {@link InputFormat} returns a null RecordReader: the read must fail with an IOException
 * naming the offending InputFormat class.
 */
@Test
public void testReadWithNullCreateRecordReader() throws Exception {
InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
// The expected message embeds the mock's runtime class, so it is built after mock creation.
thrown.expect(IOException.class);
thrown.expectMessage(String.format("Null RecordReader object returned by %s",
mockInputFormat.getClass()));
// Stub createRecordReader() to return null, the condition under test.
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenReturn(null);
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit());
// Inject the misbehaving InputFormat so the source uses it when reading.
boundedSource.setInputFormatObj(mockInputFormat);
SourceTestUtils.readFromSource(boundedSource, p.getOptions());
}
/**
 * This test validates behavior of
 * {@link HadoopInputFormatBoundedSource.HadoopInputFormatReader} when the
 * InputFormat's {@link InputFormat} RecordReader produces zero records: start() must return
 * false and the fraction consumed must immediately be 1.
 */
@Test
public void testReadersStartWhenZeroRecords() throws Exception {
// NOTE(review): raw InputFormat type here — presumably to sidestep a generics mismatch when
// stubbing createRecordReader() with the concrete EmployeeRecordReader; confirm before adding
// type arguments.
InputFormat mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
EmployeeRecordReader mockReader = Mockito.mock(EmployeeRecordReader.class);
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenReturn(mockReader);
// nextKeyValue() == false on the first call simulates an empty split.
Mockito.when(mockReader.nextKeyValue()).thenReturn(false);
InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit(mockInputSplit));
boundedSource.setInputFormatObj(mockInputFormat);
BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
// Empty split: no first record, and the reader reports full consumption.
assertEquals(false, reader.start());
assertEquals(Double.valueOf(1), reader.getFractionConsumed());
reader.close();
}
/**
 * This test validates the method getFractionConsumed()- which indicates the progress of the read
 * in range of 0 to 1.
 */
@Test
public void testReadersGetFractionConsumed() throws Exception {
List<KV<Text, Employee>> referenceRecords = TestEmployeeDataSet.getEmployeeData();
HadoopInputFormatBoundedSource<Text, Employee> hifSource = getTestHIFSource(
EmployeeInputFormat.class,
Text.class,
Employee.class,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class));
// The estimated size of the test source equals its record count.
long estimatedSize = hifSource.getEstimatedSizeBytes(p.getOptions());
assertEquals(referenceRecords.size(), estimatedSize);
List<BoundedSource<KV<Text, Employee>>> boundedSourceList =
hifSource.split(0, p.getOptions());
assertEquals(TestEmployeeDataSet.NUMBER_OF_SPLITS, boundedSourceList.size());
List<KV<Text, Employee>> bundleRecords = new ArrayList<>();
// Read every split, asserting the consumed fraction after each record boundary.
for (BoundedSource<KV<Text, Employee>> source : boundedSourceList) {
List<KV<Text, Employee>> elements = new ArrayList<KV<Text, Employee>>();
BoundedReader<KV<Text, Employee>> reader = source.createReader(p.getOptions());
float recordsRead = 0;
// Before start(), nothing has been consumed.
assertEquals(Double.valueOf(0), reader.getFractionConsumed());
boolean start = reader.start();
assertEquals(true, start);
if (start) {
elements.add(reader.getCurrent());
boolean advance = reader.advance();
// Fraction grows by one record's share of the split after each advance.
assertEquals(
Double.valueOf(++recordsRead / TestEmployeeDataSet.NUMBER_OF_RECORDS_IN_EACH_SPLIT),
reader.getFractionConsumed());
assertEquals(true, advance);
while (advance) {
elements.add(reader.getCurrent());
advance = reader.advance();
assertEquals(
Double.valueOf(++recordsRead / TestEmployeeDataSet.NUMBER_OF_RECORDS_IN_EACH_SPLIT),
reader.getFractionConsumed());
}
bundleRecords.addAll(elements);
}
// After exhaustion the fraction must be exactly 1.
assertEquals(Double.valueOf(1), reader.getFractionConsumed());
reader.close();
}
// Across all splits the reader must have produced exactly the reference data.
assertThat(bundleRecords, containsInAnyOrder(referenceRecords.toArray()));
}
/**
 * This test validates the method getFractionConsumed()- when a bad progress value is returned by
 * the inputformat: the reader must report null rather than an out-of-range fraction.
 */
@Test
public void testGetFractionConsumedForBadProgressValue() throws Exception {
InputFormat<Text, Employee> mockInputFormat = Mockito.mock(EmployeeInputFormat.class);
EmployeeRecordReader mockReader = Mockito.mock(EmployeeRecordReader.class);
Mockito.when(
mockInputFormat.createRecordReader(Mockito.any(InputSplit.class),
Mockito.any(TaskAttemptContext.class))).thenReturn(mockReader);
Mockito.when(mockReader.nextKeyValue()).thenReturn(true);
// 2.0 is outside the valid [0, 1] progress range — the condition under test.
Mockito.when(mockReader.getProgress()).thenReturn(2.0F);
InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
new HadoopInputFormatBoundedSource<Text, Employee>(
serConf,
WritableCoder.of(Text.class),
AvroCoder.of(Employee.class),
null,
null,
new SerializableSplit(mockInputSplit));
boundedSource.setInputFormatObj(mockInputFormat);
BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
// Before start() the fraction is 0; once the bogus progress is consulted it becomes null.
assertEquals(Double.valueOf(0), reader.getFractionConsumed());
boolean start = reader.start();
assertEquals(true, start);
if (start) {
boolean advance = reader.advance();
assertEquals(null, reader.getFractionConsumed());
assertEquals(true, advance);
if (advance) {
advance = reader.advance();
assertEquals(null, reader.getFractionConsumed());
}
}
assertEquals(null, reader.getFractionConsumed());
reader.close();
}
/**
 * Validates that a reader reads exactly the same records as the source it was created from.
 */
@Test
public void testReaderAndParentSourceReadsSameData() throws Exception {
  InputSplit split = Mockito.mock(NewObjectsEmployeeInputSplit.class);
  HadoopInputFormatBoundedSource<Text, Employee> source =
      new HadoopInputFormatBoundedSource<Text, Employee>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null,
          null,
          new SerializableSplit(split));
  BoundedReader<KV<Text, Employee>> reader = source.createReader(p.getOptions());
  // Delegates the comparison to the SDK's standard source/reader consistency check.
  SourceTestUtils.assertUnstartedReaderReadsSameAsItsSource(reader, p.getOptions());
}
/**
 * Verifies that {@link HadoopInputFormatBoundedSource.HadoopInputFormatReader
 * getCurrentSource()} returns the source the reader was created from.
 */
@Test
public void testGetCurrentSourceFunction() throws Exception {
  SerializableSplit emptySplit = new SerializableSplit();
  BoundedSource<KV<Text, Employee>> parentSource =
      new HadoopInputFormatBoundedSource<Text, Employee>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null,
          null,
          emptySplit);
  BoundedReader<KV<Text, Employee>> reader = parentSource.createReader(p.getOptions());
  // The reader must hand back its creating source unchanged.
  assertEquals(parentSource, reader.getCurrentSource());
}
/**
* This test validates behavior of
* {@link HadoopInputFormatBoundedSource
* createReader()} |
I assume what you are saying if on the ServiceEndpoint we had successful requests very recently let's throw away the failed channel rather faster because chances are good that other channels are healthy? | private String transitTimeoutValidation(Timestamps timestamps, Instant currentTime, RntbdRequestManager requestManager, Channel channel) {
// Result is empty (success) unless one of the transit-timeout checks below trips.
String transitTimeoutValidationMessage = StringUtils.EMPTY;
// Only evaluate when detection is enabled and at least one request timed out in transit.
if (this.timeoutDetectionEnabled && timestamps.transitTimeoutCount() > 0) {
final Optional<RntbdContext> rntbdContext = requestManager.rntbdContext();
// Time elapsed since the channel last read anything.
long readDelay = Duration.between(timestamps.lastChannelReadTime(), currentTime).toNanos();
// Check 1: the channel has been read-silent for longer than the absolute detection limit.
if (readDelay >= this.timeoutTimeLimitInNanos) {
transitTimeoutValidationMessage = MessageFormat.format(
"{0} health check failed due to transit timeout detection time limit: [rntbdContext: {1},"
+ "lastChannelRead: {2}, timeoutTimeLimitInNanos: {3}]",
channel,
rntbdContext,
timestamps.lastReadTime,
this.timeoutTimeLimitInNanos);
logger.warn(transitTimeoutValidationMessage);
return transitTimeoutValidationMessage;
}
// Check 2: many transit timeouts combined with a (shorter) read-silence window.
if (timestamps.transitTimeoutCount() >= this.timeoutHighFrequencyThreshold
&& readDelay >= this.timeoutHighFrequencyTimeLimitInNanos) {
transitTimeoutValidationMessage = MessageFormat.format(
"{0} health check failed due to transit timeout high frequency threshold hit: [rntbdContext: {1},"
+ "lastChannelRead: {2}, transitTimeoutCount: {3}, timeoutHighFrequencyThreshold: {4}, timeoutHighFrequencyTimeLimitInNanos: {5}]",
channel,
rntbdContext,
timestamps.lastReadTime,
timestamps.transitTimeoutCount,
this.timeoutHighFrequencyThreshold,
this.timeoutHighFrequencyTimeLimitInNanos);
logger.warn(transitTimeoutValidationMessage);
return transitTimeoutValidationMessage;
}
// Check 3: timeouts specifically on writes, with their own threshold and window.
if (timestamps.tansitTimeoutWriteCount() >= this.timeoutOnWriteThreshold
&& readDelay >= this.timeoutOnWriteTimeLimitInNanos) {
transitTimeoutValidationMessage = MessageFormat.format(
"{0} health check failed due to transit timeout on write threshold hit: [rntbdContext: {1},"
+ "lastChannelRead: {2}, transitTimeoutWriteCount: {3}, timeoutOnWriteThreshold: {4}, timeoutOnWriteTimeLimitInNanos: {5}]",
channel,
rntbdContext,
timestamps.lastReadTime,
timestamps.transitTimeoutWriteCount,
this.timeoutOnWriteThreshold,
this.timeoutOnWriteTimeLimitInNanos);
logger.warn(transitTimeoutValidationMessage);
return transitTimeoutValidationMessage;
}
}
return transitTimeoutValidationMessage;
} | private String transitTimeoutValidation(Timestamps timestamps, Instant currentTime, RntbdRequestManager requestManager, Channel channel) {
// Result is empty (success) unless one of the transit-timeout checks below trips.
String transitTimeoutValidationMessage = StringUtils.EMPTY;
// Only evaluate when detection is enabled and at least one request timed out in transit.
if (this.timeoutDetectionEnabled && timestamps.transitTimeoutCount() > 0) {
// Skip detection while CPU load exceeds the configured threshold: timeouts are then more
// likely caused by local resource pressure than by an unhealthy channel.
if (CpuMemoryMonitor.getCpuLoad().isCpuOverThreshold(this.timeoutDetectionDisableCPUThreshold)) {
return transitTimeoutValidationMessage;
}
final Optional<RntbdContext> rntbdContext = requestManager.rntbdContext();
// Time elapsed since the channel last read anything.
long readDelay = Duration.between(timestamps.lastChannelReadTime(), currentTime).toNanos();
// Check 1: the channel has been read-silent for longer than the absolute detection limit.
if (readDelay >= this.timeoutTimeLimitInNanos) {
transitTimeoutValidationMessage = MessageFormat.format(
"{0} health check failed due to transit timeout detection time limit: [rntbdContext: {1},"
+ "lastChannelRead: {2}, timeoutTimeLimitInNanos: {3}]",
channel,
rntbdContext,
timestamps.lastReadTime,
this.timeoutTimeLimitInNanos);
logger.warn(transitTimeoutValidationMessage);
return transitTimeoutValidationMessage;
}
// Check 2: many transit timeouts combined with a (shorter) read-silence window.
if (timestamps.transitTimeoutCount() >= this.timeoutHighFrequencyThreshold
&& readDelay >= this.timeoutHighFrequencyTimeLimitInNanos) {
transitTimeoutValidationMessage = MessageFormat.format(
"{0} health check failed due to transit timeout high frequency threshold hit: [rntbdContext: {1},"
+ "lastChannelRead: {2}, transitTimeoutCount: {3}, timeoutHighFrequencyThreshold: {4}, timeoutHighFrequencyTimeLimitInNanos: {5}]",
channel,
rntbdContext,
timestamps.lastReadTime,
timestamps.transitTimeoutCount,
this.timeoutHighFrequencyThreshold,
this.timeoutHighFrequencyTimeLimitInNanos);
logger.warn(transitTimeoutValidationMessage);
return transitTimeoutValidationMessage;
}
// Check 3: timeouts specifically on writes, with their own threshold and window.
if (timestamps.tansitTimeoutWriteCount() >= this.timeoutOnWriteThreshold
&& readDelay >= this.timeoutOnWriteTimeLimitInNanos) {
transitTimeoutValidationMessage = MessageFormat.format(
"{0} health check failed due to transit timeout on write threshold hit: [rntbdContext: {1},"
+ "lastChannelRead: {2}, transitTimeoutWriteCount: {3}, timeoutOnWriteThreshold: {4}, timeoutOnWriteTimeLimitInNanos: {5}]",
channel,
rntbdContext,
timestamps.lastReadTime,
timestamps.transitTimeoutWriteCount,
this.timeoutOnWriteThreshold,
this.timeoutOnWriteTimeLimitInNanos);
logger.warn(transitTimeoutValidationMessage);
return transitTimeoutValidationMessage;
}
}
return transitTimeoutValidationMessage;
} | class RntbdClientChannelHealthChecker implements ChannelHealthChecker {
private static final Logger logger = LoggerFactory.getLogger(RntbdClientChannelHealthChecker.class);
private static final long recentReadWindowInNanos = 1_000_000_000L;
private static final long readHangGracePeriodInNanos = (45L + 10L) * 1_000_000_000L;
private static final long writeHangGracePeriodInNanos = 2L * 1_000_000_000L;
@JsonProperty
private final long idleConnectionTimeoutInNanos;
@JsonProperty
private final long readDelayLimitInNanos;
@JsonProperty
private final long writeDelayLimitInNanos;
@JsonProperty
private final long networkRequestTimeoutInNanos;
@JsonProperty
private final boolean timeoutDetectionEnabled;
@JsonProperty
private final long timeoutTimeLimitInNanos;
@JsonProperty
private final int timeoutHighFrequencyThreshold;
@JsonProperty
private final long timeoutHighFrequencyTimeLimitInNanos;
@JsonProperty
private final int timeoutOnWriteThreshold;
@JsonProperty
private final long timeoutOnWriteTimeLimitInNanos;
/**
 * Creates a channel health checker from the given RNTBD transport configuration.
 * <p>
 * The receive/send hang detection times must exceed the corresponding grace periods so that
 * an in-flight request has a chance to complete before the channel is declared hung.
 *
 * @param config the transport configuration; must not be {@code null}.
 */
public RntbdClientChannelHealthChecker(final Config config) {
checkNotNull(config, "expected non-null config");
checkArgument(config.receiveHangDetectionTimeInNanos() > readHangGracePeriodInNanos,
"config.receiveHangDetectionTimeInNanos: %s",
config.receiveHangDetectionTimeInNanos());
checkArgument(config.sendHangDetectionTimeInNanos() > writeHangGracePeriodInNanos,
"config.sendHangDetectionTimeInNanos: %s",
config.sendHangDetectionTimeInNanos());
// Snapshot all thresholds up front; the checker itself is immutable after construction.
this.idleConnectionTimeoutInNanos = config.idleConnectionTimeoutInNanos();
this.readDelayLimitInNanos = config.receiveHangDetectionTimeInNanos();
this.writeDelayLimitInNanos = config.sendHangDetectionTimeInNanos();
this.networkRequestTimeoutInNanos = config.tcpNetworkRequestTimeoutInNanos();
this.timeoutDetectionEnabled = config.timeoutDetectionEnabled();
this.timeoutTimeLimitInNanos = config.timeoutDetectionTimeLimitInNanos();
this.timeoutHighFrequencyThreshold = config.timeoutDetectionHighFrequencyThreshold();
this.timeoutHighFrequencyTimeLimitInNanos = config.timeoutDetectionHighFrequencyTimeLimitInNanos();
this.timeoutOnWriteThreshold = config.timeoutDetectionOnWriteThreshold();
this.timeoutOnWriteTimeLimitInNanos = config.timeoutDetectionOnWriteTimeLimitInNanos();
}
/**
 * Returns the idle connection timeout interval in nanoseconds.
 * <p>
 * A channel is considered idle when the time elapsed since the last channel read exceeds this
 * value; a non-positive value disables idle-connection checking.
 *
 * @return idle connection timeout interval in nanoseconds.
 */
public long idleConnectionTimeoutInNanos() {
return this.idleConnectionTimeoutInNanos;
}
/**
 * Returns the read delay limit in nanoseconds.
 * <p>
 * A channel will be declared unhealthy if the gap between the last channel write and the last
 * channel read grows beyond this value (after the read-hang grace period has elapsed).
 *
 * @return read delay limit in nanoseconds.
 */
public long readDelayLimitInNanos() {
return this.readDelayLimitInNanos;
}
/**
 * Returns the write delay limit in nanoseconds.
 * <p>
 * A channel will be declared unhealthy if the gap between the last channel write attempt and
 * the last completed channel write grows beyond this value (after the write-hang grace period
 * has elapsed).
 *
 * @return write delay limit in nanoseconds.
 */
public long writeDelayLimitInNanos() {
return this.writeDelayLimitInNanos;
}
/**
 * Determines whether the specified channel is healthy.
 *
 * @param channel the channel whose health is to be checked; must not be {@code null}.
 * @return a future resolving to {@code true} when the channel is healthy, {@code false}
 * otherwise; the future fails if the underlying detailed check fails.
 */
public Future<Boolean> isHealthy(final Channel channel) {
  checkNotNull(channel, "expected non-null channel");
  final Promise<Boolean> promise = channel.eventLoop().newPromise();
  this.isHealthyWithFailureReason(channel)
      .addListener((Future<String> future) -> {
        if (!future.isSuccess()) {
          promise.setFailure(future.cause());
          return;
        }
        // Healthy if and only if the detailed check reported the success sentinel.
        promise.setSuccess(RntbdHealthCheckResults.SuccessValue.equals(future.get()));
      });
  return promise;
}
/**
 * Determines whether a specified channel is healthy.
 *
 * @param channel A channel whose health is to be checked.
 * @return A future with a result reason {@link RntbdHealthCheckResults} if the channel is healthy, otherwise return the failed reason.
 */
public Future<String> isHealthyWithFailureReason(final Channel channel) {
checkNotNull(channel, "expected non-null channel");
final RntbdRequestManager requestManager = channel.pipeline().get(RntbdRequestManager.class);
final Promise<String> promise = channel.eventLoop().newPromise();
// No request manager on an active channel would be a programming error; an inactive channel
// without one is simply reported as unhealthy.
if (requestManager == null) {
reportIssueUnless(logger, !channel.isActive(), channel, "active with no request manager");
return promise.setSuccess("active with no request manager");
}
final Timestamps timestamps = requestManager.snapshotTimestamps();
final Instant currentTime = Instant.now();
// Fast path: a read completed within the recent-read window, so the channel is healthy and
// the remaining (more expensive) checks can be skipped.
if (Duration.between(timestamps.lastChannelReadTime(), currentTime).toNanos() < recentReadWindowInNanos) {
return promise.setSuccess(RntbdHealthCheckResults.SuccessValue);
}
// Run the individual validations in order; the first non-empty message wins.
String writeIsHangMessage = this.isWriteHang(timestamps, currentTime, requestManager, channel);
if (StringUtils.isNotEmpty(writeIsHangMessage)) {
return promise.setSuccess(writeIsHangMessage);
}
String readIsHangMessage = this.isReadHang(timestamps, currentTime, requestManager, channel);
if (StringUtils.isNotEmpty(readIsHangMessage)) {
return promise.setSuccess(readIsHangMessage);
}
String transitTimeoutValidationMessage = this.transitTimeoutValidation(timestamps, currentTime, requestManager, channel);
if (StringUtils.isNotEmpty(transitTimeoutValidationMessage)) {
return promise.setSuccess(transitTimeoutValidationMessage);
}
String idleConnectionValidationMessage = this.idleConnectionValidation(timestamps, currentTime, channel);
if(StringUtils.isNotEmpty(idleConnectionValidationMessage)) {
return promise.setSuccess(idleConnectionValidationMessage);
}
// All passive checks passed: actively probe the channel by flushing a health-check request.
channel.writeAndFlush(RntbdHealthCheckRequest.MESSAGE).addListener(completed -> {
if (completed.isSuccess()) {
promise.setSuccess(RntbdHealthCheckResults.SuccessValue);
} else {
String msg = MessageFormat.format(
"{0} health check request failed due to: {1}",
channel,
completed.cause().toString()
);
logger.warn(msg);
promise.setSuccess(msg);
}
});
return promise;
}
/**
 * Checks whether writes on the channel are hanging: the gap between the last write attempt and
 * the last completed write exceeds the write delay limit, and the grace period since the last
 * attempt has elapsed. Returns an empty string on success, otherwise a logged failure message.
 */
private String isWriteHang(Timestamps timestamps, Instant currentTime, RntbdRequestManager requestManager, Channel channel) {
// How long completed writes have been lagging behind write attempts.
final long writeDelayInNanos =
Duration.between(timestamps.lastChannelWriteTime(), timestamps.lastChannelWriteAttemptTime()).toNanos();
final long writeHangDurationInNanos =
Duration.between(timestamps.lastChannelWriteAttemptTime(), currentTime).toNanos();
String writeHangMessage = StringUtils.EMPTY;
if (writeDelayInNanos > this.writeDelayLimitInNanos && writeHangDurationInNanos > writeHangGracePeriodInNanos) {
final Optional<RntbdContext> rntbdContext = requestManager.rntbdContext();
final int pendingRequestCount = requestManager.pendingRequestCount();
writeHangMessage = MessageFormat.format(
"{0} health check failed due to non-responding write: [lastChannelWriteAttemptTime: {1}, " +
"lastChannelWriteTime: {2}, writeDelayInNanos: {3}, writeDelayLimitInNanos: {4}, " +
"rntbdContext: {5}, pendingRequestCount: {6}]",
channel,
timestamps.lastChannelWriteAttemptTime(),
timestamps.lastChannelWriteTime(),
writeDelayInNanos,
this.writeDelayLimitInNanos,
rntbdContext,
pendingRequestCount);
logger.warn(writeHangMessage);
}
return writeHangMessage;
}
/**
 * Checks whether reads on the channel are hanging: the gap between the last completed write and
 * the last read exceeds the read delay limit, and the grace period since the last write has
 * elapsed. Returns an empty string on success, otherwise a logged failure message.
 */
private String isReadHang(Timestamps timestamps, Instant currentTime, RntbdRequestManager requestManager, Channel channel) {
// How long reads have been lagging behind completed writes.
final long readDelay = Duration.between(timestamps.lastChannelReadTime(), timestamps.lastChannelWriteTime()).toNanos();
final long readHangDuration = Duration.between(timestamps.lastChannelWriteTime(), currentTime).toNanos();
String readHangMessage = StringUtils.EMPTY;
if (readDelay > this.readDelayLimitInNanos && readHangDuration > readHangGracePeriodInNanos) {
final Optional<RntbdContext> rntbdContext = requestManager.rntbdContext();
final int pendingRequestCount = requestManager.pendingRequestCount();
readHangMessage = MessageFormat.format(
"{0} health check failed due to non-responding read: [lastChannelWrite: {1}, lastChannelRead: {2}, "
+ "readDelay: {3}, readDelayLimit: {4}, rntbdContext: {5}, pendingRequestCount: {6}]",
channel,
timestamps.lastChannelWriteTime(),
timestamps.lastChannelReadTime(),
readDelay,
this.readDelayLimitInNanos,
rntbdContext,
pendingRequestCount);
logger.warn(readHangMessage);
}
return readHangMessage;
}
/**
 * Checks whether the channel has been idle (no reads) for longer than the configured
 * idle-connection timeout. A non-positive timeout disables the check. Returns an empty string
 * when the channel passes, otherwise a logged failure message.
 */
private String idleConnectionValidation(Timestamps timestamps, Instant currentTime, Channel channel) {
    String errorMessage = StringUtils.EMPTY;
    if (this.idleConnectionTimeoutInNanos > 0L) {
        // BUG FIX: Duration.between(start, end) computes end - start. The original call
        // between(currentTime, lastChannelReadTime) yielded a non-positive value whenever the
        // last read was in the past, so the idle timeout could never trigger. Argument order
        // now matches every other elapsed-time computation in this checker.
        if (Duration.between(timestamps.lastChannelReadTime(), currentTime).toNanos() > this.idleConnectionTimeoutInNanos) {
            errorMessage = MessageFormat.format(
                "{0} health check failed due to idle connection timeout: [lastChannelWrite: {1}, lastChannelRead: {2}, "
                    + "idleConnectionTimeout: {3}, currentTime: {4}]",
                channel,
                timestamps.lastChannelWriteTime(),
                timestamps.lastChannelReadTime(),
                idleConnectionTimeoutInNanos,
                currentTime);
            logger.warn(errorMessage);
        }
    }
    return errorMessage;
}
/**
 * Returns a string representation of this health checker's configured limits, rendered via
 * {@link RntbdObjectMapper}.
 */
@Override
public String toString() {
return RntbdObjectMapper.toString(this);
}
/**
 * Lock-free record of channel activity: last ping/read/write/write-attempt times plus
 * transit-timeout counters used by the health checker's timeout-detection logic.
 * <p>
 * All mutable fields are volatile and updated only through atomic field updaters, so an
 * instance may be mutated on the channel's event loop while being read or copied elsewhere.
 */
public static final class Timestamps {
    // Static atomic updaters avoid per-instance AtomicReference allocations for these hot fields.
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastPingUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastPingTime");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastReadUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastReadTime");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastWriteUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastWriteTime");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastWriteAttemptUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastWriteAttemptTime");
    private static final AtomicIntegerFieldUpdater<Timestamps> transitTimeoutCountUpdater =
        AtomicIntegerFieldUpdater.newUpdater(Timestamps.class, "transitTimeoutCount");
    private static final AtomicIntegerFieldUpdater<Timestamps> transitTimeoutWriteCountUpdater =
        AtomicIntegerFieldUpdater.newUpdater(Timestamps.class, "transitTimeoutWriteCount");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> transitTimeoutStartingTimeUpdater =
        newUpdater(Timestamps.class, Instant.class, "transitTimeoutStartingTime");

    private volatile Instant lastPingTime;
    private volatile Instant lastReadTime;
    private volatile Instant lastWriteTime;
    private volatile Instant lastWriteAttemptTime;
    // Number of requests that hit a transit timeout since the last reset.
    private volatile int transitTimeoutCount;
    // Subset of transitTimeoutCount attributable to non-read-only (write) requests.
    private volatile int transitTimeoutWriteCount;
    // Creation time of the first request in the current timeout streak; null after a reset.
    private volatile Instant transitTimeoutStartingTime;

    /** Creates a Timestamps whose activity times are all initialized to (approximately) now. */
    public Timestamps() {
        // Note: each field gets its own Instant.now(), so initial values may differ slightly.
        lastPingUpdater.set(this, Instant.now());
        lastReadUpdater.set(this, Instant.now());
        lastWriteUpdater.set(this, Instant.now());
        lastWriteAttemptUpdater.set(this, Instant.now());
    }

    /**
     * Copy constructor producing a point-in-time snapshot of {@code other}.
     *
     * @param other the instance to copy; must not be null.
     */
    @SuppressWarnings("CopyConstructorMissesField")
    public Timestamps(Timestamps other) {
        checkNotNull(other, "other: null");
        this.lastPingTime = lastPingUpdater.get(other);
        this.lastReadTime = lastReadUpdater.get(other);
        this.lastWriteTime = lastWriteUpdater.get(other);
        this.lastWriteAttemptTime = lastWriteAttemptUpdater.get(other);
        this.transitTimeoutCount = transitTimeoutCountUpdater.get(other);
        this.transitTimeoutWriteCount = transitTimeoutWriteCountUpdater.get(other);
        this.transitTimeoutStartingTime = transitTimeoutStartingTimeUpdater.get(other);
    }

    /** Records that a health-check ping completed now. */
    public void channelPingCompleted() {
        lastPingUpdater.set(this, Instant.now());
    }

    /** Records that a channel read completed now. */
    public void channelReadCompleted() {
        lastReadUpdater.set(this, Instant.now());
    }

    /** Records that a channel write was attempted now. */
    public void channelWriteAttempted() {
        lastWriteAttemptUpdater.set(this, Instant.now());
    }

    /** Records that a channel write completed now. */
    public void channelWriteCompleted() {
        lastWriteUpdater.set(this, Instant.now());
    }

    /**
     * Records one transit timeout; the first timeout of a streak also captures the request's
     * creation time as the streak's starting time.
     *
     * @param isReadOnly whether the timed-out request was read-only.
     * @param requestCreatedTime creation time of the timed-out request.
     */
    public void transitTimeout(boolean isReadOnly, Instant requestCreatedTime) {
        if (transitTimeoutCountUpdater.incrementAndGet(this) == 1) {
            transitTimeoutStartingTimeUpdater.set(this, requestCreatedTime);
        }
        if (!isReadOnly) {
            transitTimeoutWriteCountUpdater.incrementAndGet(this);
        }
    }

    /** Clears both transit-timeout counters and the streak starting time. */
    public void resetTransitTimeout() {
        transitTimeoutCountUpdater.set(this, 0);
        transitTimeoutWriteCountUpdater.set(this, 0);
        transitTimeoutStartingTimeUpdater.set(this, null);
    }

    @JsonProperty
    public Instant lastChannelPingTime() {
        return lastPingUpdater.get(this);
    }

    @JsonProperty
    public Instant lastChannelReadTime() {
        return lastReadUpdater.get(this);
    }

    @JsonProperty
    public Instant lastChannelWriteTime() {
        return lastWriteUpdater.get(this);
    }

    @JsonProperty
    public Instant lastChannelWriteAttemptTime() {
        return lastWriteAttemptUpdater.get(this);
    }

    @JsonProperty
    public int transitTimeoutCount() {
        return transitTimeoutCountUpdater.get(this);
    }

    // NOTE(review): method name is missing an 'r' ("tansit"); it is public API and doubles as the
    // serialized @JsonProperty name, so renaming would be a breaking change — flagged, not fixed.
    @JsonProperty
    public int tansitTimeoutWriteCount() {
        return transitTimeoutWriteCountUpdater.get(this);
    }

    @JsonProperty
    public Instant transitTimeoutStartingTime() {
        return transitTimeoutStartingTimeUpdater.get(this);
    }

    @Override
    public String toString() {
        return RntbdObjectMapper.toString(this);
    }
}
} | class RntbdClientChannelHealthChecker implements ChannelHealthChecker {
private static final Logger logger = LoggerFactory.getLogger(RntbdClientChannelHealthChecker.class);
private static final long recentReadWindowInNanos = 1_000_000_000L;
private static final long readHangGracePeriodInNanos = (45L + 10L) * 1_000_000_000L;
private static final long writeHangGracePeriodInNanos = 2L * 1_000_000_000L;
@JsonProperty
private final long idleConnectionTimeoutInNanos;
@JsonProperty
private final long readDelayLimitInNanos;
@JsonProperty
private final long writeDelayLimitInNanos;
@JsonProperty
private final long networkRequestTimeoutInNanos;
@JsonProperty
private final boolean timeoutDetectionEnabled;
@JsonProperty
private final double timeoutDetectionDisableCPUThreshold;
@JsonProperty
private final long timeoutTimeLimitInNanos;
@JsonProperty
private final int timeoutHighFrequencyThreshold;
@JsonProperty
private final long timeoutHighFrequencyTimeLimitInNanos;
@JsonProperty
private final int timeoutOnWriteThreshold;
@JsonProperty
private final long timeoutOnWriteTimeLimitInNanos;
/**
 * Creates a health checker from the RNTBD transport {@code Config}.
 * <p>
 * Validates up front that the configured hang-detection windows exceed the fixed grace
 * periods; otherwise the hang checks below would be meaningless.
 *
 * @param config the transport configuration; must not be null.
 */
public RntbdClientChannelHealthChecker(final Config config) {
    checkNotNull(config, "expected non-null config");
    checkArgument(config.receiveHangDetectionTimeInNanos() > readHangGracePeriodInNanos,
        "config.receiveHangDetectionTimeInNanos: %s",
        config.receiveHangDetectionTimeInNanos());
    checkArgument(config.sendHangDetectionTimeInNanos() > writeHangGracePeriodInNanos,
        "config.sendHangDetectionTimeInNanos: %s",
        config.sendHangDetectionTimeInNanos());
    this.idleConnectionTimeoutInNanos = config.idleConnectionTimeoutInNanos();
    // The receive/send hang-detection times become the read/write delay limits.
    this.readDelayLimitInNanos = config.receiveHangDetectionTimeInNanos();
    this.writeDelayLimitInNanos = config.sendHangDetectionTimeInNanos();
    this.networkRequestTimeoutInNanos = config.tcpNetworkRequestTimeoutInNanos();
    // Timeout-detection knobs; consumed by transitTimeoutValidation (defined elsewhere in class).
    this.timeoutDetectionEnabled = config.timeoutDetectionEnabled();
    this.timeoutDetectionDisableCPUThreshold = config.timeoutDetectionDisableCPUThreshold();
    this.timeoutTimeLimitInNanos = config.timeoutDetectionTimeLimitInNanos();
    this.timeoutHighFrequencyThreshold = config.timeoutDetectionHighFrequencyThreshold();
    this.timeoutHighFrequencyTimeLimitInNanos = config.timeoutDetectionHighFrequencyTimeLimitInNanos();
    this.timeoutOnWriteThreshold = config.timeoutDetectionOnWriteThreshold();
    this.timeoutOnWriteTimeLimitInNanos = config.timeoutDetectionOnWriteTimeLimitInNanos();
}
/**
 * Returns the idle connection timeout interval in nanoseconds.
 * <p>
 * A channel is considered idle when the time elapsed since the last channel read exceeds this
 * value; a non-positive value disables the idle-connection check.
 *
 * @return Idle connection timeout interval in nanoseconds.
 */
public long idleConnectionTimeoutInNanos() {
    return this.idleConnectionTimeoutInNanos;
}
/**
 * Returns the read delay limit in nanoseconds.
 * <p>
 * A channel will be declared unhealthy if the gap between the last channel write and the last
 * channel read grows beyond this value.
 * <p>
 * Constraint: must be greater than the read-hang grace period; the constructor enforces this
 * on the configured receive-hang detection time.
 *
 * @return Read delay limit in nanoseconds.
 */
public long readDelayLimitInNanos() {
    return this.readDelayLimitInNanos;
}
/**
 * Returns the write delay limit in nanoseconds.
 * <p>
 * A channel will be declared unhealthy if the gap between the last channel write attempt and
 * the last channel write grows beyond this value.
 * <p>
 * Constraint: must be greater than the write-hang grace period; the constructor enforces this
 * on the configured send-hang detection time.
 *
 * @return Write delay limit in nanoseconds.
 */
public long writeDelayLimitInNanos() {
    return this.writeDelayLimitInNanos;
}
/**
 * Determines whether a specified channel is healthy.
 *
 * @param channel A channel whose health is to be checked.
 * @return A future with a result of {@code true} if the channel is healthy, or {@code false} otherwise.
 */
public Future<Boolean> isHealthy(final Channel channel) {
    checkNotNull(channel, "expected non-null channel");
    final Promise<Boolean> result = channel.eventLoop().newPromise();
    // Adapt the reason-reporting check: the sentinel success value maps to TRUE, any other
    // reason string maps to FALSE, and an outright failure propagates as a failed promise.
    this.isHealthyWithFailureReason(channel).addListener((Future<String> future) -> {
        if (!future.isSuccess()) {
            result.setFailure(future.cause());
            return;
        }
        final boolean healthy = RntbdHealthCheckResults.SuccessValue.equals(future.get());
        result.setSuccess(healthy ? Boolean.TRUE : Boolean.FALSE);
    });
    return result;
}
/**
 * Determines whether a specified channel is healthy.
 *
 * @param channel A channel whose health is to be checked.
 * @return A future with a result reason {@link RntbdHealthCheckResults} if the channel is healthy, otherwise return the failed reason.
 */
public Future<String> isHealthyWithFailureReason(final Channel channel) {
    checkNotNull(channel, "expected non-null channel");
    final RntbdRequestManager requestManager = channel.pipeline().get(RntbdRequestManager.class);
    final Promise<String> promise = channel.eventLoop().newPromise();
    // A channel that is still active yet has no request manager is unusable; report that reason.
    if (requestManager == null) {
        reportIssueUnless(logger, !channel.isActive(), channel, "active with no request manager");
        return promise.setSuccess("active with no request manager");
    }
    // Work from a point-in-time copy so all checks below see a consistent set of timestamps.
    final Timestamps timestamps = requestManager.snapshotTimestamps();
    final Instant currentTime = Instant.now();
    // Fast path: a read completed within the recent-read window, so the channel is demonstrably alive.
    if (Duration.between(timestamps.lastChannelReadTime(), currentTime).toNanos() < recentReadWindowInNanos) {
        return promise.setSuccess(RntbdHealthCheckResults.SuccessValue);
    }
    // Each validation returns an empty string when it passes, or a failure reason when it trips.
    String writeIsHangMessage = this.isWriteHang(timestamps, currentTime, requestManager, channel);
    if (StringUtils.isNotEmpty(writeIsHangMessage)) {
        return promise.setSuccess(writeIsHangMessage);
    }
    String readIsHangMessage = this.isReadHang(timestamps, currentTime, requestManager, channel);
    if (StringUtils.isNotEmpty(readIsHangMessage)) {
        return promise.setSuccess(readIsHangMessage);
    }
    // transitTimeoutValidation is defined elsewhere in this class (not in this fragment).
    String transitTimeoutValidationMessage = this.transitTimeoutValidation(timestamps, currentTime, requestManager, channel);
    if (StringUtils.isNotEmpty(transitTimeoutValidationMessage)) {
        return promise.setSuccess(transitTimeoutValidationMessage);
    }
    String idleConnectionValidationMessage = this.idleConnectionValidation(timestamps, currentTime, channel);
    if(StringUtils.isNotEmpty(idleConnectionValidationMessage)) {
        return promise.setSuccess(idleConnectionValidationMessage);
    }
    // All passive checks passed: actively probe the channel with a no-op health-check write.
    // Note: a failed probe still completes the promise successfully, carrying the reason string.
    channel.writeAndFlush(RntbdHealthCheckRequest.MESSAGE).addListener(completed -> {
        if (completed.isSuccess()) {
            promise.setSuccess(RntbdHealthCheckResults.SuccessValue);
        } else {
            String msg = MessageFormat.format(
                "{0} health check request failed due to: {1}",
                channel,
                completed.cause().toString()
            );
            logger.warn(msg);
            promise.setSuccess(msg);
        }
    });
    return promise;
}
/**
 * Checks for a non-responding write: a write attempt has gone unacknowledged for longer than
 * the configured delay limit, and the grace period since that attempt has also elapsed.
 * The {@code timestamps} argument is a snapshot, so repeated reads are stable.
 *
 * @return an empty string when the check passes, otherwise the failure reason.
 */
private String isWriteHang(Timestamps timestamps, Instant currentTime, RntbdRequestManager requestManager, Channel channel) {
    final Instant lastAttempt = timestamps.lastChannelWriteAttemptTime();
    final Instant lastWrite = timestamps.lastChannelWriteTime();
    final long delayInNanos = Duration.between(lastWrite, lastAttempt).toNanos();
    final long hangInNanos = Duration.between(lastAttempt, currentTime).toNanos();
    if (delayInNanos <= this.writeDelayLimitInNanos || hangInNanos <= writeHangGracePeriodInNanos) {
        return StringUtils.EMPTY;
    }
    final Optional<RntbdContext> context = requestManager.rntbdContext();
    final int pendingRequests = requestManager.pendingRequestCount();
    final String message = MessageFormat.format(
        "{0} health check failed due to non-responding write: [lastChannelWriteAttemptTime: {1}, " +
            "lastChannelWriteTime: {2}, writeDelayInNanos: {3}, writeDelayLimitInNanos: {4}, " +
            "rntbdContext: {5}, pendingRequestCount: {6}]",
        channel,
        lastAttempt,
        lastWrite,
        delayInNanos,
        this.writeDelayLimitInNanos,
        context,
        pendingRequests);
    logger.warn(message);
    return message;
}
/**
 * Checks for a non-responding read: the last read lags the last write by more than the
 * configured read-delay limit, and the grace period since the last write has also elapsed.
 * The {@code timestamps} argument is a snapshot, so repeated reads are stable.
 *
 * @return an empty string when the check passes, otherwise the failure reason.
 */
private String isReadHang(Timestamps timestamps, Instant currentTime, RntbdRequestManager requestManager, Channel channel) {
    final Instant lastRead = timestamps.lastChannelReadTime();
    final Instant lastWrite = timestamps.lastChannelWriteTime();
    final long delayInNanos = Duration.between(lastRead, lastWrite).toNanos();
    final long hangInNanos = Duration.between(lastWrite, currentTime).toNanos();
    if (delayInNanos <= this.readDelayLimitInNanos || hangInNanos <= readHangGracePeriodInNanos) {
        return StringUtils.EMPTY;
    }
    final Optional<RntbdContext> context = requestManager.rntbdContext();
    final int pendingRequests = requestManager.pendingRequestCount();
    final String message = MessageFormat.format(
        "{0} health check failed due to non-responding read: [lastChannelWrite: {1}, lastChannelRead: {2}, "
            + "readDelay: {3}, readDelayLimit: {4}, rntbdContext: {5}, pendingRequestCount: {6}]",
        channel,
        lastWrite,
        lastRead,
        delayInNanos,
        this.readDelayLimitInNanos,
        context,
        pendingRequests);
    logger.warn(message);
    return message;
}
/**
 * Checks whether the channel has been idle (no reads) longer than the configured
 * idle-connection timeout. A non-positive timeout disables the check.
 *
 * @param timestamps a point-in-time snapshot of the channel's activity times.
 * @param currentTime the instant at which the health check started.
 * @param channel the channel under test (used only for the failure message).
 * @return an empty string when the check passes, otherwise the failure reason.
 */
private String idleConnectionValidation(Timestamps timestamps, Instant currentTime, Channel channel) {
    String errorMessage = StringUtils.EMPTY;
    if (this.idleConnectionTimeoutInNanos > 0L) {
        // Duration.between(start, end): elapsed time since the last read. The previous argument
        // order (currentTime first) always produced a negative duration for a past read time,
        // so the idle check could never trip; arguments are now in (lastRead, now) order.
        if (Duration.between(timestamps.lastChannelReadTime(), currentTime).toNanos() > this.idleConnectionTimeoutInNanos) {
            errorMessage = MessageFormat.format(
                "{0} health check failed due to idle connection timeout: [lastChannelWrite: {1}, lastChannelRead: {2}, "
                    + "idleConnectionTimeout: {3}, currentTime: {4}]",
                channel,
                timestamps.lastChannelWriteTime(),
                timestamps.lastChannelReadTime(),
                idleConnectionTimeoutInNanos,
                currentTime);
            logger.warn(errorMessage);
        }
    }
    return errorMessage;
}
@Override
public String toString() {
    // Serializes this checker's @JsonProperty fields via the shared RNTBD object mapper.
    return RntbdObjectMapper.toString(this);
}
/**
 * Lock-free record of channel activity: last ping/read/write/write-attempt times plus
 * transit-timeout counters used by the health checker's timeout-detection logic.
 * <p>
 * All mutable fields are volatile and updated only through atomic field updaters, so an
 * instance may be mutated on the channel's event loop while being read or copied elsewhere.
 */
public static final class Timestamps {
    // Static atomic updaters avoid per-instance AtomicReference allocations for these hot fields.
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastPingUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastPingTime");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastReadUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastReadTime");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastWriteUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastWriteTime");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> lastWriteAttemptUpdater =
        newUpdater(Timestamps.class, Instant.class, "lastWriteAttemptTime");
    private static final AtomicIntegerFieldUpdater<Timestamps> transitTimeoutCountUpdater =
        AtomicIntegerFieldUpdater.newUpdater(Timestamps.class, "transitTimeoutCount");
    private static final AtomicIntegerFieldUpdater<Timestamps> transitTimeoutWriteCountUpdater =
        AtomicIntegerFieldUpdater.newUpdater(Timestamps.class, "transitTimeoutWriteCount");
    private static final AtomicReferenceFieldUpdater<Timestamps, Instant> transitTimeoutStartingTimeUpdater =
        newUpdater(Timestamps.class, Instant.class, "transitTimeoutStartingTime");

    private volatile Instant lastPingTime;
    private volatile Instant lastReadTime;
    private volatile Instant lastWriteTime;
    private volatile Instant lastWriteAttemptTime;
    // Number of requests that hit a transit timeout since the last reset.
    private volatile int transitTimeoutCount;
    // Subset of transitTimeoutCount attributable to non-read-only (write) requests.
    private volatile int transitTimeoutWriteCount;
    // Creation time of the first request in the current timeout streak; null after a reset.
    private volatile Instant transitTimeoutStartingTime;

    /** Creates a Timestamps whose activity times are all initialized to (approximately) now. */
    public Timestamps() {
        // Note: each field gets its own Instant.now(), so initial values may differ slightly.
        lastPingUpdater.set(this, Instant.now());
        lastReadUpdater.set(this, Instant.now());
        lastWriteUpdater.set(this, Instant.now());
        lastWriteAttemptUpdater.set(this, Instant.now());
    }

    /**
     * Copy constructor producing a point-in-time snapshot of {@code other}.
     *
     * @param other the instance to copy; must not be null.
     */
    @SuppressWarnings("CopyConstructorMissesField")
    public Timestamps(Timestamps other) {
        checkNotNull(other, "other: null");
        this.lastPingTime = lastPingUpdater.get(other);
        this.lastReadTime = lastReadUpdater.get(other);
        this.lastWriteTime = lastWriteUpdater.get(other);
        this.lastWriteAttemptTime = lastWriteAttemptUpdater.get(other);
        this.transitTimeoutCount = transitTimeoutCountUpdater.get(other);
        this.transitTimeoutWriteCount = transitTimeoutWriteCountUpdater.get(other);
        this.transitTimeoutStartingTime = transitTimeoutStartingTimeUpdater.get(other);
    }

    /** Records that a health-check ping completed now. */
    public void channelPingCompleted() {
        lastPingUpdater.set(this, Instant.now());
    }

    /** Records that a channel read completed now. */
    public void channelReadCompleted() {
        lastReadUpdater.set(this, Instant.now());
    }

    /** Records that a channel write was attempted now. */
    public void channelWriteAttempted() {
        lastWriteAttemptUpdater.set(this, Instant.now());
    }

    /** Records that a channel write completed now. */
    public void channelWriteCompleted() {
        lastWriteUpdater.set(this, Instant.now());
    }

    /**
     * Records one transit timeout; the first timeout of a streak also captures the request's
     * creation time as the streak's starting time.
     *
     * @param isReadOnly whether the timed-out request was read-only.
     * @param requestCreatedTime creation time of the timed-out request.
     */
    public void transitTimeout(boolean isReadOnly, Instant requestCreatedTime) {
        if (transitTimeoutCountUpdater.incrementAndGet(this) == 1) {
            transitTimeoutStartingTimeUpdater.set(this, requestCreatedTime);
        }
        if (!isReadOnly) {
            transitTimeoutWriteCountUpdater.incrementAndGet(this);
        }
    }

    /** Clears both transit-timeout counters and the streak starting time. */
    public void resetTransitTimeout() {
        transitTimeoutCountUpdater.set(this, 0);
        transitTimeoutWriteCountUpdater.set(this, 0);
        transitTimeoutStartingTimeUpdater.set(this, null);
    }

    @JsonProperty
    public Instant lastChannelPingTime() {
        return lastPingUpdater.get(this);
    }

    @JsonProperty
    public Instant lastChannelReadTime() {
        return lastReadUpdater.get(this);
    }

    @JsonProperty
    public Instant lastChannelWriteTime() {
        return lastWriteUpdater.get(this);
    }

    @JsonProperty
    public Instant lastChannelWriteAttemptTime() {
        return lastWriteAttemptUpdater.get(this);
    }

    @JsonProperty
    public int transitTimeoutCount() {
        return transitTimeoutCountUpdater.get(this);
    }

    // NOTE(review): method name is missing an 'r' ("tansit"); it is public API and doubles as the
    // serialized @JsonProperty name, so renaming would be a breaking change — flagged, not fixed.
    @JsonProperty
    public int tansitTimeoutWriteCount() {
        return transitTimeoutWriteCountUpdater.get(this);
    }

    @JsonProperty
    public Instant transitTimeoutStartingTime() {
        return transitTimeoutStartingTimeUpdater.get(this);
    }

    @Override
    public String toString() {
        return RntbdObjectMapper.toString(this);
    }
}
} | |
What happens service side if it receives an empty `clientFilters` list? The other option would be to defer this instantiation and only perform it when `addFilter` is called and `clientFilters == null` | public FeatureFlagConfigurationSetting(String featureId, boolean isEnabled) {
this.featureId = featureId;
this.isEnabled = isEnabled;
super.setKey(KEY_PREFIX + featureId);
super.setContentType(FEATURE_FLAG_CONTENT_TYPE);
clientFilters = new ArrayList<>();
} | clientFilters = new ArrayList<>(); | public FeatureFlagConfigurationSetting(String featureId, boolean isEnabled) {
this.featureId = featureId;
this.isEnabled = isEnabled;
super.setKey(KEY_PREFIX + featureId);
super.setContentType(FEATURE_FLAG_CONTENT_TYPE);
} | class FeatureFlagConfigurationSetting extends ConfigurationSetting {
private static final ClientLogger LOGGER = new ClientLogger(FeatureFlagConfigurationSetting.class);
private static final String FEATURE_FLAG_CONTENT_TYPE = "application/vnd.microsoft.appconfig.ff+json;charset=utf-8";
private String featureId;
private boolean isEnabled;
private String description;
private String displayName;
private List<FeatureFlagFilter> clientFilters;
/**
* A prefix is used to construct a feature flag configuration setting's key.
*/
public static final String KEY_PREFIX = ".appconfig.featureflag/";
/**
* The constructor for a feature flag configuration setting.
*
* @param featureId A feature flag identification value that used to construct in setting's key. The key of setting
* is {@code KEY_PREFIX} concatenate {@code featureId}.
* @param isEnabled A boolean value to turn on/off the feature flag setting.
*/
/**
* Sets the key of this setting.
*
* @param key The key to associate with this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setKey(String key) {
super.setKey(key);
return this;
}
/**
 * Sets the value of this setting.
 * <p>
 * The value is parsed as feature-flag JSON and every derived field ({@code featureId},
 * {@code description}, {@code isEnabled}, {@code displayName}, {@code clientFilters}) is
 * replaced by what the JSON contains. When the JSON carries no filters, {@code clientFilters}
 * becomes {@code null}.
 *
 * @param value The value to associate with this configuration setting.
 *
 * @return The updated {@link FeatureFlagConfigurationSetting} object.
 * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
 */
@Override
public FeatureFlagConfigurationSetting setValue(String value) {
    super.setValue(value);
    final FeatureFlagConfigurationSetting updatedSetting = readFeatureFlagConfigurationSettingValue(value);
    this.featureId = updatedSetting.getFeatureId();
    this.description = updatedSetting.getDescription();
    this.isEnabled = updatedSetting.isEnabled();
    this.displayName = updatedSetting.getDisplayName();
    if (updatedSetting.getClientFilters() != null) {
        // Defensive copy so later mutations of the parsed setting's list cannot leak in here.
        this.clientFilters = StreamSupport.stream(updatedSetting.getClientFilters().spliterator(), false)
            .collect(Collectors.toList());
    } else {
        this.clientFilters = null;
    }
    return this;
}
/**
* Sets the label of this configuration setting. {@link
* set.
*
* @param label The label of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setLabel(String label) {
super.setLabel(label);
return this;
}
/**
* Sets the content type. By default, the content type is null.
*
* @param contentType The content type of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setContentType(String contentType) {
super.setContentType(contentType);
return this;
}
/**
* Sets the ETag for this configuration setting.
*
* @param etag The ETag for the configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setETag(String etag) {
super.setETag(etag);
return this;
}
/**
* Sets the tags for this configuration setting.
*
* @param tags The tags to add to this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setTags(Map<String, String> tags) {
super.setTags(tags);
return this;
}
/**
* Get the feature ID of this configuration setting.
*
* @return the feature ID of this configuration setting.
*/
public String getFeatureId() {
return featureId;
}
/**
* Set the feature ID of this configuration setting.
*
* @param featureId the feature ID of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setFeatureId(String featureId) {
this.featureId = featureId;
super.setKey(KEY_PREFIX + featureId);
updateSettingValue();
return this;
}
/**
* Get the boolean indicator to show if the setting is turn on or off.
*
* @return the boolean indicator to show if the setting is turn on or off.
*/
public boolean isEnabled() {
return this.isEnabled;
}
/**
* Set the boolean indicator to show if the setting is turn on or off.
*
* @param isEnabled the boolean indicator to show if the setting is turn on or off.
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setEnabled(boolean isEnabled) {
this.isEnabled = isEnabled;
updateSettingValue();
return this;
}
/**
* Get the description of this configuration setting.
*
* @return the description of this configuration setting.
*/
public String getDescription() {
return description;
}
/**
* Set the description of this configuration setting.
*
* @param description the description of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setDescription(String description) {
this.description = description;
updateSettingValue();
return this;
}
/**
* Get the display name of this configuration setting.
*
* @return the display name of this configuration setting.
*/
public String getDisplayName() {
return displayName;
}
/**
* Set the display name of this configuration setting.
*
* @param displayName the display name of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setDisplayName(String displayName) {
this.displayName = displayName;
updateSettingValue();
return this;
}
/**
* Gets the feature flag filters of this configuration setting.
*
* @return the feature flag filters of this configuration setting.
*/
public List<FeatureFlagFilter> getClientFilters() {
return clientFilters;
}
/**
* Sets the feature flag filters of this configuration setting.
*
* @param clientFilters the feature flag filters of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setClientFilters(List<FeatureFlagFilter> clientFilters) {
this.clientFilters = clientFilters;
updateSettingValue();
return this;
}
/**
 * Add a feature flag filter to this configuration setting.
 *
 * @param clientFilter a feature flag filter to add to this configuration setting.
 *
 * @return The updated {@link FeatureFlagConfigurationSetting} object.
 */
public FeatureFlagConfigurationSetting addClientFilter(FeatureFlagFilter clientFilter) {
    // setValue(String) leaves clientFilters null when the parsed JSON has no filters; create
    // the list lazily so the first addClientFilter call after that cannot throw an NPE.
    if (clientFilters == null) {
        clientFilters = new ArrayList<>();
    }
    clientFilters.add(clientFilter);
    updateSettingValue();
    return this;
}
/**
 * Re-serializes this setting's feature-flag fields into the underlying JSON value.
 *
 * @throws IllegalArgumentException if the feature flag cannot be serialized; every public
 * setter that calls this method documents this exception.
 */
private void updateSettingValue() {
    try {
        super.setValue(writeFeatureFlagConfigurationSetting(this));
    } catch (IOException exception) {
        // Previously the exception was created and logged but never thrown, silently leaving the
        // setting's value stale even though callers document @throws IllegalArgumentException.
        throw LOGGER.logExceptionAsError(new IllegalArgumentException(
            "Can't parse Feature Flag configuration setting value.", exception));
    }
}
} | class FeatureFlagConfigurationSetting extends ConfigurationSetting {
private static final ClientLogger LOGGER = new ClientLogger(FeatureFlagConfigurationSetting.class);
private static final String FEATURE_FLAG_CONTENT_TYPE = "application/vnd.microsoft.appconfig.ff+json;charset=utf-8";
private String featureId;
private boolean isEnabled;
private String description;
private String displayName;
private List<FeatureFlagFilter> clientFilters;
/**
* A prefix is used to construct a feature flag configuration setting's key.
*/
public static final String KEY_PREFIX = ".appconfig.featureflag/";
/**
* The constructor for a feature flag configuration setting.
*
* @param featureId A feature flag identification value that used to construct in setting's key. The key of setting
* is {@code KEY_PREFIX} concatenate {@code featureId}.
* @param isEnabled A boolean value to turn on/off the feature flag setting.
*/
/**
* Sets the key of this setting.
*
* @param key The key to associate with this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setKey(String key) {
super.setKey(key);
return this;
}
/**
 * Sets the value of this setting and re-derives every feature-flag field from the parsed JSON.
 *
 * @param value The value to associate with this configuration setting.
 *
 * @return The updated {@link FeatureFlagConfigurationSetting} object.
 * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
 */
@Override
public FeatureFlagConfigurationSetting setValue(String value) {
    super.setValue(value);
    final FeatureFlagConfigurationSetting parsed = readFeatureFlagConfigurationSettingValue(value);
    this.featureId = parsed.getFeatureId();
    this.isEnabled = parsed.isEnabled();
    this.description = parsed.getDescription();
    this.displayName = parsed.getDisplayName();
    // Mutable defensive copy of the parsed filters (getClientFilters never returns null here).
    this.clientFilters = new ArrayList<>(parsed.getClientFilters());
    return this;
}
/**
* Sets the label of this configuration setting. {@link
* set.
*
* @param label The label of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setLabel(String label) {
super.setLabel(label);
return this;
}
/**
* Sets the content type. By default, the content type is null.
*
* @param contentType The content type of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setContentType(String contentType) {
super.setContentType(contentType);
return this;
}
/**
* Sets the ETag for this configuration setting.
*
* @param etag The ETag for the configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setETag(String etag) {
super.setETag(etag);
return this;
}
/**
* Sets the tags for this configuration setting.
*
* @param tags The tags to add to this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
*/
@Override
public FeatureFlagConfigurationSetting setTags(Map<String, String> tags) {
super.setTags(tags);
return this;
}
/**
* Get the feature ID of this configuration setting.
*
* @return the feature ID of this configuration setting.
*/
public String getFeatureId() {
return featureId;
}
/**
* Set the feature ID of this configuration setting.
*
* @param featureId the feature ID of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setFeatureId(String featureId) {
this.featureId = featureId;
super.setKey(KEY_PREFIX + featureId);
updateSettingValue();
return this;
}
/**
* Get the boolean indicator to show if the setting is turn on or off.
*
* @return the boolean indicator to show if the setting is turn on or off.
*/
public boolean isEnabled() {
return this.isEnabled;
}
/**
* Set the boolean indicator to show if the setting is turn on or off.
*
* @param isEnabled the boolean indicator to show if the setting is turn on or off.
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setEnabled(boolean isEnabled) {
this.isEnabled = isEnabled;
updateSettingValue();
return this;
}
/**
* Get the description of this configuration setting.
*
* @return the description of this configuration setting.
*/
public String getDescription() {
return description;
}
/**
* Set the description of this configuration setting.
*
* @param description the description of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setDescription(String description) {
this.description = description;
updateSettingValue();
return this;
}
/**
* Get the display name of this configuration setting.
*
* @return the display name of this configuration setting.
*/
public String getDisplayName() {
return displayName;
}
/**
* Set the display name of this configuration setting.
*
* @param displayName the display name of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setDisplayName(String displayName) {
this.displayName = displayName;
updateSettingValue();
return this;
}
/**
 * Gets the feature flag filters of this configuration setting.
 * <p>
 * The backing list is created lazily on first access, so this never returns {@code null}.
 * The returned list is the live internal list, not a copy; mutating it directly does not
 * re-serialize the setting's value — use {@code addClientFilter} or {@code setClientFilters}
 * for that.
 *
 * @return the feature flag filters of this configuration setting.
 */
public List<FeatureFlagFilter> getClientFilters() {
    if (clientFilters == null) {
        // Lazy init: setValue(String) replaces this field, so it may be unset at this point.
        clientFilters = new ArrayList<>();
    }
    return clientFilters;
}
/**
* Sets the feature flag filters of this configuration setting.
*
* @param clientFilters the feature flag filters of this configuration setting.
*
* @return The updated {@link FeatureFlagConfigurationSetting} object.
* @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format.
*/
public FeatureFlagConfigurationSetting setClientFilters(List<FeatureFlagFilter> clientFilters) {
this.clientFilters = clientFilters;
updateSettingValue();
return this;
}
/**
 * Add a feature flag filter to this configuration setting.
 *
 * @param clientFilter a feature flag filter to add to this configuration setting.
 *
 * @return The updated {@link FeatureFlagConfigurationSetting} object.
 */
public FeatureFlagConfigurationSetting addClientFilter(FeatureFlagFilter clientFilter) {
    // getClientFilters() performs the same lazy null-check initialization this method
    // previously inlined, so delegating to it is behaviorally identical.
    getClientFilters().add(clientFilter);
    updateSettingValue();
    return this;
}
/**
 * Serializes this feature flag back into the underlying setting's JSON value.
 * Called by every mutator so the raw value and the typed properties stay in sync.
 *
 * @throws IllegalArgumentException if the feature flag cannot be serialized to JSON,
 *     matching the {@code @throws} contract documented on the mutators.
 */
private void updateSettingValue() {
    try {
        super.setValue(writeFeatureFlagConfigurationSetting(this));
    } catch (IOException exception) {
        // logExceptionAsError only logs and RETURNS the exception; it must be
        // thrown explicitly. The original code dropped it, silently swallowing
        // serialization failures despite the documented @throws contract.
        throw LOGGER.logExceptionAsError(new IllegalArgumentException(
            "Can't parse Feature Flag configuration setting value.", exception));
    }
}
} |
By defining two functions like this, we can remove the last parameter. ```ballerina private void addTypeCastForBinaryExprA(BLangBinaryExpr binaryExpr, BType targetType, BType sourceType, boolean isRhsExpr) { if (sourceType.tag == TypeTags.UNION && sourceType.isNullable()) { binaryExpr.rhsExpr = addNilType(targetType, binaryExpr.rhsExpr); } else { binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, targetType); } } private void addTypeCastForBinaryExprB(BLangBinaryExpr binaryExpr, BType targetType, BType sourceType) { if (sourceType.tag == TypeTags.UNION && sourceType.isNullable()) { binaryExpr.lhsExpr = addNilType(targetType, binaryExpr.lhsExpr); } else { binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, targetType); } } ``` Please use proper function names. Also the name `sourceType` does not match here. | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
resultVariableDefinition, resultReferenceInWhile);
statementExpression.setBType(foreach.nillableResultType);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(whileNode);
return blockNode;
}
/**
 * Wraps the given semantic type in a user-defined type node with empty
 * package-alias and type-name identifiers; only the attached BType is
 * meaningful to downstream phases.
 */
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType typeNode =
            new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
                                     ASTBuilderUtil.createIdentifier(null, ""));
    typeNode.setBType(type);
    return typeNode;
}
/**
 * Rewrites a while statement. A while with an on-fail clause is first wrapped
 * in a do-on-fail statement so the shared on-fail desugaring applies.
 */
@Override
public void visit(BLangWhile whileNode) {
    if (whileNode.onFailClause == null) {
        // Plain while: rewrite condition and body in place.
        whileNode.expr = rewriteExpr(whileNode.expr);
        whileNode.body = rewrite(whileNode.body, env);
        result = whileNode;
        return;
    }
    BLangOnFailClause onFailClause = whileNode.onFailClause;
    whileNode.onFailClause = null;
    whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
    result = rewrite(wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause), env);
}
/**
 * Wraps {@code statement} inside a {@code do { statement } on fail ...} node so
 * on-fail handling is shared with the regular do-statement desugaring.
 */
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
                                      BLangOnFailClause onFailClause) {
    BLangDo doNode = (BLangDo) TreeBuilder.createDoNode();
    doNode.pos = location;
    doNode.onFailClause = onFailClause;
    BLangBlockStmt doBody = ASTBuilderUtil.createBlockStmt(location);
    doBody.scope = new Scope(env.scope.owner);
    doBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
    doBody.stmts.add(statement);
    doNode.body = doBody;
    return doNode;
}
/**
 * Desugars {@code lock { body }}. The body is evaluated under a trap so any
 * error raised inside it is captured first; the lock is then released and the
 * captured error (if any) is re-panicked — ensuring the lock is never left held.
 */
@Override
public void visit(BLangLock lockNode) {
    BLangOnFailClause currentOnFailClause = this.onFailClause;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    if (lockNode.onFailClause != null) {
        blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        rewrite(lockNode.onFailClause, env);
    }
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    enclLocks.push(lockStmt);
    // $errorResult = trap <body-as-statement-expression returning ()>
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.setBType(symTable.nilType);
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.setBType(nillableError);
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
                                                       this.env.scope.owner.pkgID, nillableError,
                                                       this.env.scope.owner, lockNode.pos, VIRTUAL);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
                                                                       nillableError, trapExpr,
                                                                       nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    // Release the lock before inspecting the trapped result.
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    unLockStmt.relatedLock = lockStmt;
    blockStmt.addStatement(unLockStmt);
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    // if ($errorResult is error) { panic <error>$errorResult; }
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
    this.onFailClause = currentOnFailClause;
}
// Lock statements are introduced by this desugar phase itself and need no
// further rewriting.
@Override
public void visit(BLangLockStmt lockStmt) {
    result = lockStmt;
}
// Unlock statements are introduced by this desugar phase itself and need no
// further rewriting.
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    result = unLockStmt;
}
/**
 * Builds the internal on-fail clause attached to a desugared transaction body:
 * it rolls back the transaction if required and then either re-panics the
 * caught error (when {@code $shouldPanic$} is true) or fails with it so an
 * enclosing on-fail / retry can observe it.
 */
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
                                                  BLangSimpleVarRef shouldRetryRef) {
    BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    trxOnFailClause.pos = pos;
    trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
    trxOnFailClause.body.scope = new Scope(env.scope.owner);
    trxOnFailClause.isInternal = true;
    // on fail error $trxError$ { ... }
    BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
            "$trxError$", symTable.errorType, null, trxOnFailErrorSym);
    trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            trxOnFailError);
    trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
    // Roll back (if not committed) before deciding how to surface the error.
    transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
            trxBlockId, shouldRetryRef);
    BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
    shouldNotPanic.setBType(symTable.booleanType);
    shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
    BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
    BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = pos;
    panicNode.expr = caughtError;
    // if (!$shouldPanic$) { fail $trxError$; } else { panic $trxError$; }
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
    trxOnFailClause.body.stmts.add(exitIf);
    BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
    failStmt.pos = pos;
    failStmt.expr = caughtError;
    failBlock.stmts.add(failStmt);
    trxOnFailClause.bodyContainsFail = true;
    return trxOnFailClause;
}
/**
 * Desugars a transaction statement. A transaction with a user on-fail clause is
 * first wrapped in a do-on-fail; otherwise the body gets an internal on-fail
 * (see createTrxInternalOnFail) plus a {@code $shouldPanic$} flag variable, and
 * the heavy lifting is delegated to {@code transactionDesugar}.
 */
@Override
public void visit(BLangTransaction transactionNode) {
    if (transactionNode.onFailClause != null) {
        // Detach the user's on-fail clause and re-enter via the do-statement path.
        BLangOnFailClause onFailClause = transactionNode.onFailClause;
        transactionNode.onFailClause = null;
        transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
        BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
        result = rewrite(doStmt, env);
    } else {
        // Save per-transaction state that is restored after rewriting.
        BLangLiteral currentTrxBlockId = this.trxBlockId;
        String uniqueId = String.valueOf(++transactionBlockCount);
        this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
        boolean currShouldReturnErrors = this.shouldReturnErrors;
        this.shouldReturnErrors = true;
        BLangOnFailClause currOnFailClause = this.onFailClause;
        // boolean $shouldPanic$ = false;  (set when the error must be re-panicked)
        BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
        BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
                env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
        shouldPanicVarSymbol.closure = true;
        BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
                "$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
        BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
                shouldPanicVariable);
        BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
                shouldPanicVarSymbol);
        BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
                this.shouldRetryRef);
        enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
        boolean userDefinedOnFailAvbl = this.onFailClause != null;
        analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
        BLangBlockStmt transactionStmtBlock =
                transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
        transactionStmtBlock.stmts.add(0, shouldPanicDef);
        transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
        transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
                BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
                BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        result = rewrite(transactionStmtBlock, this.env);
        // Restore saved per-transaction state.
        this.shouldReturnErrors = currShouldReturnErrors;
        this.trxBlockId = currentTrxBlockId;
        swapAndResetEnclosingOnFail(currOnFailClause);
    }
}
/**
 * Desugars a rollback statement by delegating to the transaction desugar and
 * rewriting the block it produces.
 */
@Override
public void visit(BLangRollback rollbackNode) {
    result = rewrite(transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef), env);
}
/**
 * Builds the internal on-fail clause for a desugared retry loop. On failure it
 * records the caught error, marks the loop for continuation, asks the retry
 * manager whether to retry, and — when retrying is refused — either propagates
 * the error to a user on-fail clause (via {@code fail}) or flags the error for
 * return from the enclosing function.
 */
private BLangOnFailClause createRetryInternalOnFail(Location pos,
                                                    BLangSimpleVarRef retryResultRef,
                                                    BLangSimpleVarRef retryManagerRef,
                                                    BLangSimpleVarRef shouldRetryRef,
                                                    BLangSimpleVarRef continueLoopRef,
                                                    BLangSimpleVarRef returnResult) {
    BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    internalOnFail.pos = pos;
    internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
    internalOnFail.body.scope = new Scope(env.scope.owner);
    // on fail error $caughtError$ { ... }
    BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
            "$caughtError$", symTable.errorType, null, caughtErrorSym);
    internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            caughtError);
    env.scope.define(caughtErrorSym.name, caughtErrorSym);
    BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
    // $retryResult$ = $caughtError$;
    BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
    internalOnFail.body.stmts.add(errorAssignment);
    // $continueLoop$ = true;
    BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
            ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
    internalOnFail.body.stmts.add(continueLoopTrue);
    // $shouldRetry$ = $retryManager$.shouldRetry($caughtError$);
    BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
            retryManagerRef, caughtErrorRef);
    BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
            shouldRetryInvocation);
    internalOnFail.body.stmts.add(shouldRetryAssignment);
    BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
    shouldNotRetryCheck.setBType(symTable.booleanType);
    shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
    BLangGroupExpr exitCheck = new BLangGroupExpr();
    exitCheck.setBType(symTable.booleanType);
    exitCheck.expression = shouldNotRetryCheck;
    BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
    if (this.onFailClause != null) {
        // A user on-fail exists: fail with the error when retrying stops, then
        // continue the loop so the retry bookkeeping runs.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = pos;
        failStmt.expr = retryResultRef;
        exitLogicBlock.stmts.add(failStmt);
        internalOnFail.bodyContainsFail = true;
        internalOnFail.body.stmts.add(exitIf);
        BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
        loopContinueStmt.pos = pos;
        internalOnFail.body.stmts.add(loopContinueStmt);
    } else {
        // No user on-fail: flag the error so it is returned from the function.
        BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
                ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
        exitLogicBlock.stmts.add(returnErrorTrue);
        internalOnFail.body.stmts.add(exitIf);
    }
    return internalOnFail;
}
/**
 * Builds a boolean negation expression {@code !expression}, creating the
 * operator symbol for the built-in {@code !} operator of type (boolean) -> boolean.
 */
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
    List<BType> notOpParamTypes = new ArrayList<>();
    notOpParamTypes.add(symTable.booleanType);
    BInvokableType notOpType = new BInvokableType(notOpParamTypes, symTable.booleanType, null);
    BOperatorSymbol notOpSymbol = new BOperatorSymbol(names.fromString(OperatorKind.NOT.value()),
            symTable.rootPkgSymbol.pkgID, notOpType, symTable.rootPkgSymbol, symTable.builtinPos, VIRTUAL);
    return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType, OperatorKind.NOT, notOpSymbol);
}
/**
 * Creates a lambda function node backed by a freshly defined package-level
 * function with the given parameters, return type and body. The function is
 * registered in the enclosing package before the body is attached.
 */
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                         List<BLangSimpleVariable> lambdaFunctionVariable,
                                         TypeNode returnType, BLangFunctionBody lambdaBody) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    String generatedName = functionNamePrefix + UNDERSCORE + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, generatedName);
    lambdaFunction.function = function;
    function.requiredParams.addAll(lambdaFunctionVariable);
    function.setReturnTypeNode(returnType);
    function.desugaredReturnType = true;
    // Define first so the function symbol (and param symbols) are established.
    defineFunction(function, env.enclPkg);
    function.body = lambdaBody;
    function.desugared = false;
    lambdaFunction.pos = pos;
    List<BType> parameterTypes = new ArrayList<>();
    for (BLangSimpleVariable param : function.requiredParams) {
        parameterTypes.add(param.symbol.type);
    }
    lambdaFunction.setBType(new BInvokableType(parameterTypes, function.symbol.type.getReturnType(), null));
    return lambdaFunction;
}
/**
 * Creates a lambda function from a list of body statements: the statements are
 * rewritten inside a dedicated function-body environment and the result is
 * delegated to the body-based overload.
 */
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                                   List<BLangSimpleVariable> lambdaFunctionVariable,
                                                   TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                   SymbolEnv env, Scope bodyScope) {
    BLangBlockFunctionBody functionBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    functionBody.scope = bodyScope;
    functionBody.stmts = rewriteStmt(fnBodyStmts, SymbolEnv.createFuncBodyEnv(functionBody, env));
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, functionBody);
}
/**
 * Registers the function in the target package: defines its symbol in the
 * package environment and attaches the node to the package's function and
 * top-level node lists.
 */
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, packageEnv);
    packageEnv.enclPkg.functions.add(funcNode);
    packageEnv.enclPkg.topLevelNodes.add(funcNode);
}
// Fork-join nodes are passed through unchanged by this pass.
@Override
public void visit(BLangForkJoin forkJoin) {
    result = forkJoin;
}
/**
 * Rewrites a literal. Byte-array literals (array-of-byte typed) are expanded
 * into explicit array literals; every other literal is left as-is.
 */
@Override
public void visit(BLangLiteral literalExpr) {
    BType literalType = literalExpr.getBType();
    boolean isByteArrayLiteral = literalType.tag == TypeTags.ARRAY
            && ((BArrayType) literalType).eType.tag == TypeTags.BYTE;
    if (isByteArrayLiteral) {
        result = rewriteBlobLiteral(literalExpr);
    } else {
        result = literalExpr;
    }
}
/**
 * Expands a blob literal (`base16 ...` / `base64 ...`) into an array literal
 * holding one byte literal per decoded byte.
 */
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    String[] blobParts = getBlobTextValue((String) literalExpr.value);
    byte[] decodedBytes = BASE_64.equals(blobParts[0])
            ? Base64.getDecoder().decode(blobParts[1].getBytes(StandardCharsets.UTF_8))
            : hexStringToByteArray(blobParts[1]);
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(literalExpr.getBType());
    arrayLiteral.pos = literalExpr.pos;
    arrayLiteral.exprs = new ArrayList<>();
    for (byte value : decodedBytes) {
        arrayLiteral.exprs.add(createByteLiteral(literalExpr.pos, value));
    }
    return arrayLiteral;
}
/**
 * Splits a blob literal's source text into {prefix, content}: whitespace
 * (space, tab, CR, LF) is stripped, the prefix is everything before the first
 * backtick and the content is the text between the first and last backticks.
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    StringBuilder compact = new StringBuilder(blobLiteralNodeText.length());
    for (int i = 0; i < blobLiteralNodeText.length(); i++) {
        char ch = blobLiteralNodeText.charAt(i);
        if (ch != ' ' && ch != '\t' && ch != '\n' && ch != '\r') {
            compact.append(ch);
        }
    }
    String nodeText = compact.toString();
    int openingBacktick = nodeText.indexOf('`');
    int closingBacktick = nodeText.lastIndexOf('`');
    return new String[] {
        nodeText.substring(0, openingBacktick),
        nodeText.substring(openingBacktick + 1, closingBacktick)
    };
}
/**
 * Decodes a hexadecimal string into bytes: each output byte is assembled from
 * two consecutive hex digits (high nibble first). The input is expected to
 * contain an even number of valid hex digits.
 */
private static byte[] hexStringToByteArray(String str) {
    int length = str.length();
    byte[] decoded = new byte[length / 2];
    for (int index = 0; index < length; index += 2) {
        int highNibble = Character.digit(str.charAt(index), 16) << 4;
        int lowNibble = Character.digit(str.charAt(index + 1), 16);
        decoded[index / 2] = (byte) (highNibble + lowNibble);
    }
    return decoded;
}
// Spread member inside a list constructor: only the spread expression itself
// needs rewriting here; the enclosing constructor handles member typing.
@Override
public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
    listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
    result = listConstructorSpreadOpExpr;
}
/**
 * Desugars a list constructor into the concrete literal node matching its
 * resolved type: tuple, JSON array, typedesc or plain array. The branch order
 * mirrors the original type dispatch.
 */
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BType referredType = Types.getReferredType(listConstructor.getBType());
    BLangExpression desugaredExpr;
    if (referredType.tag == TypeTags.TUPLE) {
        desugaredExpr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs,
                listConstructor.getBType());
    } else if (referredType.tag == TypeTags.JSON) {
        desugaredExpr = new BLangJSONArrayLiteral(listConstructor.exprs,
                new BArrayType(listConstructor.getBType()));
    } else if (getElementType(referredType).tag == TypeTags.JSON) {
        desugaredExpr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
    } else if (referredType.tag == TypeTags.TYPEDESC) {
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        desugaredExpr = typedescExpr;
    } else {
        desugaredExpr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs,
                listConstructor.getBType());
    }
    result = rewriteExpr(desugaredExpr);
}
// Table constructor: rewrite each member record literal in place.
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    rewriteExprs(tableConstructorExpr.recordLiteralList);
    result = tableConstructorExpr;
}
/**
 * Rewrites an array literal: JSON-typed (or JSON-element-typed) arrays become
 * JSON array literals; everything else passes through unchanged.
 */
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType referredType = Types.getReferredType(arrayLiteral.getBType());
    if (referredType.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
    } else if (getElementType(referredType).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
    } else {
        result = arrayLiteral;
    }
}
/**
 * Rewrites a tuple literal: inserts implicit casts from each member expression
 * to the corresponding tuple member type (or the rest type once past the fixed
 * members). Spread members advance the member index by their statically known
 * length; an open-ended spread switches all following members to the rest type.
 */
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        // `[...]` used as a typedesc value: desugar to a typedesc expression.
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
        return;
    }
    List<BLangExpression> exprs = tupleLiteral.exprs;
    BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
    List<BType> tupleMemberTypes = tupleType.tupleTypes;
    int tupleMemberTypeSize = tupleMemberTypes.size();
    int tupleExprSize = exprs.size();
    // Once a spread of statically unknown length is seen, every later member
    // maps to the rest type rather than a positional member type.
    boolean isInRestType = false;
    int i = 0;  // index into the tuple's member types
    for (BLangExpression expr: exprs) {
        if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
            spreadOpType = Types.getReferredType(spreadOpType);
            if (spreadOpType.tag == TypeTags.ARRAY) {
                BArrayType spreadOpBArray = (BArrayType) spreadOpType;
                if (spreadOpBArray.size >= 0) {
                    // Fixed-length array spread: skip that many member slots.
                    i += spreadOpBArray.size;
                    continue;
                }
            } else {
                BTupleType spreadOpTuple = (BTupleType) spreadOpType;
                if (types.isFixedLengthTuple(spreadOpTuple)) {
                    i += spreadOpTuple.tupleTypes.size();
                    continue;
                }
            }
            isInRestType = true;
            continue;
        }
        BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
        BType targetType = tupleType.restType;
        if (!isInRestType && i < tupleMemberTypeSize) {
            targetType = tupleMemberTypes.get(i);
        }
        types.setImplicitCastExpr(expr, expType, targetType);
        i++;
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
// Parenthesized expressions are transparent after type checking; desugar
// directly to the inner expression.
@Override
public void visit(BLangGroupExpr groupExpr) {
    result = rewriteExpr(groupExpr.expression);
}
// Mapping constructor: order computed-key fields after literal-key fields
// (List.sort is stable, and false < true), then desugar the constructor.
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
    fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
/**
 * Rewrites a simple variable reference into the concrete reference node that
 * matches its symbol kind: XML namespace, function value, type load, local,
 * struct field or package-level variable. Inlinable constants are replaced by
 * their literal value, and package-level variables referenced inside a lock
 * are registered on the enclosing lock statement.
 */
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // XML namespace reference: desugar to a qualified-name expression.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.setBType(symTable.stringType);
        result = qnameExpr;
        return;
    }
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Prefer the original symbol when this one is a clone/copy.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
        // Function used as a value.
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        // Local variable inside a function or let expression.
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Inline simple constants (basic types and nil) as literals.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            BType referredType = Types.getReferredType(constSymbol.literalType);
            if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
        // Inside a lock, record this global (and its dependencies) on the lock.
        if (!enclLocks.isEmpty()) {
            BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
            BLangLockStmt lockStmt = enclLocks.peek();
            lockStmt.addLockVariable(symbol);
            lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
        }
    }
    genVarRefExpr.setBType(varRefExpr.getBType());
    genVarRefExpr.pos = varRefExpr.pos;
    // LValues (and `_`) keep the symbol's declared type; no conversion added.
    if ((varRefExpr.isLValue)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        genVarRefExpr.setBType(varRefExpr.symbol.type);
        result = genVarRefExpr;
        return;
    }
    genVarRefExpr.isLValue = varRefExpr.isLValue;
    BType targetType = genVarRefExpr.getBType();
    genVarRefExpr.setBType(genVarRefExpr.symbol.type);
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
// Namespace-prefixed field access shares the generic field-access rewrite.
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
/**
 * Rewrites a field access ({@code expr.field}) into the concrete access node
 * for the receiver's type: struct/record field access, object method value,
 * lax JSON/map access, plain map access or XML access. Safe navigation
 * ({@code ?.}) is handled up front by the safe-navigation rewrite.
 */
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    // The field name becomes a string-literal key for the generated access.
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    BType refType = Types.getReferredType(varRefType);
    int varRefTypeTag = refType.tag;
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Accessing an attached method as a value: build a delegating lambda.
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;
            // Field writes inside the object's own init functions count as
            // store-on-creation (fields may not be initialized yet).
            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false,
                    isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(refType)) {
        // Lax types (json, xml-constrained maps): dynamic key access.
        if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
            if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
// Plain field access delegates to the shared field-access rewrite.
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    rewriteFieldBasedAccess(fieldAccessExpr);
}
/**
 * Rewrites {@code obj.method} (method accessed as a value) into an anonymous
 * lambda that closes over the receiver and delegates to the method with the
 * same parameters and rest parameter. If the receiver is not a simple variable
 * reference it is first captured into a temp variable so it is evaluated once.
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;
    // Create the delegating anonymous function and its symbol.
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
                                                       Names.fromString(funcName), env.enclPkg.packageID,
                                                       originalMemberFuncSymbol.type, env.scope.owner, pos,
                                                       VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();
    // Capture the receiver: directly when it is a simple var ref, otherwise
    // via a temp variable so the receiver expression is evaluated exactly once.
    BLangExpression receiver = fieldAccessExpr.expr;
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                                                     receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }
    // Mirror each required parameter of the original method on the lambda and
    // forward it as an argument.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                                       VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);
        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }
    // Mirror the rest parameter (if any) and forward it as a rest argument.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                                          VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);
        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }
    // Lambda body: return receiver.method(args..., rest...);
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());
    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Evaluate the receiver temp first, then yield the lambda.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
/**
 * Builds an attached-method invocation node for {@code receiver.field(args...)},
 * typed with the invokable symbol's declared return type.
 *
 * @param receiver        expression the method is invoked on
 * @param invocableSymbol symbol of the method being invoked
 * @param requiredArgs    positional arguments (receiver not included)
 * @param restArgs        rest arguments, possibly empty
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.name = field;
    invocation.symbol = invocableSymbol;
    invocation.expr = receiver;
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    // The invocation's static type is the callee's return type.
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
/**
 * Desugars a lax field access over a map (e.g. {@code m.field} where {@code m} is lax-typed)
 * into a statement expression roughly of the shape:
 * <pre>
 *   (T|error) $mapAccessResult$;
 *   (T|())    $mapAccess = m["field"];
 *   if ($mapAccess is ()) { $mapAccessResult$ = error("{map}InvalidKey", key = "field"); }
 *   else                  { $mapAccessResult$ = $mapAccess; }
 *   =&gt; $mapAccessResult$
 * </pre>
 * so a missing key yields an {@code InvalidKey} error instead of nil.
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    // Overall result type is the member type or an error.
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);

    // m["field"] — a member access that may produce nil when the key is absent.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);

    // if ($mapAccess is ()) { ... } else { ... }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;

    // error("{map}InvalidKey", key = "<field>") — the `error` symbol is resolved from scope.
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;

    // then-branch: assign the constructed error to the result variable.
    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;

    // else-branch: assign the successfully accessed member value.
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;

    statementExpression.expr = resultRef;
    return statementExpression;
}
/**
 * Desugars field access on an XML value into a lang-lib call: {@code x.attr}
 * (attribute read) or {@code x._} (element-name access).
 */
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        // Namespace-qualified access: expand `ns:attr` to `{nsURI}attr`.
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }
    // The field name "_" denotes element-name access rather than an attribute read.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    // Second argument tells the lang-lib helper whether this was optional access (`x?.attr`).
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
/**
 * Creates a rewritten boolean literal stating whether the given field access uses
 * optional access ({@code ?.}).
 */
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    boolean optionalAccess = fieldAccessExpr.isOptionalFieldAccess();
    BLangLiteral literal = createLiteral(fieldAccessExpr.pos, symTable.booleanType, optionalAccess);
    return rewrite(literal, env);
}
/**
 * Produces the expanded-QName form {@code {nsURI}localName}.
 */
private String createExpandedQName(String nsURI, String localName) {
    return String.format("{%s}%s", nsURI, localName);
}
/**
 * Desugars an index-based access {@code e[i]} into the container-specific access node
 * (map, record-like mapping, list, xml, string, or table), chosen by the referred
 * type of {@code e}.
 */
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    if (safeNavigate(indexAccessExpr)) {
        // Safe-navigation chains get their own desugaring first.
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    // Resolve intersection/reference types down to the effective container type.
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like mappings become struct field accesses.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr,
                (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String subtypes are widened to string before indexing.
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        if (targetVarRef.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY) {
            // Wrap a multi-key index `t[k1, k2]` into a tuple literal of the key values.
            BLangTupleLiteral listConstructorExpr = new BLangTupleLiteral();
            listConstructorExpr.exprs = ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs;
            List<BType> memberTypes = new ArrayList<>();
            ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs.
                    forEach(expression -> memberTypes.add(expression.getBType()));
            listConstructorExpr.setBType(new BTupleType(memberTypes));
            indexAccessExpr.indexExpr = listConstructorExpr;
        }
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    // The replacement node inherits lvalue-ness and the static type of the original.
    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
/**
 * Rewrites each key expression of a multi-key table access (e.g. {@code t[k1, k2]}).
 */
@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    rewriteExprs(tableMultiKeyExpr.multiKeyIndexExprs);
    result = tableMultiKeyExpr;
}
/**
 * Desugars a plain function invocation; plain calls are never async.
 */
@Override
public void visit(BLangInvocation iExpr) {
    rewriteInvocation(iExpr, false);
}
/**
 * Desugars an error constructor: ensures the second positional (cause) argument
 * exists and is typed as error, then folds the named arguments into an immutable
 * detail record assigned to {@code errorDetail}.
 */
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        // Only the message was supplied; add `()` as the implicit cause argument.
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        // Each named argument becomes a key-value field of the detail record literal.
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            // Record detail types widen the value to `any`; otherwise it keeps its own type.
            if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
/**
 * Desugars a remote/action invocation. A synchronous action performed inside a
 * transaction requires the transaction coordinator to be started first.
 */
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    boolean isSynchronous = !actionInvocation.async;
    if (isSynchronous && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }
    rewriteInvocation(actionInvocation, actionInvocation.async);
}
/**
 * Common desugaring for function/action invocations: records lock dependencies,
 * reorders and rewrites arguments, dispatches function-pointer calls, materializes
 * parameterized (dependently-typed) return types, and converts object/record
 * method calls into attached-function invocations.
 *
 * @param invocation the invocation to rewrite (mutated in place)
 * @param async      whether this is a `start`-style asynchronous invocation
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;
    if (!enclLocks.isEmpty()) {
        // Inside a lock statement: the lock must also guard the callee's global-var deps.
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }
    // Arguments are reordered into the callee's declared parameter order before rewriting.
    reorderArguments(invocation);
    rewriteExprs(invocation.requiredArgs);
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    rewriteExprs(invocation.restArgs);
    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
            invocation.symbol.pkgID, invocation.symbol.owner, env);
    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    invocation.expr = rewriteExpr(invocation.expr);
    result = invRef;
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        // Dependently-typed return: build the concrete return type for this call site.
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }
    if (invocation.expr == null) {
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        // An implicit receiver (e.g. `self`) becomes an explicit variable reference.
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }
    switch (Types.getReferredType(invocation.expr.getBType()).tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!invocation.langLibInvocation) {
                // Method call on an object/record: the receiver becomes the first argument.
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                invocation.symbol, invocation.getBType(),
                                invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }
    if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
        // Object-constructor expressions with closures: remember the init invocation so
        // the closure environment can be wired up later.
        BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
        BLangClassDefinition classDef = initializingObject.classDef;
        if (classDef.hasClosureVars) {
            OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
            if (oceEnvData.attachedFunctionInvocation == null) {
                oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
            }
        }
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
/**
 * For lang-lib invocations, casts each required argument to the type of its
 * corresponding parameter so type-param handling does not leak mismatched types.
 */
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BLangExpression> args = iExpr.requiredArgs;
    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    int argCount = args.size();
    for (int idx = 0; idx < argCount; idx++) {
        BType paramType = paramSymbols.get(idx).type;
        args.set(idx, addConversionExprIfRequired(args.get(idx), paramType));
    }
}
/* This function is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments refer to the same invocation.
 * 2. Due to the current type-param logic, the type-param flag is set on the original type.
 * 3. For an error type whose detail is the Cloneable type carrying the type-param flag,
 *    this code changes the expression type.
 * 4. Using the error type is a problem because Cloneable is a type param
 *    (e.g. ExprBodiedFunctionTest); `never` was added to the Cloneable type param:
 *    @typeParam type
 *    CloneableType Cloneable|never;
 */
/**
 * For invocations whose return type involves a type param (or any lang-lib call),
 * switches the generated invocation's static type to the invokable's declared return
 * type and adds a cast back to the original expression type on top, so callers still
 * observe the type-param-free type.
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
    if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
        return;
    }

    // The static type of the invocation is replaced by the invokable's return type
    // (which may carry the type-param flag); async invocations keep their future type.
    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        genIExpr.setBType(returnTypeOfInvokable);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
/**
 * Adds a cast on every required argument whose corresponding parameter type is a
 * stream, so the argument carries the declared stream type after desugaring.
 */
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    if (paramSymbols.isEmpty()) {
        return;
    }
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BVarSymbol paramSymbol = paramSymbols.get(idx);
        if (Types.getReferredType(paramSymbol.type).tag == TypeTags.STREAM) {
            args.set(idx, addConversionExprIfRequired(args.get(idx), paramSymbol.type));
        }
    }
}
/**
 * Creates a {@code ()} (nil) literal node.
 */
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
/**
 * Desugars a type-init expression ({@code new T(...)}): stream constructors are routed
 * through the internal stream-construct function, while object inits become a
 * statement expression that runs the generated initializer.
 */
@Override // added: every sibling visit method in this visitor carries @Override
public void visit(BLangTypeInit typeInitExpr) {
    if (Types.getReferredType(typeInitExpr.getBType()).tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}
/**
 * Desugars {@code new T(args)} for objects into a statement expression:
 * declare {@code $obj$}, invoke the generated init method, and — when init can
 * return an error — branch on the init result so the whole expression yields
 * either the constructed object or the init error.
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);

    // T $obj$ = new T(); — the typeInitExpr itself becomes the variable initializer.
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);

    // Point the init invocation at the generated initializer of the object type.
    BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
    typeInitInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    typeInitInvocation.objectInitMethod = true;

    if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
        // init() cannot fail: just invoke it as a statement and yield $obj$.
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitInvocation;
        typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }

    // init() may return an error: capture the init result in $temp$ ...
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
            typeInitInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);

    // ... and select either the error or the object into $result$.
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);

    // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);

    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);

    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);

    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
/**
 * Desugars {@code new stream<C, E>(...)} into a call to the internal
 * {@code constructStream(typedesc, typedesc, iteratorObj?)} function.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BStreamType streamType = (BStreamType) typeInitExpr.getBType();
    BType constraintType = streamType.constraint;
    BType completionType = streamType.completionType;

    // Typedesc arguments describing the stream's constraint and completion types.
    BLangTypedescExpr constraintTd = new BLangTypedescExpr();
    constraintTd.resolvedType = constraintType;
    constraintTd.setBType(new BTypedescType(constraintType, symTable.typeDesc.tsymbol));

    BLangTypedescExpr completionTd = new BLangTypedescExpr();
    completionTd.resolvedType = completionType;
    completionTd.setBType(new BTypedescType(completionType, symTable.typeDesc.tsymbol));

    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTd, completionTd));
    if (!typeInitExpr.argsExpr.isEmpty()) {
        // Optional iterator-object argument supplied to the stream constructor.
        args.add(typeInitExpr.argsExpr.get(0));
    }

    BInvokableSymbol constructStreamSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, constructStreamSymbol, args, symResolver);
    streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return streamConstructInvocation;
}
/**
 * Creates a variable definition {@code type name = expr;}, reusing an existing symbol
 * with the same name from the current scope when one is found, otherwise creating a
 * fresh virtual symbol.
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    BSymbol varSymbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    boolean symbolMissing = varSymbol == null || varSymbol == symTable.notFoundSymbol;
    if (symbolMissing) {
        varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(location);
    varDef.var = variable;
    varDef.setBType(variable.getBType());
    return varDef;
}
/**
 * Extracts the object type from the given type: the type itself when it is an object,
 * or the first object member of a union ({@code symTable.noType} when the union has
 * none). Throws for any other type kind.
 */
private BType getObjectType(BType bType) {
    BType referredType = Types.getReferredType(bType);
    if (referredType.tag == TypeTags.OBJECT) {
        return referredType;
    }
    if (referredType.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) referredType).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException(
            "None object type '" + referredType.toString() + "' found in object init context");
}
/**
 * Creates an error type node typed as {@code error}, positioned at the builtin location.
 */
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.pos = symTable.builtinPos;
    errorType.setBType(symTable.errorType);
    return errorType;
}
/**
 * Creates an error-or-nil ({@code error?}) type node.
 * <p>
 * Sets the builtin position for consistency with {@link #getErrorTypeNode()}; previously
 * the node carried a null position.
 */
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
/**
 * Desugars {@code cond ? thenExpr : elseExpr} into:
 * <pre>
 *   T $ternary_result$;
 *   if (cond) { $ternary_result$ = thenExpr; } else { $ternary_result$ = elseExpr; }
 * </pre>
 * wrapped in a statement expression that yields {@code $ternary_result$}.
 */
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    Location pos = ternaryExpr.pos;
    BLangSimpleVariableDef resultDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, pos);

    // then-branch: $ternary_result$ = thenExpr;
    BLangBlockStmt thenBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangSimpleVarRef thenRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    BLangAssignment thenAssign =
            ASTBuilderUtil.createAssignmentStmt(pos, thenRef, ternaryExpr.thenExpr);
    thenBlock.addStatement(thenAssign);

    // else-branch: $ternary_result$ = elseExpr;
    BLangBlockStmt elseBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangSimpleVarRef elseRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    BLangAssignment elseAssign =
            ASTBuilderUtil.createAssignmentStmt(pos, elseRef, ternaryExpr.elseExpr);
    elseBlock.addStatement(elseAssign);

    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(pos, ternaryExpr.expr, thenBlock, elseBlock);
    BLangBlockStmt enclosingBlock = ASTBuilderUtil.createBlockStmt(pos, Lists.of(resultDef, ifElse));
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, resultDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(enclosingBlock, resultRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression. An alternate wait (`wait a | b | c`) arrives as nested
 * binary expressions, which are flattened into the expression list; a simple wait
 * keeps a single rewritten expression.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() != NodeKind.BINARY_EXPR) {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    } else {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    }
    result = waitExpr;
}
/**
 * Recursively flattens a wait binary-expression tree into {@code exprs},
 * visiting operands left-to-right, and returns the same list.
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    for (BLangExpression operand : new BLangExpression[]{binaryExpr.lhsExpr, binaryExpr.rhsExpr}) {
        visitBinaryExprOfWait(operand, exprs);
    }
    return exprs;
}
/**
 * Adds {@code expr} (rewritten) to {@code exprs}; nested binary expressions are
 * descended into instead of being added directly.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
/**
 * Desugars a multiple-wait expression (`wait {a: f1, b: f2}`): rewrites each key-value
 * pair (the value expression when present, otherwise the key doubles as the waited
 * expression) and replaces the node with a wait literal.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    waitExpr.keyValuePairs.forEach(kv -> {
        if (kv.valueExpr == null) {
            kv.keyExpr = rewriteExpr(kv.keyExpr);
        } else {
            kv.valueExpr = rewriteExpr(kv.valueExpr);
        }
    });
    BLangExpression waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
/**
 * Desugars a trap expression: rewrites the trapped expression and, unless it is
 * nil-typed, casts it to the trap expression's (error-including) type.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    BLangExpression trappedExpr = rewriteExpr(trapExpr.expr);
    boolean isNilTyped = Types.getReferredType(trappedExpr.getBType()).tag == TypeTags.NIL;
    trapExpr.expr = isNilTyped ? trappedExpr : addConversionExprIfRequired(trappedExpr, trapExpr.getBType());
    result = trapExpr;
}
/**
 * Desugars a binary expression: nullable-operand arithmetic/bitwise ops become an
 * if/else statement expression, range operators become int-range constructions,
 * logical and/or get short-circuit handling, and remaining operand-type mismatches
 * are resolved by inserting casts.
 */
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        // `a ..< b` / `a ... b` become an int-range; half-open adjusts the end bound.
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
        rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
        return;
    }
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        // Logical and/or need short-circuit desugaring.
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    // (in)equality between byte and int: widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    // Same-tag operands need no cast, except non-value types under shift/arithmetic ops.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // `string + xml` (either order): lift the string side to an xml text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    // Mixed numeric operands: widen the other side to decimal, then float, in priority order.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
}
/**
 * Builds the if/else statement expression used to desugar an arithmetic/bitwise/shift
 * binary expression with nullable operands (see the inline sketch below): when either
 * operand is nil the result is nil, otherwise the operation runs on the unwrapped values.
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     *
     * int? $lhsExprVar$ = x;
     * int? $rhsExprVar$ = y;
     * if (lhsVar is () or rhsVar is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = $lhsExprVar$ + $rhsExprVar$;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    // The non-nil member of `T?` is the type the actual operation is performed on.
    BType nonNilType = exprBType.getMemberTypes().iterator().next();

    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;

    // Bitwise operators keep each operand's own (nil-stripped) type instead of nonNilType.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    // $result$ holds the nullable outcome of the whole expression.
    BLangSimpleVariableDef tempVarDef = createVarDef("result",
            binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Evaluate each operand exactly once into its own variable.
    BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
            binaryExpr.lhsExpr, binaryExpr.pos);
    BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
    blockStmt.addStatement(lhsVarDef);
    BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
            binaryExpr.rhsExpr, binaryExpr.pos);
    BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
    blockStmt.addStatement(rhsVarDef);
    // if (lhs is () || rhs is ()) { $result$ = (); } else { $result$ = lhs <op> rhs; }
    BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    // The actual operation runs on operands cast down to their non-nil types.
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
            nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}
/**
 * True when the binary expression is an arithmetic, bitwise, or shift operation and
 * at least one operand is nullable (with both operand types already resolved).
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts casts on the operands of an arithmetic binary expression so both sides share
 * a compatible type: homogeneous int/string/xml pairs are left alone; a string-typed
 * operand next to an XML operand becomes an XML text literal; any other operand next
 * to XML is cast to xml; otherwise both operands are cast to the expression's own type.
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Fix: position the generated literal at the LHS expression; previously this
            // passed rhsExpr.pos, an apparent copy-paste from the mirrored branch above.
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Inserts casts so both operands of a shift expression are int-compatible.
 * Operands that are already int or byte are left alone; every other operand
 * is cast to int.
 *
 * @param binaryExpr     the shift expression being desugared
 * @param lhsExprTypeTag type tag of the lhs operand
 * @param rhsExprTypeTag type tag of the rhs operand
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (lhsIsIntLike && rhsIsIntLike) {
        return;
    }
    if (lhsIsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    if (rhsIsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
}
/**
 * Inserts casts so both operands of a relational expression end up directly
 * comparable. The branch order below encodes a widening precedence —
 * decimal first, then float, then int, then byte, then string — so the
 * checks must not be reordered.
 *
 * @param binaryExpr     the relational expression being desugared
 * @param lhsExprTypeTag type tag of the lhs operand
 * @param rhsExprTypeTag type tag of the rhs operand
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    BType lhsExprType = binaryExpr.lhsExpr.getBType();
    BType rhsExprType = binaryExpr.rhsExpr.getBType();
    // Both integers or both bytes: already comparable, nothing to do.
    if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&
            rhsExprTypeTag == TypeTags.BYTE)) {
        return;
    }
    // decimal wins: cast the opposite operand up to the decimal side's type.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        addTypeCastForBinaryExpr(binaryExpr, lhsExprType, rhsExprType, true);
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        addTypeCastForBinaryExpr(binaryExpr, rhsExprType, lhsExprType, false);
        return;
    }
    // float next.
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        addTypeCastForBinaryExpr(binaryExpr, lhsExprType, rhsExprType, true);
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        addTypeCastForBinaryExpr(binaryExpr, rhsExprType, lhsExprType, false);
        return;
    }
    // int vs non-int: lift the non-int side to int.
    if (isLhsIntegerType && !isRhsIntegerType) {
        addTypeCastForBinaryExpr(binaryExpr, symTable.intType, rhsExprType, true);
        return;
    }
    if (!isLhsIntegerType && isRhsIntegerType) {
        addTypeCastForBinaryExpr(binaryExpr, symTable.intType, lhsExprType, false);
        return;
    }
    // byte on either side: cast both sides to int, keeping nil in the union
    // (via addNilType) when either operand's union type is nullable.
    if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
        if ((lhsExprTypeTag == TypeTags.UNION && lhsExprType.isNullable()) ||
                (rhsExprTypeTag == TypeTags.UNION && rhsExprType.isNullable())) {
            binaryExpr.lhsExpr = addNilType(symTable.intType, binaryExpr.lhsExpr);
            binaryExpr.rhsExpr = addNilType(symTable.intType, binaryExpr.rhsExpr);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    // Finally, string vs non-string: lift the non-string side to string.
    boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);
    if (isLhsStringType && isRhsStringType) {
        return;
    }
    if (isLhsStringType && !isRhsStringType) {
        addTypeCastForBinaryExpr(binaryExpr, symTable.stringType, rhsExprType, true);
        return;
    }
    if (!isLhsStringType && isRhsStringType) {
        addTypeCastForBinaryExpr(binaryExpr, symTable.stringType, lhsExprType, false);
    }
}
/**
 * Casts one operand of a binary expression to {@code targetType}. If the
 * source operand's type is a nullable union, the OPPOSITE operand is lifted to
 * {@code targetType|()} via {@link #addNilType} instead; otherwise the operand
 * selected by {@code isRhsExpr} gets a plain cast.
 *
 * @param binaryExpr the binary expression being adjusted
 * @param targetType the type to cast/lift to
 * @param sourceType the type of the operand opposite the one being cast
 * @param isRhsExpr  true to cast the rhs operand, false for the lhs
 */
private void addTypeCastForBinaryExpr(BLangBinaryExpr binaryExpr, BType targetType, BType sourceType,
                                      boolean isRhsExpr) {
    boolean nullableUnionSource = sourceType.tag == TypeTags.UNION && sourceType.isNullable();
    if (isRhsExpr) {
        if (nullableUnionSource) {
            binaryExpr.lhsExpr = addNilType(targetType, binaryExpr.lhsExpr);
        } else {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, targetType);
        }
    } else {
        if (nullableUnionSource) {
            binaryExpr.rhsExpr = addNilType(targetType, binaryExpr.rhsExpr);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, targetType);
        }
    }
}
/**
 * Wraps the expression in a cast to the union {@code exprType|()}.
 *
 * @param exprType the non-nil member of the union
 * @param expr     the expression to cast
 * @return the cast expression targeting the nullable union
 */
private BLangExpression addNilType(BType exprType, BLangExpression expr) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(2);
    memberTypes.add(exprType);
    memberTypes.add(new BNilType());
    return createTypeCastExpr(expr, new BUnionType(null, memberTypes, true, false));
}
/**
 * Builds an invocation of the lang.internal {@code createIntRange(lhs, rhs)}
 * helper, typed as an int range.
 *
 * @param location source position for the generated invocation
 * @param lhsExpr  range start expression
 * @param rhsExpr  range end expression
 * @return the createIntRange invocation node
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol rangeFnSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    ArrayList<BLangExpression> args = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation intRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location, rangeFnSymbol,
            args, symResolver);
    intRangeInvocation.setBType(symTable.intRangeType);
    return intRangeInvocation;
}
/**
 * When a binary expression with byte operand(s) is expected to produce an int,
 * adds int conversions on the byte-typed operand(s). No-op when there is no
 * expected type, neither operand is byte, or the expected type is not int.
 *
 * @param binaryExpr the binary expression to adjust
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    boolean rhsIsByte = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag == TypeTags.BYTE;
    boolean lhsIsByte = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag == TypeTags.BYTE;
    if (!rhsIsByte && !lhsIsByte) {
        return;
    }
    if (binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsIsByte) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsIsByte) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Tells whether the given binary expression uses one of the shift operators
 * ({@code <<}, {@code >>}, {@code >>>}). Callers use this to decide when both
 * operands must be converted to 'int'.
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars the elvis expression {@code lhs ?: rhs} into a match expression:
 * a nil pattern clause yields the (rewritten) rhs, any other value yields
 * the lhs itself.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    BLangExpression rewrittenRhs = rewriteExpr(elvisExpr.rhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos, rewrittenRhs));
    matchExpr.setBType(elvisExpr.getBType());
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Nullable operand: desugar into an if/else statement expression that nil-lifts.
    if (isNullableUnaryExpr(unaryExpr)) {
        result = rewrite(createStmtExprForNilableUnaryExpr(unaryExpr), env);
        return;
    }
    OperatorKind operator = unaryExpr.operator;
    if (operator == OperatorKind.BITWISE_COMPLEMENT) {
        // `~x` is lowered to an XOR; see rewriteBitwiseComplementOperator.
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    if (operator == OperatorKind.ADD || operator == OperatorKind.SUB) {
        createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * For unary + / -, casts a non-integer operand to the unary expression's own
 * type; integer operands are left as-is.
 *
 * @param unaryExpr the unary plus/minus expression
 */
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    BLangExpression operand = unaryExpr.expr;
    if (!TypeTags.isIntegerTypeTag(operand.getBType().tag)) {
        unaryExpr.expr = createTypeCastExpr(operand, unaryExpr.getBType());
    }
}
/**
 * Desugars a bitwise complement (~) unary expression into an equivalent XOR:
 * {@code ~a} becomes {@code a ^ -1} for int operands, and {@code a ^ 0xff}
 * for byte operands.
 * Example : ~ 11110011 -> 00001100
 *           11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    boolean isByte = TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag;
    if (isByte) {
        xorExpr.setBType(symTable.byteType);
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        xorExpr.setBType(symTable.intType);
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(xorExpr);
}
/**
 * Desugars a unary expression over a nullable operand into a statement
 * expression: a temp var is assigned nil when the operand is nil, and the
 * unary operation over the cast operand otherwise (see the example below).
 *
 * @param unaryExpr the nullable unary expression
 * @return the statement expression that replaces it
 */
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    // Takes the union's first member as the nil-lifted type — assumes the
    // non-nil member appears first in the member set; TODO confirm.
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    // $result$ temp var, initialized to nil.
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Condition: operand is ().
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    // then-branch: $result$ = ();
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: $result$ = <op>(<nilLiftType>operand);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    // Assemble the if/else and wrap everything as a statement expression
    // whose value is the temp var.
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
/**
 * Checks whether the given unary expression needs nil-lifting desugaring:
 * its type must be resolved and nullable, and the operator must be one of
 * +, - or ~.
 *
 * @param unaryExpr the unary expression to inspect
 * @return true if the expression should be desugared with nil-lifting
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    switch (unaryExpr.operator) {
        case ADD:
        case SUB:
        case BITWISE_COMPLEMENT:
            return true;
        default:
            return false;
    }
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion with no type node but with annotation attachments carries
    // no runtime cast; rewrite and return the inner expression directly.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Fix: removed the unused local `BType targetType = conversionExpr.targetType;`
    // — it was never read.
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda with the enclosing package, avoiding duplicates.
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
/**
 * Desugars an arrow function into a full lambda: synthesizes a BLangFunction
 * carrying the arrow function's parameters, a return type taken from the body
 * expression's type and a block body built from that expression, creates and
 * defines its function symbol, then registers and rewrites the lambda.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Synthesize the backing function node and wrap it in a lambda node.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type is the type of the arrow function's body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);
    // Create and define the function symbol in a fresh function environment.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                                                               new Name(funcNode.name.value),
                                                               new Name(funcNode.name.originalValue),
                                                               env.enclPkg.symbol.pkgID,
                                                               bLangArrowFunction.funcType,
                                                               env.enclEnv.enclVarSym, true,
                                                               bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Define each parameter symbol in the function scope while collecting them.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    // Construct the invokable type from the collected parameter types.
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    lambdaFunction.capturedClosureEnv = env;
    // Rewrite the synthesized function and register it with the package.
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    result = rewriteExpr(lambdaFunction);
}
// Wires the function symbol into the invokable node, gives the symbol a fresh
// scope, and makes that scope the invokable environment's scope.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // Qualified names need no desugaring; pass through unchanged.
    result = xmlQName;
}

@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Rewrite both the attribute name and its value expression in place.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    // Rewrite the tag names, children and attributes of the literal.
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Collect namespace-declaration attributes (xmlns) into the literal's
    // inline-namespace list as XMLNS nodes.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        BLangXMLNS xmlns;
        // Package-owned scopes get package-level namespace nodes; otherwise local.
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    // Rewrite each item of the sequence in place.
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}

@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single concatenation expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}

@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment's text fragments into one concatenation expression.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}

@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Rewrite the PI target and fold its data fragments into a concatenation.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}

@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Fold the quoted string's fragments into one concatenation expression.
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}

@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is just the concatenation of its parts.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 * string name = "Pubudu";
 * 'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 * type RawTemplate$Impl$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 *
 *
 * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
    // Generate the concrete object class (strings baked in as a field default).
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();
    // Build the list of insertion expressions passed to init().
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;
    // `new GeneratedClass(insertions)` — the list is registered as the sole
    // argument on the init invocation in all three argument lists.
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 * type $anonType$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    // Create a class symbol/type mirroring the abstract object type's fields.
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
                                                               env.enclPkg.packageID, null, env.enclPkg.symbol,
                                                               pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
    // Bake the string portions in as the default value of the `strings` field
    // (assumed to be the class's first field — see classDef.fields.get(0)).
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;
    // Create the user-defined init() and register it at package level.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
    // Create + analyze the generated initializer, then register it too.
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                                                                     tempGeneratedInitFunction.symbol.scope, env);
    SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
    return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    // Skeleton init() with a nil return type, attached to the class symbol.
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                                                                    names, Names.USER_DEFINED_INIT_SUFFIX,
                                                                    symTable, classDefn.getBType());
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
                                                       (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);
    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    // One init() parameter per field WITHOUT a default value; each parameter's
    // value is assigned to its field inside the init body.
    for (BLangSimpleVariable field : classDefn.fields) {
        if (field.expr != null) {
            // Field already has a default value — no parameter needed.
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                                             initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                                                                  fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        // Register the parameter on the symbol, the invokable type and the AST.
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);
        // Body statement: self.<field> = <param>;
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                                                            initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }
    return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Clone the sent value so the receiving worker gets an isolated copy.
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
    result = workerSendNode;
}

@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Same cloning as the async send, applied to the synchronous send expression.
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
    result = syncSendExpr;
}

@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // A receive needs no desugaring here; pass through unchanged.
    result = workerReceiveNode;
}

@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Derive the distinct worker names targeted by the cached send statements.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // `transactional` desugars to a no-arg call to the internal transaction
    // module's isTransactional function.
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
    result = ASTBuilderUtil
            .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
                                        Collections.emptyList(), symResolver);
}

@Override
public void visit(BLangCommitExpr commitExpr) {
    // Transaction desugaring expands `commit` into a statement expression.
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangFail failNode) {
    // Inside an on-fail scope (and unless fail rewriting is suppressed) the
    // failure is routed to the on-fail handler; otherwise `fail` becomes a
    // plain return of the (rewritten) error value.
    if (this.onFailClause != null && !this.skipFailStmtRewrite) {
        if (this.onFailClause.bodyContainsFail) {
            // On-fail bodies that themselves contain `fail` need nested expansion.
            result = rewriteNestedOnFail(this.onFailClause, failNode);
        } else {
            result = createOnFailInvocation(onFailClause, failNode);
        }
    } else {
        BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        stmt.desugared = true;
        result = stmt;
    }
}
// The visitors below handle node kinds that require no further desugaring at
// this point; each one simply passes the node through unchanged.
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}

@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}

@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}

@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}

@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}

@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}

@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}

@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
    result = tableKeyAccessExpr;
}

@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}

@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}

@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // Element access desugars to the internal xml getElements(expr, filters...) call.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> nameFilters = expandFilters(xmlElementAccess.filters);
    result = rewriteExpr(createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), nameFilters));
}
/**
 * Converts XML element filters into string-literal arguments for the XML
 * langlib helpers, expanding each filter name with its namespace URI where
 * one can be resolved.
 *
 * @param filters the element filters from the navigation/element access
 * @return the expanded filter names as string literal expressions
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    // Resolve the in-scope default (un-prefixed) namespace, if any.
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            // No prefix match: fall back to the default namespace, except for
            // the `*` wildcard which is passed through as-is.
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            // Prefix resolved: expand with the bound namespace URI.
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}
/**
 * Builds an invocation of an xml lang-lib method on the given expression.
 * Note that the (rewritten) receiver expression is set both as the invocation
 * target and as the first required argument.
 *
 * @param pos          source position for the generated invocation
 * @param functionName the lang-lib function name to call
 * @param invokeOnExpr the xml expression the method is invoked on
 * @param args         additional required arguments
 * @param restArgs     rest arguments (e.g. filter names)
 * @return the typed lang-lib invocation node
 */
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = invokeOnExpr;
    // Resolve the lang-lib method symbol against the xml type.
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);
    // Receiver goes first in the required-arg list, followed by the extra args.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    // Each navigation form lowers to a dedicated internal xml lang-lib call.
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    switch (xmlNavigation.navAccessType) {
        case DESCENDANTS:
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters));
            break;
        case CHILDREN:
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                    xmlNavigation.expr, new ArrayList<>(), new ArrayList<>()));
            break;
        default:
            // Filtered child access: a missing child index is encoded as -1 —
            // presumably meaning "no index filter"; confirm against the runtime.
            BLangExpression childIndexExpr = xmlNavigation.childIndex == null
                    ? new BLangLiteral(Long.valueOf(-1), symTable.intType)
                    : xmlNavigation.childIndex;
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters));
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the lhs expression needs rewriting.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}

@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already in final form; pass through unchanged.
    result = fpInvocation;
}

@Override
public void visit(BLangTypedescExpr typedescExpr) {
    // Rewrite the referenced type node in place.
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}

@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // A rest-args wrapper reduces to its (rewritten) inner expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}

@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named-arg wrapper also reduces to its (rewritten) inner expression.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
// Desugars a match expression into a statement-expression of the shape:
//   { T $temp_result$; match <expr> { pattern_i => $temp_result$ = <expr_i>; } } -> $temp_result$
addMatchExprDefaultCase(bLangMatchExpression);
// Temp variable that every pattern clause assigns its result into.
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar =
ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName,
bLangMatchExpression.getBType(), null,
new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID,
bLangMatchExpression.getBType(),
this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL));
BLangSimpleVariableDef tempResultVarDef =
ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
tempResultVarDef.desugared = true;
BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
// Convert each expression pattern clause into a match-statement pattern whose
// body assigns the (converted) clause expression to the temp variable.
for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
pattern.expr = rewriteExpr(pattern.expr);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
// Insert a conversion so the clause value matches the temp variable's type.
pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.getBType());
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
}
stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
patternClauses));
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
// The whole block evaluates to the temp variable; rewrite the combined result.
BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
statementExpr.setBType(bLangMatchExpression.getBType());
result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
// `check expr` — errors are returned to the caller (isCheckPanic = false).
visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
// `checkpanic expr` — errors cause a panic (isCheckPanic = true).
visitCheckAndCheckPanicExpr(checkedExpr, true);
}
// Shared desugar for `check` and `checkpanic`: rewrites the expression into a
// statement-expression holding a match statement with a success pattern (assigns
// the value to a temp variable) and an error pattern (returns or panics).
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
// No equivalent error types means the check can never fail; keep just the inner expression.
if (checkedExpr.equivalentErrorTypeList == null) {
result = rewriteExpr(checkedExpr.expr);
return;
}
String checkedExprVarName = GEN_VAR_PREFIX.value;
BLangSimpleVariable checkedExprVar =
ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.getBType(), null,
new BVarSymbol(0, names.fromString(checkedExprVarName),
this.env.scope.owner.pkgID, checkedExpr.getBType(),
this.env.scope.owner, checkedExpr.pos, VIRTUAL));
BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
checkedExprVarDef.desugared = true;
// Success pattern: bind the non-error value to the temp variable.
BLangMatchTypedBindingPatternClause patternSuccessCase =
getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
checkedExprVar.symbol, null);
// Error pattern: return the error to the caller, or panic for `checkpanic`.
BLangMatchTypedBindingPatternClause patternErrorCase =
getSafeAssignErrorPattern(checkedExpr.pos, this.env.enclInvokable.symbol,
checkedExpr.equivalentErrorTypeList, isCheckPanic);
BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
new ArrayList<BLangMatchTypedBindingPatternClause>() {{
add(patternSuccessCase);
add(patternErrorCase);
}});
BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
new ArrayList<BLangStatement>() {{
add(checkedExprVarDef);
add(matchStmt);
}});
BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
checkedExpr.pos, checkedExprVar.symbol);
BLangStatementExpression statementExpr = createStatementExpression(
generatedStmtBlock, tempCheckedExprVarRef);
statementExpr.setBType(checkedExpr.getBType());
result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
// A service constructor desugars to a plain object instantiation of the
// generated service class; annotations on the service are rewritten too.
final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
// Desugar the anonymous class body first, then its annotations, and finally
// reduce the whole expression to the generated type-init (`new`) expression.
visit(bLangObjectConstructorExpression.classNode);
bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
// Annotation access (`expr.@annot`) desugars to a synthetic binary expression
// with the ANNOT_ACCESS operator: lhs = the typedesc expression, rhs = the
// annotation's string alias. The backend resolves the operator.
BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
binaryExpr.pos = annotAccessExpr.pos;
binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
binaryExpr.lhsExpr = annotAccessExpr.expr;
binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
annotAccessExpr.annotationSymbol.bvmAlias());
binaryExpr.setBType(annotAccessExpr.getBType());
// Synthetic operator symbol; it has no package owner (null) by construction.
binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
binaryExpr.rhsExpr.getBType()),
annotAccessExpr.getBType(), null), null,
symTable.builtinPos, VIRTUAL);
result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
BLangExpression expr = typeTestExpr.expr;
// Value-typed operands are boxed to `any` so the runtime type test works uniformly.
if (types.isValueType(expr.getBType())) {
expr = addConversionExprIfRequired(expr, symTable.anyType);
}
// `expr !is T` desugars to `!(expr is T)`: wrap the positive test in a group
// expression and negate it with a unary NOT.
if (typeTestExpr.isNegation) {
BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
typeTestExpr.expr, typeTestExpr.typeNode);
BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
bLangGroupExpr.expression = bLangTypeTestExpr;
bLangGroupExpr.setBType(typeTestExpr.getBType());
BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
typeTestExpr.getBType(),
OperatorKind.NOT, null);
result = rewriteExpr(unaryExpr);
return;
}
typeTestExpr.expr = rewriteExpr(expr);
typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
result = typeTestExpr;
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
// Only the tested expression needs rewriting; the is-like node passes through.
isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
// Rewrite the value expression first, then the attached statement block.
bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
// Query desugaring handles `fail` statements itself, so suppress the normal
// fail-statement rewrite while the query body is processed, restoring it after.
boolean prevSkipFailStmtRewrite = this.skipFailStmtRewrite;
this.skipFailStmtRewrite = true;
BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env);
result = rewrite(stmtExpr, env);
this.skipFailStmtRewrite = prevSkipFailStmtRewrite;
}
@Override
public void visit(BLangQueryAction queryAction) {
// Same handling as query expressions: delegate to queryDesugar with the
// fail-statement rewrite temporarily suppressed.
boolean prevSkipFailStmtRewrite = this.skipFailStmtRewrite;
this.skipFailStmtRewrite = true;
BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env);
result = rewrite(stmtExpr, env);
this.skipFailStmtRewrite = prevSkipFailStmtRewrite;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
// Rewrite each member expression in place; the literal node itself is kept.
jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
BConstantSymbol constSymbol = constant.symbol;
BType refType = Types.getReferredType(constSymbol.literalType);
// For simple literal types (tags up to BOOLEAN, plus NIL) the constant's
// resolved value replaces the original expression with a fresh literal node.
if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
// A non-nil simple constant must have a resolved value by this phase.
if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
constSymbol.value.value == null)) {
throw new IllegalStateException();
}
BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
constSymbol.value.value);
constant.expr = rewriteExpr(literal);
} else {
// Structured constants keep their original expression, rewritten normally.
constant.expr = rewriteExpr(constant.expr);
}
constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
// Placeholder for an intentionally-omitted argument; nothing to desugar.
result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
// Rewrite both the conditionally-passed argument and its guard condition.
dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
// A constant reference desugars to a literal carrying the constant's value.
result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
// Builds `var $iterator$ = collection.iterator();` for foreach desugaring:
// an invocation of the collection's iterator function, bound to a fresh
// `$iterator$` variable definition at `pos`.
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
BInvokableSymbol iteratorInvokableSymbol,
boolean isIteratorFuncFromLangLib) {
BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
iteratorInvocation.pos = pos;
iteratorInvocation.expr = dataReference;
iteratorInvocation.symbol = iteratorInvokableSymbol;
iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
// The collection itself is passed as the (only) required argument.
iteratorInvocation.argExprs = Lists.of(dataReference);
iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
// Builds `var $result$ = $iterator$.next();` — the per-iteration step of
// foreach desugaring. `nillableResultType` is the next() return type
// (record-or-nil), bound to `resultSymbol`.
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
BVarSymbol iteratorSymbol,
BVarSymbol resultSymbol) {
BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
nillableResultType, nextInvocation, resultSymbol);
return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
// Builds the `$iterator$.next()` invocation node, resolving the `next` method
// from the iterator object's attached functions.
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
BInvokableSymbol nextFuncSymbol =
getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;
BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
nextInvocation.pos = pos;
nextInvocation.name = nextIdentifier;
nextInvocation.expr = iteratorReferenceInNext;
// The iterator is both the receiver and the first required argument.
nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
nextInvocation.argExprs = nextInvocation.requiredArgs;
nextInvocation.symbol = nextFuncSymbol;
nextInvocation.setBType(nextFuncSymbol.retType);
return nextInvocation;
}
// Finds the attached function named "next" on the given iterator object type,
// or null when the object declares no such method.
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals("next")) {
            return attachedFunc;
        }
    }
    return null;
}
// Convenience wrapper: builds `resultVar.value` field access (the iterator
// result record's `value` field) typed as `varType`.
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
BVarSymbol resultSymbol) {
return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
// Builds a `resultVar.<fieldName>` field-access expression at `pos`, typed as
// `varType`; originalType is kept in sync with the assigned type.
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
BVarSymbol resultSymbol) {
BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
BLangFieldBasedAccess fieldBasedAccessExpression =
ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
fieldBasedAccessExpression.pos = pos;
fieldBasedAccessExpression.setBType(varType);
fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
return fieldBasedAccessExpression;
}
// Converts an arrow function's expression body into a block body containing a
// single `return <expr>;` statement, positioned at the expression.
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
returnNode.pos = bLangArrowFunction.body.expr.pos;
returnNode.setExpression(bLangArrowFunction.body.expr);
blockNode.addStatement(returnNode);
return blockNode;
}
// Builds an invocation of a function resolved from the root scope by name,
// with the given required args and declared return type.
protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue(functionName);
invocationNode.name = name;
invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
// Resolve the callee from the root (builtin) scope by simple name.
invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
invocationNode.setBType(retType);
invocationNode.requiredArgs = args;
return invocationNode;
}
// Builds a langlib method call `onExpr.functionName(args...)`. The receiver is
// prepended to the required args (langlib convention). When retType is null the
// resolved function's declared return type is used.
private BLangInvocation createLangLibInvocationNode(String functionName,
BLangExpression onExpr,
List<BLangExpression> args,
BType retType,
Location pos) {
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue(functionName);
name.pos = pos;
invocationNode.name = name;
invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
invocationNode.expr = onExpr;
// Resolve against the receiver's type so the right langlib module is chosen.
invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);
ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
requiredArgs.add(onExpr);
requiredArgs.addAll(args);
invocationNode.requiredArgs = requiredArgs;
invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
invocationNode.langLibInvocation = true;
return invocationNode;
}
// Builds a receiver-less invocation of a function in the lang.__internal module.
// When retType is null the resolved function's declared return type is used.
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Resolve from the lang.__internal module rather than a receiver type.
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    // Copy-constructor instead of new-then-addAll: same contents, one allocation-and-fill step.
    invocationNode.requiredArgs = new ArrayList<>(args);
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
// Creates a fresh, empty array-literal node typed as any[].
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(new BArrayType(symTable.anyType));
    arrayLiteral.exprs = new ArrayList<>();
    return arrayLiteral;
}
// Desugars an invocation through a function pointer: the callee becomes either a
// simple var-ref (`fp(...)`) or a field access (`obj.fp(...)`), which is rewritten
// and wrapped in a BFunctionPointerInvocation node.
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
BLangValueExpression expr;
if (iExpr.expr == null) {
expr = new BLangSimpleVarRef();
} else {
BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
fieldBasedAccess.expr = iExpr.expr;
fieldBasedAccess.field = iExpr.name;
expr = fieldBasedAccess;
}
expr.symbol = iExpr.symbol;
expr.setBType(iExpr.symbol.type);
BLangExpression rewritten = rewriteExpr(expr);
result = new BFunctionPointerInvocation(iExpr, rewritten);
}
// Wraps `expr` in a langlib `clone()` call, converting the result to `lhsType`.
// Value-typed and error-typed expressions are returned unchanged (no clone call).
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvocation =
            createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneInvocation, lhsType);
}
// Wraps `expr` in a langlib `cloneReadOnly()` call, converting the result to
// `lhsType`. Value-typed and error-typed expressions are returned unchanged.
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvocation =
            createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), exprType, expr.pos);
    return addConversionExprIfRequired(cloneInvocation, lhsType);
}
@SuppressWarnings("unchecked")
// Core desugar dispatch: visits `node` under `env`, returns whatever the visit
// stored in `this.result`, and marks it desugared so it is never rewritten twice.
// The previous env is restored afterwards (visits may nest).
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
if (node == null) {
return null;
}
// Idempotence guard: already-desugared nodes pass through untouched.
if (node.desugared) {
return node;
}
SymbolEnv previousEnv = this.env;
this.env = env;
node.accept(this);
BLangNode resultNode = this.result;
this.result = null;
resultNode.desugared = true;
this.env = previousEnv;
return (E) resultNode;
}
@SuppressWarnings("unchecked")
// Expression variant of rewrite(): additionally unwraps a pending implicit
// conversion (impConversionExpr) so the conversion wrapper is what gets visited,
// and clears it to avoid applying the conversion twice.
<E extends BLangExpression> E rewriteExpr(E node) {
if (node == null) {
return null;
}
if (node.desugared) {
return node;
}
BLangExpression expr = node;
if (node.impConversionExpr != null) {
expr = node.impConversionExpr;
node.impConversionExpr = null;
}
expr.accept(this);
BLangNode resultNode = this.result;
this.result = null;
resultNode.desugared = true;
return (E) resultNode;
}
@SuppressWarnings("unchecked")
// Statement variant of rewrite(): delegates to the generic node rewrite and
// casts the result back to the statement type.
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    return statement == null ? null : (E) rewrite((BLangNode) statement, env);
}
// Rewrites every statement in the list in place and returns the same list.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
// Rewrites every node in the list in place and returns the same list.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
// Rewrites every expression in the list in place and returns the same list.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
// Builds a string-typed literal node positioned at `pos`.
private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
// Builds an int-typed literal node; no source position is attached here.
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.setBType(symTable.intType);
    intLiteral.value = value;
    return intLiteral;
}
// Builds a byte-typed literal at `pos`; the (signed) Java byte is stored as its
// unsigned int value via Byte.toUnsignedInt.
private BLangLiteral createByteLiteral(Location pos, Byte value) {
BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
byteLiteral.pos = pos;
return byteLiteral;
}
// Wraps `expr` in a type-conversion node targeting `targetType`. When the
// expression already has exactly the target type, no wrapper is added.
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
if (types.isSameType(expr.getBType(), targetType)) {
return expr;
}
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
conversionExpr.pos = expr.pos;
conversionExpr.expr = expr;
conversionExpr.setBType(targetType);
conversionExpr.targetType = targetType;
// Marked internal: this cast was generated by desugaring, not written by the user.
conversionExpr.internal = true;
return conversionExpr;
}
// Returns the innermost non-array element type of `bType`, resolving type
// references at each nesting level (iterative form of the recursive walk).
private BType getElementType(BType bType) {
    BType current = bType;
    BType referred = Types.getReferredType(current);
    while (referred.tag == TypeTags.ARRAY) {
        current = ((BArrayType) referred).getElementType();
        referred = Types.getReferredType(current);
    }
    return current;
}
// Appends an implicit `return;` to a block-bodied function whose return type
// allows nil and whose body does not already end with a return statement.
// Native functions and non-block bodies are left untouched.
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    // isEmpty() instead of size() < 1 — same check, idiomatic form.
    if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.isEmpty()
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        Location invPos = invokableNode.pos;
        Location returnStmtPos;
        // Generated init functions get no position; others point at the end of the function.
        if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            returnStmtPos = null;
        } else {
            returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().startLine().offset(),
                    invPos.lineRange().startLine().offset(), 0, 0);
        }
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature,
 * materializing defaults, spreading named args, and packing positional rest
 * arguments (including a final {@code ...vararg}) into the rest parameter.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
BSymbol symbol = iExpr.symbol;
// Only invokable callees need reordering (e.g. error constructors are skipped).
if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
return;
}
BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
List<BLangExpression> restArgs = iExpr.restArgs;
int originalRequiredArgCount = iExpr.requiredArgs.size();
// When a vararg (`...x`) is present and required params remain unfilled, the
// vararg expression is evaluated once into a temp var so it can be indexed
// repeatedly without re-evaluation.
BLangSimpleVarRef varargRef = null;
BLangBlockStmt blockStmt = null;
BType varargVarType = null;
int restArgCount = restArgs.size();
if (restArgCount > 0 &&
restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
originalRequiredArgCount < invokableSymbol.params.size()) {
BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
Location varargExpPos = expr.pos;
varargVarType = expr.getBType();
String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
varDef.var = var;
varDef.setBType(varargVarType);
blockStmt = createBlockStmt(varargExpPos);
blockStmt.stmts.add(varDef);
}
// Fill each declared parameter slot from positional/named args (and the vararg).
if (!invokableSymbol.params.isEmpty()) {
reorderNamedArgs(iExpr, invokableSymbol, varargRef);
}
// Case 1: no trailing `...vararg` — pack any positional rest args into an
// array literal for the rest parameter.
if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
if (invokableSymbol.restParam == null) {
return;
}
BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
List<BLangExpression> exprs = new ArrayList<>();
BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
BType elemType = arrayType.eType;
for (BLangExpression restArg : restArgs) {
exprs.add(addConversionExprIfRequired(restArg, elemType));
}
arrayLiteral.exprs = exprs;
arrayLiteral.setBType(arrayType);
if (restArgCount != 0) {
iExpr.restArgs = new ArrayList<>();
}
iExpr.restArgs.add(arrayLiteral);
return;
}
// Case 2: the only rest arg is `...vararg`. If params were filled from it,
// thread the vararg temp-var block in front of the first arg, then copy the
// leftover vararg tail into the rest parameter via a generated foreach+push.
if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
return;
}
// Attach the vararg temp-var definition to the first required arg so it is
// evaluated before any parameter reads from it.
BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
stmtExpression.setBType(firstNonRestArg.getBType());
iExpr.requiredArgs.add(0, stmtExpression);
if (invokableSymbol.restParam == null) {
restArgs.remove(0);
return;
}
BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
// A record-typed vararg contributes only named values; the rest param gets
// an empty array.
if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
restArgs.add(expr);
return;
}
Location pos = restArgsExpression.pos;
BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
newArrayLiteral.setBType(restParamType);
String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
restParamType, this.env.scope.owner, pos, VIRTUAL);
BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
varDef.var = var;
varDef.setBType(restParamType);
// Iterate from the first vararg index not consumed by declared params up to
// the vararg's length, pushing each member into the new rest array.
BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
getModifiedIntRangeEndExpr(lengthInvocation));
BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
foreach.pos = pos;
foreach.collection = intRangeInvocation;
types.setForeachTypedBindingPatternType(foreach);
final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
foreach.varType);
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
this.env.scope.owner.pkgID, foreachVariable.getBType(),
this.env.scope.owner, pos, VIRTUAL);
BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
foreach.isDeclaredWithVar = true;
BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
BType refType = Types.getReferredType(varargVarType);
if (refType.tag == TypeTags.ARRAY) {
BArrayType arrayType = (BArrayType) refType;
if (arrayType.state == BArrayState.CLOSED &&
arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
// A closed array fully consumed by params: members indexed here would be
// past its end, so type the access by the rest param's element type.
valueExpr.setBType(restParamType.eType);
} else {
valueExpr.setBType(arrayType.eType);
}
} else {
valueExpr.setBType(symTable.anyOrErrorType);
}
BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
List.of(pushExpr),
restParamType, pos);
// push() takes the value as a rest arg, not a required arg.
pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
expressionStmt.expr = pushInvocation;
foreach.body = foreachBody;
BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
newArrayBlockStmt.addStatement(varDef);
newArrayBlockStmt.addStatement(foreach);
BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
newArrayStmtExpression.setBType(restParamType);
restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
return;
}
// Case 3: positional rest args followed by `...vararg` — build an array from
// the leading args, then push the vararg's members onto it.
BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteral.setBType(restParamType);
BType elemType = restParamType.eType;
Location pos = restArgs.get(0).pos;
List<BLangExpression> exprs = new ArrayList<>();
for (int i = 0; i < restArgCount - 1; i++) {
exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
}
arrayLiteral.exprs = exprs;
BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
pushRestArgsExpr.pos = pos;
pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
this.env.scope.owner, pos, VIRTUAL);
BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
varDef.var = var;
varDef.setBType(restParamType);
BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
pushBlockStmt.stmts.add(varDef);
BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
new ArrayList<BLangExpression>() {{
add(pushRestArgsExpr);
}}, restParamType, pos);
pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
expressionStmt.expr = pushInvocation;
BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
stmtExpression.setBType(restParamType);
iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
// Fills each declared parameter slot in order from, in priority: a positional
// arg, a matching named arg, an included-record param literal, or the vararg
// (record-typed varargs by field name, list-typed varargs by index). Unmatched
// slots with no vararg get an ignore-expression (use-default marker). Leftover
// named args are distributed into the included-record literals afterwards.
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
List<BLangExpression> args = new ArrayList<>();
Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
iExpr.requiredArgs.stream()
.filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
.forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
List<BVarSymbol> params = invokableSymbol.params;
List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
int varargIndex = 0;
BType varargType = null;
boolean tupleTypedVararg = false;
if (varargRef != null) {
varargType = varargRef.getBType();
tupleTypedVararg = Types.getReferredType(varargType).tag == TypeTags.TUPLE;
}
for (int i = 0; i < params.size(); i++) {
BVarSymbol param = params.get(i);
if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
// Positional argument supplied for this slot.
args.add(iExpr.requiredArgs.get(i));
} else if (namedArgs.containsKey(param.name.value)) {
// Named argument matches this parameter; consume it.
args.add(namedArgs.remove(param.name.value));
} else if (param.getFlags().contains(Flag.INCLUDED)) {
// Included-record parameter: start an empty record literal; leftover named
// args may be added to it later (see setFieldsForIncRecordLiterals).
BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
BType paramType = param.type;
recordLiteral.setBType(paramType);
args.add(recordLiteral);
incRecordLiterals.add(recordLiteral);
if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
incRecordParamAllowAdditionalFields = recordLiteral;
}
} else if (varargRef == null) {
// No value supplied: ignore-expression signals "use the default".
BLangExpression expr = new BLangIgnoreExpr();
expr.setBType(param.type);
args.add(expr);
} else {
if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
if (param.isDefaultable) {
// Defaultable param from a record vararg: pass the field only if the
// record actually has the key, otherwise fall back to the default
// (hasKey ? rec[name] : ignore), wrapped as a dynamic param.
BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
BLangIndexBasedAccess memberAccessExpr =
ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
hasKeyInvocation, memberAccessExpr, ignoreExpr);
args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
} else {
// Required param from a record vararg: direct field access.
BLangFieldBasedAccess fieldBasedAccessExpression =
ASTBuilderUtil.createFieldAccessExpr(varargRef,
ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
fieldBasedAccessExpression.setBType(param.type);
args.add(fieldBasedAccessExpression);
}
} else {
// List-typed vararg: take the next member by index, converting to the
// parameter's type (tuple member type or array element type).
BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
BType memberAccessExprType = tupleTypedVararg ?
((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
varargRef, indexExpr), param.type));
varargIndex++;
}
}
}
if (namedArgs.size() > 0) {
setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
}
iExpr.requiredArgs = args;
}
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
List<BLangRecordLiteral> incRecordLiterals,
BLangRecordLiteral incRecordParamAllowAdditionalFields) {
for (String name : namedArgs.keySet()) {
boolean isAdditionalField = true;
BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);
for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
LinkedHashMap<String, BField> fields =
((BRecordType) Types.getReferredType(recordLiteral.getBType())).fields;
if (fields.containsKey(name) &&
Types.getReferredType(fields.get(name).type).tag != TypeTags.NEVER) {
isAdditionalField = false;
createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);
break;
}
}
if (isAdditionalField) {
createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);
}
}
}
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
BLangNamedArgsExpression expr) {
BLangSimpleVarRef varRef = new BLangSimpleVarRef();
varRef.variableName = expr.name;
BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.
createBLangRecordKeyValue(varRef, expr.expr);
recordLiteral.fields.add(recordKeyValueField);
}
    /**
     * Creates the error binding pattern (`error $t_failure => ...`) used when desugaring a
     * checked expression. The generated failure branch either fails/returns the error (when
     * it is not a `checkpanic` and the error can be handled or returned) or panics.
     *
     * @param location             position used for all generated nodes
     * @param invokableSymbol      enclosing invokable; its return type decides whether the
     *                             error can be propagated via `return`
     * @param equivalentErrorTypes error types the checked expression may produce
     * @param isCheckPanicExpr     true for `checkpanic`, which always panics on error
     * @return the typed binding pattern clause for the error case
     */
    private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(Location location,
                                                                          BSymbol invokableSymbol,
                                                                          List<BType> equivalentErrorTypes,
                                                                          boolean isCheckPanicExpr) {
        BType enclosingFuncReturnType = types.
                getReferredType(((BInvokableType) invokableSymbol.type).retType);
        Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
                ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
                new LinkedHashSet<BType>() {{
                    add(enclosingFuncReturnType);
                }};
        // The error can be returned only if every possible error type is assignable to some
        // member of the enclosing function's return type.
        boolean returnOnError = equivalentErrorTypes.stream()
                .allMatch(errorType -> returnTypeSet.stream()
                        .anyMatch(retType -> types.isAssignable(errorType, retType)));
        String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
        BLangSimpleVariable patternFailureCaseVar =
                ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType, null,
                        new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
                                this.env.scope.owner.pkgID, symTable.errorType,
                                this.env.scope.owner, location, VIRTUAL));
        BLangVariableReference patternFailureCaseVarRef =
                ASTBuilderUtil.createVariableRef(location, patternFailureCaseVar.symbol);
        BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
        patternBlockFailureCase.pos = location;
        // Fail (rather than panic) when the error is returnable or an on-fail clause can catch it.
        if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
            BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
            failStmt.pos = location;
            failStmt.expr = patternFailureCaseVarRef;
            patternBlockFailureCase.stmts.add(failStmt);
            if (returnOnError && this.shouldReturnErrors) {
                // Attach a desugared `return <error>` so the error propagates out of the function.
                BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location,
                        rewrite(patternFailureCaseVarRef, env));
                errorReturn.desugared = true;
                failStmt.exprStmt = errorReturn;
            }
        } else {
            // `checkpanic`, or an error that cannot be handled: panic with the error value.
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.pos = location;
            panicNode.expr = patternFailureCaseVarRef;
            patternBlockFailureCase.stmts.add(panicNode);
        }
        return ASTBuilderUtil.createMatchStatementPattern(location, patternFailureCaseVar, patternBlockFailureCase);
    }
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(Location location,
BType lhsType, boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
BLangSimpleVariable patternSuccessCaseVar =
ASTBuilderUtil.createVariable(location, patternSuccessCaseVarName, lhsType, null,
new BVarSymbol(0, names.fromString(patternSuccessCaseVarName),
this.env.scope.owner.pkgID, lhsType,
this.env.scope.owner, location, VIRTUAL));
BLangExpression varRefExpr;
if (isVarDef) {
varRefExpr = ASTBuilderUtil.createVariableRef(location, varSymbol);
} else {
varRefExpr = lhsExpr;
}
BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(location,
patternSuccessCaseVar.symbol);
BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(location,
varRefExpr, patternSuccessCaseVarRef);
BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(location,
new ArrayList<BLangStatement>() {{
add(assignmentStmtSuccessCase);
}});
return ASTBuilderUtil.createMatchStatementPattern(location,
patternSuccessCaseVar, patternBlockSuccessCase);
}
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;
BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
BLangIf currentIfNode = parentIfNode;
for (int i = 1; i < patterns.size(); i++) {
BLangMatchBindingPatternClause patternClause = patterns.get(i);
if (i == patterns.size() - 1 && patternClause.isLastPattern) {
currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar);
} else {
currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
currentIfNode = (BLangIf) currentIfNode.elseStmt;
}
}
return parentIfNode;
}
    /**
     * Generate an if-else statement from the given match pattern clause.
     *
     * @param pattern match pattern statement node
     * @param matchExprVar variable node of the match expression
     * @return if else statement node
     */
    private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
        BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
        if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
            // Typed patterns only need the pattern variable bound at the top of the body.
            BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
            return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
        }
        BType expectedType = matchExprVar.getBType();
        if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
            BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
            expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
        }
        if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) pattern;
            // Cast the matched value to the pattern's synthesized type before destructuring it.
            BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
            BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
            structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
            BLangStatement varDefStmt;
            if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                        (BLangTupleVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                        (BLangRecordVariable) structuredPattern.bindingPatternVariable);
            } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
                varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                        (BLangErrorVariable) structuredPattern.bindingPatternVariable);
            } else {
                varDefStmt = ASTBuilderUtil
                        .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
            }
            if (structuredPattern.typeGuardExpr != null) {
                // With a type guard, evaluate the binding definitions inside a statement
                // expression and AND the guard into the if condition, so the bound names are
                // in scope while the guard is evaluated.
                BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
                blockStmt.addStatement(varDef);
                blockStmt.addStatement(varDefStmt);
                BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                        structuredPattern.typeGuardExpr);
                stmtExpr.setBType(symTable.booleanType);
                ifCondition = ASTBuilderUtil
                        .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                                (BOperatorSymbol) symResolver
                                        .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                                symTable.booleanType));
            } else {
                // No guard: prepend the cast and the binding definitions to the pattern body.
                structuredPattern.body.stmts.add(0, varDef);
                structuredPattern.body.stmts.add(1, varDefStmt);
            }
        }
        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
    }
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
BLangSimpleVariable matchExprVar) {
BLangBlockStmt body;
BLangMatchTypedBindingPatternClause patternClause = (BLangMatchTypedBindingPatternClause) pattern;
if (patternClause.variable.name.value.equals(Names.IGNORE.value)) {
return patternClause.body;
}
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(patternClause.pos,
matchExprVar.symbol);
BLangExpression patternVarExpr = addConversionExprIfRequired(matchExprVarRef,
patternClause.variable.getBType());
BLangSimpleVariable patternVar =
ASTBuilderUtil.createVariable(patternClause.pos, "", patternClause.variable.getBType(),
patternVarExpr, patternClause.variable.symbol);
BLangSimpleVariableDef patternVarDef = ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar);
patternClause.body.stmts.add(0, patternVarDef);
body = patternClause.body;
return body;
}
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
BLangSimpleVariable matchExprVar) {
BLangBlockStmt body = pattern.body;
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) pattern;
structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
BLangStatement varDefStmt;
if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
(BLangTupleVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
(BLangRecordVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
(BLangErrorVariable) structuredPattern.bindingPatternVariable);
} else {
varDefStmt = ASTBuilderUtil
.createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
}
structuredPattern.body.stmts.add(0, varDefStmt);
body = structuredPattern.body;
}
return body;
}
    /**
     * Wraps {@code expr} in a type conversion to {@code lhsType} when the types differ.
     * Returns the expression unchanged when no conversion is needed (same type, and a few
     * compatible combinations) or when the type checker attached an implicit cast.
     *
     * @param expr    expression to convert
     * @param lhsType target type ({@code TypeTags.NONE} means "no expectation" — skip)
     * @return the original or the conversion-wrapped expression
     */
    BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
        if (lhsType.tag == TypeTags.NONE) {
            return expr;
        }
        BType rhsType = expr.getBType();
        if (types.isSameType(rhsType, lhsType)) {
            return expr;
        }
        // May attach an implicit cast to `expr`; prefer that over an explicit conversion node.
        types.setImplicitCastExpr(expr, rhsType, lhsType);
        if (expr.impConversionExpr != null) {
            BLangExpression impConversionExpr = expr.impConversionExpr;
            expr.impConversionExpr = null;
            return impConversionExpr;
        }
        // No conversion node is needed for the following compatible combinations.
        if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
            return expr;
        }
        if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
            return expr;
        }
        if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
            return expr;
        }
        // Fall back to an explicit internal conversion (type checking already done).
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
                TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = expr;
        conversionExpr.targetType = lhsType;
        conversionExpr.setBType(lhsType);
        conversionExpr.pos = expr.pos;
        conversionExpr.checkTypes = false;
        conversionExpr.internal = true;
        return conversionExpr;
    }
    /**
     * Builds the boolean `if` condition that tests whether the matched value satisfies the
     * given pattern clause. When the pattern's type is a union, one test is generated per
     * member type and the tests are OR-ed together.
     *
     * @param patternClause the pattern clause (static, structured, or typed)
     * @param varSymbol     symbol of the temporary variable holding the matched value
     * @return the condition expression
     */
    private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol) {
        BType patternType;
        switch (patternClause.getKind()) {
            case MATCH_STATIC_PATTERN_CLAUSE:
                BLangMatchStaticBindingPatternClause staticPattern =
                        (BLangMatchStaticBindingPatternClause) patternClause;
                patternType = staticPattern.literal.getBType();
                break;
            case MATCH_STRUCTURED_PATTERN_CLAUSE:
                BLangMatchStructuredBindingPatternClause structuredPattern =
                        (BLangMatchStructuredBindingPatternClause) patternClause;
                patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
                break;
            default:
                BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
                patternType = simplePattern.variable.getBType();
                break;
        }
        BLangExpression binaryExpr;
        BType[] memberTypes;
        BType refType = Types.getReferredType(patternType);
        if (refType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) refType;
            memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
        } else {
            memberTypes = new BType[1];
            memberTypes[0] = patternType;
        }
        if (memberTypes.length == 1) {
            binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        } else {
            // Fold the per-member tests into a single OR expression; later members become the
            // LHS so the final tree is right-leaning over the earlier accumulated result.
            BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
            BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.getBType(), rhsExpr.getBType()));
            for (int i = 2; i < memberTypes.length; i++) {
                lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
                rhsExpr = binaryExpr;
                binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                        symTable.booleanType, OperatorKind.OR,
                        (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                                lhsExpr.getBType(), rhsExpr.getBType()));
            }
        }
        return binaryExpr;
    }
    /**
     * Computes the static type implied by a structured binding pattern variable. Tuple and
     * record patterns synthesize anonymous tuple/record types (records also get a symbol,
     * scope, init function, and a registered type definition); error patterns synthesize an
     * anonymous error type with a matching detail record. Simple variables return their
     * declared type.
     *
     * @param bindingPatternVariable the binding pattern variable (recursed into for members)
     * @return the synthesized (or declared) type of the pattern
     */
    private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
        if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
            List<BType> memberTypes = new ArrayList<>();
            for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
                memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
            }
            BTupleType tupleType = new BTupleType(memberTypes);
            if (tupleVariable.restVariable != null) {
                // The rest binding's type is an array; its element type is the tuple rest type.
                BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
                tupleType.restType = restArrayType.eType;
            }
            return tupleType;
        }
        if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
            BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
            BRecordTypeSymbol recordSymbol =
                    Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                                               env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
                                               VIRTUAL);
            recordSymbol.initializerFunc = createRecordInitFunc();
            recordSymbol.scope = new Scope(recordSymbol);
            // Register the init function under "<typeName>.<initName>" in the record's scope.
            recordSymbol.scope.define(
                    names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                    recordSymbol.initializerFunc.symbol);
            LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
            List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
            for (int i = 0; i < recordVariable.variableList.size(); i++) {
                String fieldNameStr = recordVariable.variableList.get(i).key.value;
                Name fieldName = names.fromString(fieldNameStr);
                // Recurse into the value binding pattern to get the field's type.
                BType fieldType = getStructuredBindingPatternType(
                        recordVariable.variableList.get(i).valueBindingPattern);
                BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
                                                        recordSymbol, bindingPatternVariable.pos, VIRTUAL);
                fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
                typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
                recordSymbol.scope.define(fieldName, fieldSymbol);
            }
            BRecordType recordVarType = new BRecordType(recordSymbol);
            recordVarType.fields = fields;
            // An explicit rest binding keeps its declared rest field type; otherwise the
            // synthesized record is open over anydata.
            recordVarType.restFieldType = recordVariable.restParam != null ?
                        ((BRecordType) recordVariable.restParam.getBType()).restFieldType :
                    symTable.anydataType;
            recordSymbol.type = recordVarType;
            recordVarType.tsymbol = recordSymbol;
            BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                                                                                          recordVarType,
                                                                                          bindingPatternVariable.pos);
            recordTypeNode.initFunction =
                    rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                            env);
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);
            return recordVarType;
        }
        if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
            BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
            BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                    SymTag.ERROR,
                    Flags.PUBLIC,
                    names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                    env.enclPkg.symbol.pkgID,
                    null, null, errorVariable.pos, VIRTUAL);
            BType detailType;
            if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
                // Only a rest detail binding: the generic detail map type suffices.
                detailType = symTable.detailType;
            } else {
                detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                        errorVariable.pos);
                BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
                recordTypeNode.initFunction = TypeDefBuilderHelper
                        .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
                TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
                                                                   recordTypeNode, env);
            }
            BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
            errorTypeSymbol.type = errorType;
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
                                                               createErrorTypeNode(errorType), env);
            return errorType;
        }
        return bindingPatternVariable.getBType();
    }
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
List<BLangSimpleVariable> fieldList = new ArrayList<>();
for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
BVarSymbol symbol = field.valueBindingPattern.symbol;
if (symbol == null) {
symbol = new BVarSymbol(Flags.PUBLIC, names.fromString(field.key.value + "$"),
this.env.enclPkg.packageID, symTable.pureType, null,
field.valueBindingPattern.pos, VIRTUAL);
}
BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
field.valueBindingPattern.pos,
symbol.name.value,
field.valueBindingPattern.getBType(),
field.valueBindingPattern.expr,
symbol);
fieldList.add(fieldVar);
}
return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
BLangSimpleVariable restDetail, int errorNo, Location pos) {
BRecordType detailRecordType = createAnonRecordType(pos);
if (restDetail == null) {
detailRecordType.sealed = true;
}
for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
Name fieldName = names.fromIdNode(detailEntry.key);
BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,
detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);
detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));
detailRecordType.tsymbol.scope.define(fieldName, fieldSym);
}
return detailRecordType;
}
private BRecordType createAnonRecordType(Location pos) {
BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
SymTag.RECORD,
Flags.PUBLIC,
names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
detailRecordTypeSymbol.scope.define(
names.fromString(detailRecordTypeSymbol.name.value + "." +
detailRecordTypeSymbol.initializerFunc.funcName.value),
detailRecordTypeSymbol.initializerFunc.symbol);
BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
detailRecordType.restFieldType = symTable.anydataType;
return detailRecordType;
}
private BAttachedFunction createRecordInitFunc() {
BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
false, symTable.builtinPos, VIRTUAL);
initFuncSymbol.retType = symTable.nilType;
return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, symTable.builtinPos);
}
BLangErrorType createErrorTypeNode(BErrorType errorType) {
BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
errorTypeNode.setBType(errorType);
return errorTypeNode;
}
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
BVarSymbol varSymbol, BType patternType) {
Location pos = patternClause.pos;
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
return createBinaryExpression(pos, varRef, pattern.literal);
}
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
}
if (patternType == symTable.nilType) {
BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
symTable.anyType, symTable.nilType));
} else {
return createIsAssignableExpression(pos, varSymbol, patternType);
}
}
    /**
     * Builds the condition for a static match pattern literal. Group expressions are
     * unwrapped; binary pattern expressions (alternatives) are recursed into and OR-ed;
     * the `_` wildcard matches via an `is any` type test; any other literal compares for
     * equality against the matched value.
     *
     * @param pos        position for generated nodes
     * @param varRef     reference to the matched value
     * @param expression the static pattern expression
     * @return a boolean-typed condition expression
     */
    private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
                                                   BLangExpression expression) {
        BLangBinaryExpr binaryExpr;
        if (NodeKind.GROUP_EXPR == expression.getKind()) {
            return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
        }
        if (NodeKind.BINARY_EXPR == expression.getKind()) {
            // Pattern alternatives: match when either side matches.
            binaryExpr = (BLangBinaryExpr) expression;
            BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
            BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
            binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver
                            .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
        } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
            // `_` matches any value: emit `<value> is any`.
            BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
            anyType.setBType(symTable.anyType);
            anyType.typeKind = TypeKind.ANY;
            return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
        } else {
            binaryExpr = ASTBuilderUtil
                    .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
            BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
                                                                 expression.getBType());
            if (opSymbol == symTable.notFoundSymbol) {
                // No direct equality operator for this pair: fall back to anydata equality.
                opSymbol = symResolver
                        .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
                                                      binaryExpr, env);
            }
            binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
        }
        return binaryExpr;
    }
private BLangIsAssignableExpr createIsAssignableExpression(Location pos,
BVarSymbol varSymbol,
BType patternType) {
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
return ASTBuilderUtil.createIsAssignableExpr(pos, varRef, patternType, symTable.booleanType, names,
symTable.builtinPos);
}
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
varRef.pos = variable.pos;
varRef.variableName = variable.name;
varRef.symbol = variable.symbol;
varRef.setBType(variable.getBType());
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = variable.expr;
assignmentStmt.pos = variable.pos;
assignmentStmt.setVariable(varRef);
return assignmentStmt;
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
BVarSymbol selfSymbol) {
return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
variable.name);
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
BLangIdentifier fieldName) {
BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);
fieldAccess.symbol = fieldSymbol;
fieldAccess.setBType(fieldType);
fieldAccess.isStoreOnCreation = true;
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = expr;
assignmentStmt.pos = function.pos;
assignmentStmt.setVariable(fieldAccess);
SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
return rewrite(assignmentStmt, initFuncEnv);
}
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes;
List<BType> unmatchedTypes = new ArrayList<>();
if (Types.getReferredType(bLangMatchExpression.expr.getBType()).tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) bLangMatchExpression.expr.getBType();
exprTypes = new ArrayList<>(unionType.getMemberTypes());
} else {
exprTypes = Lists.of(bLangMatchExpression.getBType());
}
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
if (this.types.isAssignable(type, pattern.variable.getBType())) {
assignable = true;
break;
}
}
if (!assignable) {
unmatchedTypes.add(type);
}
}
if (unmatchedTypes.isEmpty()) {
return;
}
BType defaultPatternType;
if (unmatchedTypes.size() == 1) {
defaultPatternType = unmatchedTypes.get(0);
} else {
defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
}
String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
BLangSimpleVariable patternMatchCaseVar =
ASTBuilderUtil.createVariable(bLangMatchExpression.pos, patternCaseVarName, defaultPatternType, null,
new BVarSymbol(0, names.fromString(patternCaseVarName),
this.env.scope.owner.pkgID, defaultPatternType,
this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL));
BLangMatchExprPatternClause defaultPattern =
(BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
defaultPattern.variable = patternMatchCaseVar;
defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
defaultPattern.pos = bLangMatchExpression.pos;
bLangMatchExpression.patternClauses.add(defaultPattern);
}
private boolean safeNavigate(BLangAccessExpression accessExpr) {
if (accessExpr.isLValue || accessExpr.expr == null) {
return false;
}
if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
return true;
}
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
return safeNavigate((BLangAccessExpression) accessExpr.expr);
}
return false;
}
    /**
     * Desugars a safe-navigation access chain into a block holding a temp-result variable
     * plus the nested match statements built by {@code handleSafeNavigation}, wrapped in a
     * statement expression whose value is the temp result. Resets the safe-navigation state
     * (match/access stacks, success pattern, pending assignment) afterwards.
     *
     * @param accessExpr the outermost access expression of the chain
     * @return a statement expression evaluating to the navigation result
     */
    private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
        BType originalExprType = accessExpr.getBType();
        String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
        BLangSimpleVariable tempResultVar =
                ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
                        new BVarSymbol(0, names.fromString(matchTempResultVarName),
                                this.env.scope.owner.pkgID, accessExpr.getBType(),
                                this.env.scope.owner, accessExpr.pos, VIRTUAL));
        BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
        // Builds the (nested) match statements and pushes them onto matchStmtStack.
        handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
        // The outermost match statement sits at the bottom of the stack.
        BLangMatch matcEXpr = this.matchStmtStack.firstElement();
        BLangBlockStmt blockStmt =
                ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
        stmtExpression.setBType(originalExprType);
        // Reset safe-navigation state for the next chain.
        this.matchStmtStack = new Stack<>();
        this.accessExprStack = new Stack<>();
        this.successPattern = null;
        this.safeNavigationAssignment = null;
        return stmtExpression;
    }
    /**
     * Recursively desugars one access expression of a safe-navigation chain into a match
     * statement with error/nil clauses (as required by the navigation kind) plus success
     * clause(s), and pushes it onto the match-statement stack. Recurses into the inner
     * access expression first, so the innermost access produces the outermost match.
     *
     * @param accessExpr   the access expression currently being handled
     * @param type         result type of the whole navigation chain
     * @param tempResultVar variable receiving the navigation result
     */
    private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
        if (accessExpr.expr == null) {
            return;
        }
        // Handle the inner (upstream) part of the chain first.
        NodeKind kind = accessExpr.expr.getKind();
        if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
        }
        // Non-safe-nav link in the chain: just fix up its type and feed any pending
        // assignment, without generating a match statement.
        if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
            BType originalType = Types.getReferredType(accessExpr.originalType);
            if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
                accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
            } else {
                accessExpr.setBType(originalType);
            }
            if (this.safeNavigationAssignment != null) {
                this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
            }
            return;
        }
        /*
         * If the field access is a safe navigation, create a match expression.
         * Then chain the current expression as the success-pattern of the parent
         * match expr, if available.
         * eg:
         * x but {                      <--- parent match expr
         *   error e => e,
         *   T t => t.y but {          <--- current expr
         *           error e => e,
         *           R r => r.z
         *         }
         * }
         */
        BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
        boolean isAllTypesRecords = false;
        LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
        BType referredType = Types.getReferredType(accessExpr.expr.getBType());
        if (referredType.tag == TypeTags.UNION) {
            memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
            isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
        }
        // Nil/error clauses consume their member types, leaving only the "value" types
        // for the success clauses below.
        if (accessExpr.nilSafeNavigation) {
            matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
            matchStmt.setBType(type);
            memTypes.remove(symTable.nilType);
        }
        if (accessExpr.errorSafeNavigation) {
            matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
            matchStmt.setBType(type);
            matchStmt.pos = accessExpr.pos;
            memTypes.remove(symTable.errorType);
        }
        BLangMatchTypedBindingPatternClause successPattern = null;
        Name field = getFieldName(accessExpr);
        // No statically-known field name: a single success clause on the full expr type.
        if (field == Names.EMPTY) {
            successPattern = getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
                    accessExpr.errorSafeNavigation);
            matchStmt.patternClauses.add(successPattern);
            pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
            return;
        }
        if (isAllTypesRecords) {
            // One success clause per record member that can carry the field (declares it,
            // or is open), plus a catch-all clause yielding nil.
            for (BType memberType : memTypes) {
                BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
                if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
                    successPattern = getSuccessPattern(memberType, accessExpr, tempResultVar,
                            accessExpr.errorSafeNavigation);
                    matchStmt.patternClauses.add(successPattern);
                }
            }
            matchStmt.patternClauses.add(getMatchAllAndNilReturnPattern(accessExpr, tempResultVar));
            pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
            return;
        }
        successPattern =
                getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
                        accessExpr.errorSafeNavigation);
        matchStmt.patternClauses.add(successPattern);
        pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
    }
/**
 * Checks whether the given type is a {@code map} whose constraint is {@code json}.
 *
 * @param originalType the type to inspect (not unwrapped from type references)
 * @return true only for a map type constrained to json
 */
private boolean isMapJson(BType originalType) {
    if (originalType.tag != TypeTags.MAP) {
        return false;
    }
    return ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
/**
 * Pushes the given match statement onto the stack used while desugaring nested
 * safe-navigation access expressions, and chains it into the body of the
 * enclosing level's success pattern when one exists.
 */
private void pushToMatchStatementStack(BLangMatch matchStmt, BLangAccessExpression accessExpr,
                                       BLangMatchTypedBindingPatternClause successPattern) {
    this.matchStmtStack.push(matchStmt);
    BLangMatchTypedBindingPatternClause enclosingSuccessPattern = this.successPattern;
    if (enclosingSuccessPattern != null) {
        // Nest the new match statement inside the outer success pattern's body.
        enclosingSuccessPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
    }
    this.successPattern = successPattern;
}
/**
 * Extracts the accessed field name from a field- or index-based access
 * expression. Returns {@code Names.EMPTY} when the name cannot be determined
 * statically (e.g. a non-literal index expression or an unrelated node kind).
 */
private Name getFieldName(BLangAccessExpression accessExpr) {
    NodeKind kind = accessExpr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        return new Name(((BLangFieldBasedAccess) accessExpr).field.value);
    }
    if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangExpression indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        if (indexExpr.getKind() == NodeKind.LITERAL) {
            return new Name(((BLangLiteral) indexExpr).value.toString());
        }
    }
    return Names.EMPTY;
}
/**
 * Returns true when every member of the union is a record, error, or nil type
 * (error and nil members are tolerated because safe navigation strips them
 * separately before the per-record patterns are generated).
 */
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    for (BType memType : memTypes) {
        switch (Types.getReferredType(memType).tag) {
            case TypeTags.RECORD:
            case TypeTags.ERROR:
            case TypeTags.NIL:
                continue;
            default:
                return false;
        }
    }
    return true;
}
/**
 * Creates the match pattern clause handling the error case of a safe-navigation
 * expression, i.e. {@code error e => $tempResult$ = e;}.
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    Location pos = expr.pos;
    String patternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol patternVarSymbol = new BVarSymbol(0, names.fromString(patternVarName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable patternVar =
            ASTBuilderUtil.createVariable(pos, patternVarName, symTable.errorType, null, patternVarSymbol);
    // Body: assign the matched error into the temp result variable.
    BLangSimpleVarRef rhs = ASTBuilderUtil.createVariableRef(pos, patternVar.symbol);
    BLangVariableReference lhs = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(pos, lhs, rhs);
    BLangBlockStmt body = ASTBuilderUtil.createBlockStmt(pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(pos, patternVar, body);
}
/**
 * Creates a match-expression pattern clause that matches nil and evaluates to
 * the supplied expression, i.e. {@code _ => expr}.
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(Location pos,
                                                                       BLangExpression expr) {
    String patternVarName = IGNORE.toString();
    BVarSymbol patternVarSymbol = new BVarSymbol(0, names.fromString(patternVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable patternVar =
            ASTBuilderUtil.createVariable(pos, patternVarName, symTable.nilType, null, patternVarSymbol);
    BLangMatchExprPatternClause nullPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nullPattern.variable = patternVar;
    nullPattern.expr = expr;
    nullPattern.pos = pos;
    return nullPattern;
}
/**
 * Creates the match pattern clause handling the nil case of a safe-navigation
 * expression, i.e. {@code () n => $tempResult$ = n;}.
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    Location pos = expr.pos;
    String patternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol patternVarSymbol = new BVarSymbol(0, names.fromString(patternVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable patternVar =
            ASTBuilderUtil.createVariable(pos, patternVarName, symTable.nilType, null, patternVarSymbol);
    // Body: propagate nil into the temp result variable.
    BLangSimpleVarRef rhs = ASTBuilderUtil.createVariableRef(pos, patternVar.symbol);
    BLangVariableReference lhs = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(pos, lhs, rhs);
    BLangBlockStmt body = ASTBuilderUtil.createBlockStmt(pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(pos, patternVar, body);
}
/**
 * Creates the catch-all static match pattern {@code _ => $tempResult$ = ();},
 * used when the accessed field may be absent in some union members.
 */
private BLangMatchStaticBindingPatternClause getMatchAllAndNilReturnPattern(BLangExpression expr,
                                                                            BLangSimpleVariable tempResultVar) {
    Location pos = expr.pos;
    // Body: assign nil to the temp result variable.
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
    BLangAssignment assignNil = ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef,
            createLiteral(pos, symTable.nilType, Names.NIL_VALUE));
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pos, Lists.of(assignNil));
    BLangMatchStaticBindingPatternClause matchAllPattern =
            (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern();
    matchAllPattern.literal = ASTBuilderUtil.createVariableRef(pos,
            new BVarSymbol(0, names.fromString("_"), this.env.scope.owner.pkgID, symTable.anyType,
                    this.env.scope.owner, pos, VIRTUAL));
    matchAllPattern.body = patternBody;
    return matchAllPattern;
}
/**
 * Creates the success-case match pattern for a safe-navigation access: binds
 * the error/nil-lifted value to a fresh pattern variable, clones the access
 * expression so it operates on that variable, and assigns the access result
 * into the temp result variable.
 *
 * @param type          static type of the navigated expression
 * @param accessExpr    the original access expression being desugared
 * @param tempResultVar variable that accumulates the overall result
 * @param liftError     whether the error type is also lifted from {@code type}
 * @return the typed binding pattern clause for the success case
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BType type, BLangAccessExpression accessExpr,
                                                              BLangSimpleVariable tempResultVar,
                                                              boolean liftError) {
    // Strip nil (and optionally error) from the type matched by this pattern.
    type = types.getSafeType(type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
    BVarSymbol successPatternSymbol;
    // Function-typed values get an invokable symbol; everything else a plain var symbol.
    if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner,
                accessExpr.pos, VIRTUAL);
    } else {
        successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner,
                accessExpr.pos, VIRTUAL);
    }
    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
            type, null, successPatternSymbol);
    // Clone the access expression and re-root it on the pattern variable.
    BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        // Reuse the original index expression rather than its clone.
        ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
    }
    if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
        // Preserve the namespace symbol for ns-prefixed field accesses.
        ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
                ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
    }
    tempAccessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
    // Safe navigation is handled by the surrounding match; disable it on the clone.
    tempAccessExpr.errorSafeNavigation = false;
    tempAccessExpr.nilSafeNavigation = false;
    accessExpr.cloneRef = null;
    // For XML receivers, widen the clone's type with error and nil.
    if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
        tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                symTable.nilType));
    } else {
        tempAccessExpr.setBType(accessExpr.originalType);
    }
    tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
    // Pattern body: $tempResult$ = <converted access on the pattern variable>;
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
    BLangMatchTypedBindingPatternClause successPattern =
            ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
    // Expose the generated assignment via the safeNavigationAssignment field.
    this.safeNavigationAssignment = assignmentStmt;
    return successPattern;
}
/**
 * Builds a value-type node representing the nil type {@code ()}.
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.typeKind = TypeKind.NIL;
    nilTypeNode.setBType(symTable.nilType);
    return nilTypeNode;
}
/**
 * Clones a variable reference or access expression for reuse during
 * safe-navigation desugaring.
 *
 * @throws IllegalStateException for any other expression kind
 */
private BLangValueExpression cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Recursively clones a field/index access expression chain, rebuilding each
 * link on a nil-lifted receiver and clearing the safe-navigation flags on the
 * clones (the generated match handles navigation safety instead).
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    BLangExpression varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    // Clone the receiver, recursing through nested access expressions.
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression(originalAccessExpr.expr);
    }
    // getSafeType(..., true, false): same lifting as getSuccessPattern with liftError=false.
    varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        default:
            throw new IllegalStateException();
    }
    // Carry over metadata from the original node.
    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.isLValue = originalAccessExpr.isLValue;
    accessExpr.symbol = originalAccessExpr.symbol;
    // Navigation safety is handled by the surrounding generated code.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    // Type of the clone is the original (pre-lifting) access result type.
    accessExpr.setBType(originalAccessExpr.originalType);
    return accessExpr;
}
/**
 * Returns the expression {@code expr + 1} (int addition), used to shift an
 * int-range start bound by one.
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.ADD, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOpSymbol);
}
/**
 * Returns the expression {@code expr - 1} (int subtraction), used to shift an
 * int-range end bound by one.
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.SUB, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOpSymbol);
}
/**
 * Creates a boolean literal node carrying the builtin position.
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral literalNode = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literalNode.pos = symTable.builtinPos;
    literalNode.value = value;
    literalNode.setBType(symTable.booleanType);
    return literalNode;
}
/**
 * Whether the given type is a mapping type (json, map, or record); type
 * reference descriptors are unwrapped and re-checked.
 */
private boolean isDefaultableMappingType(BType type) {
    int tag = types.getSafeType(type, true, false).tag;
    if (tag == TypeTags.TYPEREFDESC) {
        return isDefaultableMappingType(Types.getReferredType(type));
    }
    return tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD;
}
/**
 * Creates the generated initializer function for a class definition,
 * registers it on the class's object type symbol, and rewrites it. The
 * return type mirrors the user-defined init function's return type when one
 * exists; otherwise nil.
 *
 * @param classDefinition the class to generate the initializer for
 * @param env             the enclosing symbol environment
 * @return the rewritten generated init function
 */
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
    BType returnType = symTable.nilType;
    if (classDefinition.initFunction != null) {
        // Mirror the user-defined init's return type.
        returnType = classDefinition.initFunction.getBType().getReturnType();
    }
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
                    env, names, GENERATED_INIT_SUFFIX,
                    classDefinition.getBType(), returnType);
    // Attach the generated initializer to the class's object type symbol.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
    typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
            (BInvokableType) initFunction.getBType(), null);
    classDefinition.generatedInitFunction = initFunction;
    initFunction.returnTypeNode.setBType(returnType);
    return rewrite(initFunction, env);
}
/**
 * Desugars logical AND/OR binary expressions into an if-else statement
 * expression, preserving short-circuit evaluation of the right operand.
 * See the inline comment for the exact expansion.
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    /*
     * Desugar (lhsExpr && rhsExpr) to following if-else:
     *
     * logical AND:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = rhsExpr;
     * } else {
     *    $result$ = false;
     * }
     *
     * logical OR:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = true;
     * } else {
     *    $result$ = rhsExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
            symTable.builtinPos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    // then-branch: rhs for AND, literal true for OR.
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
            resultVarDef.var.symbol);
    BLangExpression thenResult;
    if (binaryExpr.opKind == OperatorKind.AND) {
        thenResult = binaryExpr.rhsExpr;
    } else {
        thenResult = getBooleanLiteral(true);
    }
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
    thenBody.addStatement(thenAssignment);
    // else-branch: literal false for AND, rhs for OR.
    BLangExpression elseResult;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
            resultVarDef.var.symbol);
    if (binaryExpr.opKind == OperatorKind.AND) {
        elseResult = getBooleanLiteral(false);
    } else {
        elseResult = binaryExpr.rhsExpr;
    }
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
    elseBody.addStatement(elseAssignment);
    // Assemble { T $result$; if (lhs) {...} else {...} } and evaluate to $result$.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Checks whether the expression is a mapping constructor, object constructor,
 * or object init expression — looking through check and type-conversion
 * wrappers recursively.
 */
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.TYPE_INIT_EXPR || kind == NodeKind.RECORD_LITERAL_EXPR
            || kind == NodeKind.OBJECT_CTOR_EXPRESSION) {
        return true;
    }
    if (kind == NodeKind.CHECK_EXPR) {
        return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
    }
    if (kind == NodeKind.TYPE_CONVERSION_EXPR) {
        return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
    }
    return false;
}
/**
 * Returns the rest parameter's type of the given invokable symbol, or null
 * when the symbol is absent or has no rest parameter.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    if (invokableSymbol == null || invokableSymbol.restParam == null) {
        return null;
    }
    return invokableSymbol.restParam.type;
}
/**
 * Returns the rest parameter's type of the given function node, or null when
 * the function is absent or has no rest parameter.
 */
private BType getRestType(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.getBType();
}
/**
 * Returns the rest parameter's symbol of the given function node, or null
 * when the function is absent or has no rest parameter.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.symbol;
}
/**
 * True if the record field is a key-value field whose key is computed
 * (written as {@code [expr]: value}).
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Rewrites a mapping constructor into its desugared literal form: rewrites
 * each field's key and value, converts identifier keys to string literals,
 * rewrites spread-operator fields in place, then clears the original field
 * list and returns a struct literal (record types) or map literal node
 * carrying the rewritten fields.
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.getBType();
    Location pos = mappingConstructorExpr.pos;
    List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression origKey = key.expr;
            BLangExpression keyExpr;
            if (key.computedKey) {
                // Computed keys ([expr]) are kept as-is; rewritten below.
                keyExpr = origKey;
            } else {
                // Identifier keys become string literals (unescaped); literal keys pass through.
                keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
                        StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
                        ((BLangLiteral) origKey);
            }
            BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
                    ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
                            rewriteExpr(keyValueField.valueExpr));
            rewrittenField.pos = keyValueField.pos;
            rewrittenField.key.pos = key.pos;
            rewrittenFields.add(rewrittenField);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand field {x} expands to {"x": x}.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
                    rewriteExpr(createStringLiteral(pos,
                            StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
                    rewriteExpr(varRefField)));
        } else {
            // Spread field {...expr}: rewrite the spread expression in place.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            spreadOpField.expr = rewriteExpr(spreadOpField.expr);
            rewrittenFields.add(spreadOpField);
        }
    }
    // The original node's field list is no longer needed.
    fields.clear();
    BType refType = Types.getReferredType(type);
    return refType.tag == TypeTags.RECORD ?
            new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
            new BLangMapLiteral(pos, type, rewrittenFields);
}
/**
 * Adds an import of the internal transaction module (aliased {@code trx}) to
 * the enclosing package's imports, unless the enclosing package is that
 * module itself.
 */
protected void addTransactionInternalModuleImport() {
    if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
        BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
        List<BLangIdentifier> pkgNameComps = new ArrayList<>();
        pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
        importDcl.pkgNameComps = pkgNameComps;
        importDcl.pos = env.enclPkg.symbol.pos;
        importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
        importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
        importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
        importDcl.symbol = symTable.internalTransactionModuleSymbol;
        // Register the import on both the package node and its symbol.
        env.enclPkg.imports.add(importDcl);
        env.enclPkg.symbol.imports.add(importDcl.symbol);
    }
}
}

// NOTE(review): the line above previously contained merge residue
// ("| private void addTypeCastForBinaryExpr(...) |") fused between the closing
// brace and this method's signature; the residue is not valid Java and was
// removed. Confirm against the original file.
/**
 * Desugars a foreach over an iterator into an equivalent while loop:
 * repeatedly fetches {@code iterator.next()}, tests that the result matches
 * the expected record result type, binds its {@code value} field to the loop
 * variable, and runs the original body.
 *
 * @param foreach the foreach node being desugared
 * @param varDef  definition of the iterator variable
 * @return a block containing the iterator definition followed by the while loop
 */
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner, foreach.pos,
            VIRTUAL);
    // $result$ = iterator.next() — evaluated on every loop test.
    BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
            foreach.nillableResultType, iteratorSymbol, resultSymbol);
    BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
            resultVariableDefinition, resultReferenceInWhile);
    statementExpression.setBType(foreach.nillableResultType);
    // Loop condition: (var $result$ = iterator.next(); $result$) is <resultType>.
    BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
            .createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = foreach.pos;
    whileNode.expr = typeTestExpr;
    whileNode.body = foreach.body;
    // Bind the loop variable to $result$.value at the top of the loop body.
    VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
    BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
    BLangExpression expr = valueAccessExpr.expr;
    valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
    variableDefinitionNode.getVariable()
            .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
    whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
    BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
    blockNode.addStatement(varDef);
    blockNode.addStatement(whileNode);
    return blockNode;
}
/**
 * Wraps an already-resolved type in a user-defined type node with empty
 * package alias and type name identifiers.
 */
private BLangType getUserDefineTypeNode(BType type) {
    BLangIdentifier emptyPkgAlias = ASTBuilderUtil.createIdentifier(null, "");
    BLangIdentifier emptyTypeName = ASTBuilderUtil.createIdentifier(null, "");
    BLangUserDefinedType typeNode = new BLangUserDefinedType(emptyPkgAlias, emptyTypeName);
    typeNode.setBType(type);
    return typeNode;
}
/**
 * Desugars a while statement. Without an on-fail clause the condition and
 * body are rewritten in place; with one, the loop is detached from the clause
 * and wrapped in a do-statement that carries it.
 */
@Override
public void visit(BLangWhile whileNode) {
    BLangOnFailClause onFailClause = whileNode.onFailClause;
    if (onFailClause == null) {
        whileNode.expr = rewriteExpr(whileNode.expr);
        whileNode.body = rewrite(whileNode.body, env);
        result = whileNode;
        return;
    }
    whileNode.onFailClause = null;
    whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
    BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
    result = rewrite(doStmt, env);
}
/**
 * Wraps the given statement in a do-statement carrying the on-fail clause;
 * the do-block's failure break mode is set so failures break to the outer
 * block.
 */
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
                                      BLangOnFailClause onFailClause) {
    BLangDo doNode = (BLangDo) TreeBuilder.createDoNode();
    BLangBlockStmt doBody = ASTBuilderUtil.createBlockStmt(location);
    doBody.scope = new Scope(env.scope.owner);
    doBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
    doBody.stmts.add(statement);
    doNode.body = doBody;
    doNode.pos = location;
    doNode.onFailClause = onFailClause;
    return doNode;
}
/**
 * Desugars a lock statement into an explicit lock/unlock pair with the body
 * evaluated under a trap, so the lock is released even when the body panics:
 *
 *   lock;
 *   error|() $errorResult = trap { <body>; () };
 *   unlock;
 *   if ($errorResult is error) { panic $errorResult; }
 *
 * An attached on-fail clause is rewritten first and the generated block is
 * made breakable to the outer block.
 */
@Override
public void visit(BLangLock lockNode) {
    // Save/restore the enclosing on-fail clause around this node.
    BLangOnFailClause currentOnFailClause = this.onFailClause;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    if (lockNode.onFailClause != null) {
        blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        rewrite(lockNode.onFailClause, env);
    }
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    enclLocks.push(lockStmt);
    // Wrap the body in a trap so a panic cannot skip the unlock.
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.setBType(symTable.nilType);
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.setBType(nillableError);
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
            this.env.scope.owner.pkgID, nillableError,
            this.env.scope.owner, lockNode.pos, VIRTUAL);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
            nillableError, trapExpr,
            nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    // Unlock immediately after the trapped body, then re-panic any trapped error.
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    unLockStmt.relatedLock = lockStmt;
    blockStmt.addStatement(unLockStmt);
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
    this.onFailClause = currentOnFailClause;
}
/**
 * Lock statements require no further desugaring here; passed through unchanged.
 */
@Override
public void visit(BLangLockStmt lockStmt) {
    result = lockStmt;
}
/**
 * Unlock statements require no further desugaring here; passed through unchanged.
 */
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    result = unLockStmt;
}
/**
 * Builds the internal on-fail clause used while desugaring a transaction
 * block: it rolls back the failed transaction, and then either re-fails with
 * the caught error (when {@code $shouldPanic$} is not set) so an enclosing
 * on-fail can run, or panics with it.
 *
 * @param pos            position assigned to all generated nodes
 * @param shouldPanicRef reference to the generated $shouldPanic$ flag
 * @param shouldRetryRef reference to the retry flag consulted by the rollback logic
 */
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
                                                  BLangSimpleVarRef shouldRetryRef) {
    BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    trxOnFailClause.pos = pos;
    trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
    trxOnFailClause.body.scope = new Scope(env.scope.owner);
    trxOnFailClause.isInternal = true;
    // on fail error $trxError$ { ... }
    BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
            "$trxError$", symTable.errorType, null, trxOnFailErrorSym);
    trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            trxOnFailError);
    trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
    // Roll the transaction back before deciding whether to fail or panic.
    transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
            trxBlockId, shouldRetryRef);
    BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
    shouldNotPanic.setBType(symTable.booleanType);
    shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
    BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
    BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = pos;
    panicNode.expr = caughtError;
    // if (!$shouldPanic$) { fail $trxError$; } else { panic $trxError$; }
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
    trxOnFailClause.body.stmts.add(exitIf);
    BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
    failStmt.pos = pos;
    failStmt.expr = caughtError;
    failBlock.stmts.add(failStmt);
    trxOnFailClause.bodyContainsFail = true;
    return trxOnFailClause;
}
/**
 * Desugars a transaction statement. With a user on-fail clause attached, the
 * node is re-entered via a wrapping do-statement. Otherwise the transaction
 * body is rewritten by the transaction desugar with a fresh block id, a
 * generated {@code $shouldPanic$} flag, and an internal on-fail clause that
 * performs rollback; the saved desugar state (block id, error-return mode,
 * enclosing on-fail) is restored afterwards.
 */
@Override
public void visit(BLangTransaction transactionNode) {
    if (transactionNode.onFailClause != null) {
        // Detach the user's on-fail clause and re-process via a do-statement.
        BLangOnFailClause onFailClause = transactionNode.onFailClause;
        transactionNode.onFailClause = null;
        transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
        BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
        result = rewrite(doStmt, env);
    } else {
        // Save state that is swapped while desugaring this transaction.
        BLangLiteral currentTrxBlockId = this.trxBlockId;
        String uniqueId = String.valueOf(++transactionBlockCount);
        this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
        boolean currShouldReturnErrors = this.shouldReturnErrors;
        this.shouldReturnErrors = true;
        BLangOnFailClause currOnFailClause = this.onFailClause;
        // boolean $shouldPanic$ = false; (captured as a closure variable)
        BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
        BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
                env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
        shouldPanicVarSymbol.closure = true;
        BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
                "$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
        BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
                shouldPanicVariable);
        BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
                shouldPanicVarSymbol);
        // Internal on-fail clause that rolls back and fails/panics as appropriate.
        BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
                this.shouldRetryRef);
        enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
        boolean userDefinedOnFailAvbl = this.onFailClause != null;
        analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
        BLangBlockStmt transactionStmtBlock =
                transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
        transactionStmtBlock.stmts.add(0, shouldPanicDef);
        transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
        transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
                BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
                BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        result = rewrite(transactionStmtBlock, this.env);
        // Restore the saved desugar state.
        this.shouldReturnErrors = currShouldReturnErrors;
        this.trxBlockId = currentTrxBlockId;
        swapAndResetEnclosingOnFail(currOnFailClause);
    }
}
/**
 * Desugars a rollback statement: delegates to the transaction desugar (using
 * the current transaction block id and retry flag) and rewrites the
 * resulting block.
 */
@Override
public void visit(BLangRollback rollbackNode) {
    BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
    result = rewrite(rollbackStmtExpr, env);
}
/**
 * Builds the internal on-fail clause used while desugaring a retry loop: it
 * records the caught error, marks the loop for continuation, asks the retry
 * manager whether to retry, and — when retrying is refused — either fails
 * (so a user on-fail clause runs) or flags the error for returning.
 *
 * @param pos             position assigned to all generated nodes
 * @param retryResultRef  variable holding the latest retry result/error
 * @param retryManagerRef reference to the generated retry manager
 * @param shouldRetryRef  flag set from retryManager.shouldRetry(error)
 * @param continueLoopRef flag that keeps the generated loop running
 * @param returnResult    flag that signals the error should be returned
 */
private BLangOnFailClause createRetryInternalOnFail(Location pos,
                                                    BLangSimpleVarRef retryResultRef,
                                                    BLangSimpleVarRef retryManagerRef,
                                                    BLangSimpleVarRef shouldRetryRef,
                                                    BLangSimpleVarRef continueLoopRef,
                                                    BLangSimpleVarRef returnResult) {
    BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    internalOnFail.pos = pos;
    internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
    internalOnFail.body.scope = new Scope(env.scope.owner);
    // on fail error $caughtError$ { ... }
    BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
            "$caughtError$", symTable.errorType, null, caughtErrorSym);
    internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            caughtError);
    env.scope.define(caughtErrorSym.name, caughtErrorSym);
    BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
    // $retryResult$ = $caughtError$;
    BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
    internalOnFail.body.stmts.add(errorAssignment);
    // $continueLoop$ = true;
    BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
            ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
    internalOnFail.body.stmts.add(continueLoopTrue);
    // $shouldRetry$ = retryManager.shouldRetry($caughtError$);
    BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
            retryManagerRef, caughtErrorRef);
    BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
            shouldRetryInvocation);
    internalOnFail.body.stmts.add(shouldRetryAssignment);
    BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
    shouldNotRetryCheck.setBType(symTable.booleanType);
    shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
    BLangGroupExpr exitCheck = new BLangGroupExpr();
    exitCheck.setBType(symTable.booleanType);
    exitCheck.expression = shouldNotRetryCheck;
    BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
    if (this.onFailClause != null) {
        // A user on-fail exists: fail into it when retries are exhausted,
        // otherwise continue the retry loop.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = pos;
        failStmt.expr = retryResultRef;
        exitLogicBlock.stmts.add(failStmt);
        internalOnFail.bodyContainsFail = true;
        internalOnFail.body.stmts.add(exitIf);
        BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
        loopContinueStmt.pos = pos;
        internalOnFail.body.stmts.add(loopContinueStmt);
    } else {
        // No user on-fail: flag that the error should be returned.
        BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
                ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
        exitLogicBlock.stmts.add(returnErrorTrue);
        internalOnFail.body.stmts.add(exitIf);
    }
    return internalOnFail;
}
/**
 * Builds the logical-not expression {@code !expression} over a boolean
 * operand. (Despite the method name, the result is a unary, not binary, node.)
 */
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
    List<BType> paramTypes = new ArrayList<>();
    paramTypes.add(symTable.booleanType);
    BInvokableType notOpType = new BInvokableType(paramTypes, symTable.booleanType,
            null);
    BOperatorSymbol notOpSymbol = new BOperatorSymbol(
            names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, notOpType,
            symTable.rootPkgSymbol, symTable.builtinPos, VIRTUAL);
    return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
            OperatorKind.NOT, notOpSymbol);
}
/**
 * Creates a lambda function with the given parameters, return type, and body;
 * defines it in the enclosing package (a uniquely numbered name is generated
 * from the prefix) and wraps it in a lambda node typed with the matching
 * invokable type.
 *
 * @param pos                    position for the generated nodes
 * @param functionNamePrefix     prefix for the generated function name
 * @param lambdaFunctionVariable required parameters of the lambda
 * @param returnType             return type node of the lambda
 * @param lambdaBody             function body
 */
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                         List<BLangSimpleVariable> lambdaFunctionVariable,
                                         TypeNode returnType, BLangFunctionBody lambdaBody) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    // Unique name: <prefix>_<counter>.
    BLangFunction func =
            ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.requiredParams.addAll(lambdaFunctionVariable);
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    // Use the (possibly re-created) parameter list owned by the function node.
    lambdaFunctionVariable = func.requiredParams;
    func.body = lambdaBody;
    func.desugared = false;
    lambdaFunction.pos = pos;
    // Lambda type: (paramTypes) -> returnType of the defined function symbol.
    List<BType> paramTypes = new ArrayList<>();
    lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
    lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
            null));
    return lambdaFunction;
}
/**
 * Creates a lambda function from a list of body statements.
 * The statements are rewritten inside a fresh function-body environment, then
 * the body-based overload does the rest.
 */
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                                   List<BLangSimpleVariable> lambdaFunctionVariable,
                                                   TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                   SymbolEnv env, Scope bodyScope) {
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    funcBody.scope = bodyScope;
    funcBody.stmts = rewriteStmt(fnBodyStmts, SymbolEnv.createFuncBodyEnv(funcBody, env));
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, funcBody);
}
/**
 * Enters the function symbol in the target package's environment and registers
 * the node both in the package's function list and as a top-level node.
 */
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, pkgEnv);
    pkgEnv.enclPkg.functions.add(funcNode);
    pkgEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    // Nothing to desugar at this node itself; it passes through unchanged.
    result = forkJoin;
}
/**
 * Desugars literals: a byte-array (blob) literal is expanded into an array
 * literal of byte literals; every other literal passes through unchanged.
 */
@Override
public void visit(BLangLiteral literalExpr) {
    BType literalType = literalExpr.getBType();
    if (literalType.tag == TypeTags.ARRAY && ((BArrayType) literalType).eType.tag == TypeTags.BYTE) {
        result = rewriteBlobLiteral(literalExpr);
    } else {
        result = literalExpr;
    }
}
/**
 * Expands a blob literal (`base16`/`base64` backtick string) into an array
 * literal whose members are the decoded byte values.
 */
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    // blobParts[0] is the encoding keyword, blobParts[1] the backtick payload.
    String[] blobParts = getBlobTextValue((String) literalExpr.value);
    byte[] decoded = BASE_64.equals(blobParts[0])
            ? Base64.getDecoder().decode(blobParts[1].getBytes(StandardCharsets.UTF_8))
            : hexStringToByteArray(blobParts[1]);
    BLangArrayLiteral byteArrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    byteArrayLiteral.setBType(literalExpr.getBType());
    byteArrayLiteral.pos = literalExpr.pos;
    byteArrayLiteral.exprs = new ArrayList<>();
    for (byte value : decoded) {
        byteArrayLiteral.exprs.add(createByteLiteral(literalExpr.pos, value));
    }
    return byteArrayLiteral;
}
/**
 * Splits blob literal text into [encoding, payload]: the encoding keyword is
 * the text before the first backtick, the payload the text between the first
 * and last backtick. All whitespace characters are stripped first.
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    String compact = blobLiteralNodeText.replace("\t", "").replace("\n", "").replace("\r", "")
            .replace(" ", "");
    int openingTick = compact.indexOf('`');
    int closingTick = compact.lastIndexOf('`');
    return new String[]{compact.substring(0, openingTick), compact.substring(openingTick + 1, closingTick)};
}
/**
 * Decodes base16 text: each pair of hex digits becomes one byte.
 * Assumes the input has even length and contains valid hex digits
 * (guaranteed by the blob literal grammar upstream).
 */
private static byte[] hexStringToByteArray(String str) {
    int length = str.length();
    byte[] decoded = new byte[length / 2];
    for (int pos = 0; pos < length; pos += 2) {
        int highNibble = Character.digit(str.charAt(pos), 16);
        int lowNibble = Character.digit(str.charAt(pos + 1), 16);
        decoded[pos / 2] = (byte) ((highNibble << 4) + lowNibble);
    }
    return decoded;
}
@Override
public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
    // Only the spread operand needs rewriting; the node itself is retained.
    listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
    result = listConstructorSpreadOpExpr;
}
/**
 * Desugars a list constructor into the concrete literal node matching its
 * contextual type: tuple literal, JSON array literal, typedesc expression,
 * or plain array literal. Branch order matters: the JSON check on the list
 * type itself must precede the element-type check.
 */
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BLangExpression expr;
    BType listConstructorType = Types.getReferredType(listConstructor.getBType());
    if (listConstructorType.tag == TypeTags.TUPLE) {
        expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    } else if (listConstructorType.tag == TypeTags.JSON) {
        // json-typed constructor: element type is the constructor's own type.
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
        result = rewriteExpr(expr);
    } else if (getElementType(listConstructorType).tag == TypeTags.JSON) {
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    } else if (listConstructorType.tag == TypeTags.TYPEDESC) {
        // List constructor used as a typedesc value.
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
    } else {
        expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    }
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    // Rewrite the member record literals. The return value of rewriteExprs is
    // discarded here — NOTE(review): presumably rewriteExprs updates the list
    // in place; confirm against its implementation.
    rewriteExprs(tableConstructorExpr.recordLiteralList);
    result = tableConstructorExpr;
}
/**
 * Desugars an array literal. Arrays typed as json (or with json element type)
 * become dedicated JSON array literal nodes; everything else passes through
 * with its members rewritten.
 */
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType literalType = Types.getReferredType(arrayLiteral.getBType());
    if (literalType.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
    } else if (getElementType(literalType).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
    } else {
        result = arrayLiteral;
    }
}
/**
 * Desugars a tuple literal: either short-circuits to a typedesc expression, or
 * inserts implicit casts for each member against its corresponding tuple
 * member type before rewriting the members.
 */
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        // Tuple literal used as a typedesc value.
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
        return;
    }
    List<BLangExpression> exprs = tupleLiteral.exprs;
    BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
    List<BType> tupleMemberTypes = tupleType.tupleTypes;
    int tupleMemberTypeSize = tupleMemberTypes.size();
    // NOTE(review): tupleExprSize is computed but never used below.
    int tupleExprSize = exprs.size();
    // `i` tracks the member-type index. A spread operand of statically known
    // length advances `i` by that length; once a spread of unknown length is
    // seen, all subsequent members map to the tuple's rest type.
    boolean isInRestType = false;
    int i = 0;
    for (BLangExpression expr: exprs) {
        if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
            spreadOpType = Types.getReferredType(spreadOpType);
            if (spreadOpType.tag == TypeTags.ARRAY) {
                BArrayType spreadOpBArray = (BArrayType) spreadOpType;
                if (spreadOpBArray.size >= 0) {
                    // Fixed-length array spread.
                    i += spreadOpBArray.size;
                    continue;
                }
            } else {
                BTupleType spreadOpTuple = (BTupleType) spreadOpType;
                if (types.isFixedLengthTuple(spreadOpTuple)) {
                    // Fixed-length tuple spread.
                    i += spreadOpTuple.tupleTypes.size();
                    continue;
                }
            }
            // Open-length spread: switch to the rest type from here on.
            isInRestType = true;
            continue;
        }
        BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
        BType targetType = tupleType.restType;
        if (!isInRestType && i < tupleMemberTypeSize) {
            targetType = tupleMemberTypes.get(i);
        }
        types.setImplicitCastExpr(expr, expType, targetType);
        i++;
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression desugars to its inner expression.
    result = rewriteExpr(groupExpr.expression);
}
/**
 * Desugars a mapping constructor. Computed-key fields are moved after plainly
 * keyed fields (stable sort: false sorts before true) before the constructor
 * is rewritten.
 */
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    recordLiteral.fields.sort((firstField, secondField) ->
            Boolean.compare(isComputedKey(firstField), isComputedKey(secondField)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
/**
 * Desugars a simple variable reference into the concrete reference node for
 * what the symbol denotes: XML namespace, function pointer, type load, local,
 * field, or package-level variable. Constants with simple types are inlined
 * as literals. RValue reads get a conversion from the symbol's type to the
 * expected type when required.
 */
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // An XML namespace prefix used as a value: lower to a string-typed QName.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.setBType(symTable.stringType);
        result = qnameExpr;
        return;
    }
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Replace a narrowed-clone variable symbol with its original symbol.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BType type = varRefExpr.getBType();
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    // Pick the concrete reference node by symbol kind and owner kind.
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
        if (varRefExpr.symbol.tag == SymTag.TYPE_DEF) {
            type = ((BTypeDefinitionSymbol) varRefExpr.symbol).referenceType;
        }
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Constants whose literal type is simple (tag <= BOOLEAN, or nil) are
        // inlined as literals, converted to the reference's expected type.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            BType referredType = Types.getReferredType(constSymbol.literalType);
            if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
        // Inside a lock statement: register this global and the globals it
        // depends on as lock variables.
        if (!enclLocks.isEmpty()) {
            BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
            BLangLockStmt lockStmt = enclLocks.peek();
            lockStmt.addLockVariable(symbol);
            lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
        }
    }
    genVarRefExpr.setBType(type);
    genVarRefExpr.pos = varRefExpr.pos;
    // LValues (and the `_` ignore variable) keep the symbol's own type and get
    // no conversion expression.
    if ((varRefExpr.isLValue)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        genVarRefExpr.setBType(varRefExpr.symbol.type);
        result = genVarRefExpr;
        return;
    }
    genVarRefExpr.isLValue = varRefExpr.isLValue;
    // RValue read: read with the symbol's type, then convert to the target type.
    BType targetType = genVarRefExpr.getBType();
    genVarRefExpr.setBType(genVarRefExpr.symbol.type);
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    // Namespace-prefixed access (x.ns:attr) shares the generic field-access rewrite.
    rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
/**
 * Common rewrite for field-based access (a.b). The node is lowered to the
 * concrete access node matching the static type of the accessed expression
 * (object/record field, map access, JSON access, XML access, ...) or expanded
 * for safe navigation.
 */
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    // The field name as a string literal; identifier escapes are unescaped.
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    BType refType = Types.getReferredType(varRefType);
    int varRefTypeTag = refType.tag;
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Attached method accessed as a value: wrap in a delegating closure.
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;
            // Field writes inside the object's (generated) initializer count as
            // store-on-creation even when the node itself is not flagged.
            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false,
                    isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(refType)) {
        // Lax types: dynamic field lookup.
        if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
            if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
                // map of XML: needs error-producing key lookup.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            // XML attribute / element-name access becomes a langlib invocation.
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Shared with the ns-prefixed variant; see rewriteFieldBasedAccess.
    rewriteFieldBasedAccess(fieldAccessExpr);
}
/**
 * Rewrites an attached-method access used as a value (obj.method, without
 * calling it) into a lambda that closes over the receiver and delegates to
 * the method. If the receiver is not a simple variable reference it is first
 * stored in a temporary so it is evaluated exactly once; in that case a
 * statement expression (varDef; lambda) is returned instead of a bare lambda.
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;
    // Generated delegate function; the counter keeps names unique.
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
                                                       Names.fromString(funcName), env.enclPkg.packageID,
                                                       originalMemberFuncSymbol.type, env.scope.owner, pos,
                                                       VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();
    BLangExpression receiver = fieldAccessExpr.expr;
    // If the receiver is a complex expression, evaluate it once into a temp
    // variable and close over that instead.
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                                                     receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }
    // Mirror each required parameter of the original method onto the delegate
    // and forward it as an argument.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                                       VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);
        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }
    // Mirror the rest parameter (if any) and forward it as a rest argument.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                                          VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);
        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        restArgs.add(createRestArgsExpression(restArg));
    }
    // Delegate body: return receiver.method(args...);
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());
    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Evaluate the receiver once, then yield the closure.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
/**
 * Builds a method-call node `receiver.field(requiredArgs..., restArgs...)`
 * bound to the given invocable symbol; its type is the symbol's return type.
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.name = field;
    invocation.expr = receiver;
    invocation.symbol = invocableSymbol;
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
/**
 * Rewrites lax field access on a map (e.g. m.key where m is map&lt;xml&gt;) into a
 * statement expression that performs the member access and, when the key is
 * absent (nil), yields an {map}InvalidKey error instead. The resulting
 * expression's type is fieldType|error.
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    // Holder for the overall result (value or error).
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);
    // Raw map lookup: value or nil when the key is absent.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);
    // if (lookup is nil) { result = InvalidKey error } else { result = lookup }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    // Named arg `key = <field name>` on the error detail.
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;
    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;
    statementExpression.expr = resultRef;
    return statementExpression;
}
/**
 * Rewrites XML attribute or element-name access into a langlib invocation.
 * A ns-prefixed field is first expanded to `{namespaceURI}localName`; the
 * special field `_` maps to the nil-lifting element-name getter, everything
 * else to the attribute getter with (name, isOptionalAccess) arguments.
 */
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    String attributeName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        attributeName = createExpandedQName(nsAccess.nsSymbol.namespaceURI, attributeName);
    }
    if (attributeName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    ArrayList<BLangExpression> invocationArgs = new ArrayList<>();
    invocationArgs.add(createStringLiteral(fieldAccessExpr.field.pos, attributeName));
    invocationArgs.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr,
            invocationArgs, new ArrayList<>());
}
/**
 * Lowers the optional-access flag of the given field access into a rewritten
 * boolean literal expression.
 */
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    boolean isOptional = fieldAccessExpr.isOptionalFieldAccess();
    return rewrite(createLiteral(fieldAccessExpr.pos, symTable.booleanType, isOptional), env);
}
// Builds the expanded-name form "{namespaceURI}localName".
private String createExpandedQName(String nsURI, String localName) {
    return "{" + nsURI + "}" + localName;
}
/**
 * Desugars index-based access (a[i]) into the concrete access node matching
 * the static type of the accessed expression: map, mapping, list, XML,
 * string, or table access. Safe-navigation accesses are expanded first.
 */
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like mapping: field access by index expression.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr,
                (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
    // Plain invocation; `false` => not an async (start) invocation.
    rewriteInvocation(iExpr, false);
}
/**
 * Desugars an error constructor: ensures the second positional argument (the
 * cause) exists (nil when omitted) and is typed as error, then materialises
 * the named args into a readonly-cloned detail record literal.
 */
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        // Only the message was given; add nil as the cause.
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        // Each named argument becomes a key-value field of the detail record.
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    // Non-async remote calls inside a transaction need the transaction
    // coordinator started (once).
    if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }
    // `start` of an isolated function gets the strand (thread "any") annotation.
    if (actionInvocation.async && Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {
        addStrandAnnotationWithThreadAny(actionInvocation);
    }
    rewriteInvocation(actionInvocation, actionInvocation.async);
}
/**
 * Attaches the strand (thread "any") annotation to the given async action
 * invocation and records it on the invoked symbol. The annotation attachment
 * is created lazily, once, and cached in {@code this.strandAnnotAttachement}.
 */
private void addStrandAnnotationWithThreadAny(BLangInvocation.BLangActionInvocation actionInvocation) {
    if (this.strandAnnotAttachement == null) {
        BLangPackage pkgNode = env.enclPkg;
        // Snapshot the type definitions so init functions are added only for
        // record type nodes introduced by the annotation creation below.
        List<BLangTypeDefinition> prevTypeDefinitions = new ArrayList<>(pkgNode.typeDefinitions);
        this.strandAnnotAttachement =
                annotationDesugar.createStrandAnnotationWithThreadAny(actionInvocation.pos, env);
        addInitFunctionForRecordTypeNodeInTypeDef(pkgNode, prevTypeDefinitions);
    }
    actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);
    ((BInvokableSymbol) actionInvocation.symbol)
            .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);
}
/**
 * Desugars a client resource access action. Path parameters are first turned
 * into an invocation (see createInvocationForPathParams) and its arguments
 * reordered; then the arguments that correspond to `*`/`**` segments of the
 * target resource path are merged with the invocation's own arguments into a
 * new invocation that is rewritten normally.
 * NOTE(review): the statement-expression shuffling below preserves the
 * evaluation statements of the first rest-arg-derived argument; confirm
 * against reorderArguments' output shape.
 */
@Override
public void visit(BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {
    if (resourceAccessInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, resourceAccessInvocation.pos);
    }
    BLangInvocation pathParamInvocation = createInvocationForPathParams(resourceAccessInvocation);
    reorderArguments(pathParamInvocation);
    BResourceFunction targetResourceFunc = resourceAccessInvocation.targetResourceFunc;
    List<Name> resourcePath = targetResourceFunc.resourcePath;
    int pathParamInvocationRequiredArgCount = pathParamInvocation.requiredArgs.size();
    BLangInvocation bLangInvocation = new BLangInvocation();
    BLangStatementExpression firstRequiredArgFromRestArg = null;
    boolean isFirstRequiredArgFromRestArgIncluded = false;
    // Keep only the required args that map to `*` segments of the resource
    // path; the first statement-expression arg carries shared setup statements.
    for (int i = 0; i < pathParamInvocationRequiredArgCount; i++) {
        BLangExpression requiredArg = pathParamInvocation.requiredArgs.get(i);
        Name resourcePathName = resourcePath.get(i);
        if (firstRequiredArgFromRestArg == null && requiredArg.getKind() == NodeKind.STATEMENT_EXPRESSION) {
            firstRequiredArgFromRestArg = (BLangStatementExpression) requiredArg;
            if (resourcePathName.value.equals("*")) {
                isFirstRequiredArgFromRestArgIncluded = true;
                bLangInvocation.requiredArgs.add(requiredArg);
                continue;
            }
        }
        if (resourcePathName.value.equals("*")) {
            if (firstRequiredArgFromRestArg != null && !isFirstRequiredArgFromRestArgIncluded &&
                    requiredArg.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
                // Carry the setup statements of the skipped first stmt-expr arg
                // along with this included argument.
                BLangStatementExpression statementExpression = new BLangStatementExpression();
                statementExpression.expr = requiredArg;
                statementExpression.stmt = firstRequiredArgFromRestArg.stmt;
                statementExpression.setBType(requiredArg.getBType());
                bLangInvocation.requiredArgs.add(statementExpression);
            } else {
                bLangInvocation.requiredArgs.add(requiredArg);
            }
        }
    }
    // A trailing `**` segment consumes the rest args as required args.
    Name lastResourcePathName = resourcePath.get(resourcePath.size() - 1);
    if (lastResourcePathName.value.equals("**")) {
        for (BLangExpression restArg : pathParamInvocation.restArgs) {
            if (firstRequiredArgFromRestArg != null && !isFirstRequiredArgFromRestArgIncluded &&
                    restArg.getKind() == NodeKind.STATEMENT_EXPRESSION) {
                // Prepend the shared setup statement to the rest arg's block.
                BLangStatementExpression restArgStmtExpr = (BLangStatementExpression) restArg;
                ((BLangBlockStmt) restArgStmtExpr.stmt).stmts.add(0,
                        ((BLangBlockStmt) firstRequiredArgFromRestArg.stmt).stmts.get(0));
            }
            bLangInvocation.requiredArgs.add(restArg);
        }
    }
    // Copy over the remaining invocation state and rewrite as a normal call.
    bLangInvocation.requiredArgs.addAll(resourceAccessInvocation.requiredArgs);
    bLangInvocation.pkgAlias = resourceAccessInvocation.pkgAlias;
    bLangInvocation.name = resourceAccessInvocation.name;
    bLangInvocation.expr = resourceAccessInvocation.expr;
    bLangInvocation.restArgs = resourceAccessInvocation.restArgs;
    bLangInvocation.symbol = resourceAccessInvocation.symbol;
    bLangInvocation.setBType(resourceAccessInvocation.getBType());
    bLangInvocation.parent = resourceAccessInvocation.parent;
    bLangInvocation.pos = resourceAccessInvocation.pos;
    rewriteInvocation(bLangInvocation, false);
}
/**
 * Builds a plain {@code BLangInvocation} that carries the path-parameter arguments of a
 * resource access action. A fresh invokable symbol is created (cloned from the resolved
 * resource symbol) whose parameter list mirrors the target resource function's path tuple
 * type, and whose rest parameter (when the tuple has a rest type) is an array of that rest
 * type. Each resource-access path segment is then routed to the invocation: spread-op
 * segments become rest-args expressions, segments beyond the declared parameters go to
 * rest args, and the remainder become required arguments.
 */
private BLangInvocation createInvocationForPathParams(
        BLangInvocation.BLangResourceAccessInvocation resourceAccessInvocation) {
    BLangInvocation bLangInvocation = new BLangInvocation();
    // Clone the symbol so the synthetic parameter list built below does not mutate the
    // original resource function's symbol.
    BInvokableSymbol invokableSymbol = new BInvokableSymbol(
            resourceAccessInvocation.symbol.tag,
            resourceAccessInvocation.symbol.flags,
            resourceAccessInvocation.symbol.name,
            resourceAccessInvocation.symbol.pkgID,
            resourceAccessInvocation.symbol.type,
            resourceAccessInvocation.symbol,
            resourceAccessInvocation.symbol.pos, VIRTUAL);
    BResourceFunction targetResourceFunc = resourceAccessInvocation.targetResourceFunc;
    List<Name> resourcePath = targetResourceFunc.resourcePath;
    List<BLangExpression> resourceAccessPathSegments = resourceAccessInvocation.resourceAccessPathSegments.exprs;
    List<BVarSymbol> invocationParams = new ArrayList<>(resourcePath.size());
    BTupleType resourcePathType = targetResourceFunc.resourcePathType;
    // One synthetic parameter per member of the resource path tuple type.
    for (BType type : resourcePathType.tupleTypes) {
        BVarSymbol param = new BVarSymbol(0, Names.EMPTY, this.env.scope.owner.pkgID, type,
                this.env.scope.owner, type.tsymbol.pos, VIRTUAL);
        invocationParams.add(param);
    }
    invokableSymbol.params = invocationParams;
    BType resourcePathRestType = resourcePathType.restType;
    if (resourcePathRestType != null) {
        // Rest path segments arrive as an array of the tuple's rest type.
        invokableSymbol.restParam = new BVarSymbol(0, Names.EMPTY, this.env.scope.owner.pkgID,
                new BArrayType(resourcePathRestType), this.env.scope.owner,
                resourcePathRestType.tsymbol.pos, VIRTUAL);
    }
    bLangInvocation.symbol = invokableSymbol;
    for (int i = 0; i < resourceAccessPathSegments.size(); i++) {
        BLangExpression resourceAccessPathSeg = resourceAccessPathSegments.get(i);
        if (resourceAccessPathSeg.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            // `...expr` segment: forward the spread expression as rest args.
            bLangInvocation.restArgs.add(createRestArgsExpression(
                    ((BLangListConstructorSpreadOpExpr) resourceAccessPathSeg).expr));
        } else if (i > invocationParams.size() - 1) {
            // More segments than declared path params: overflow goes to rest args.
            bLangInvocation.restArgs.add(resourceAccessPathSeg);
        } else {
            bLangInvocation.requiredArgs.add(resourceAccessPathSeg);
        }
    }
    return bLangInvocation;
}
/**
 * Wraps the given expression as a rest-args ({@code ...expr}) expression, copying the
 * wrapped expression's position and type onto the wrapper.
 */
private BLangRestArgsExpression createRestArgsExpression(BLangExpression expr) {
    BLangRestArgsExpression restArgsExpr = new BLangRestArgsExpression();
    restArgsExpr.pos = expr.pos;
    restArgsExpr.expr = expr;
    BType exprType = expr.getBType();
    restArgsExpr.setBType(exprType);
    restArgsExpr.expectedType = exprType;
    return restArgsExpr;
}
/**
 * Core rewrite pipeline for a resolved invocation: collects lock dependencies, reorders
 * and rewrites arguments, applies type-param and stream cast fixes, and finally converts
 * object/record method calls into attached-function invocations. The statement order here
 * is significant (e.g. arguments must be reordered before they are rewritten).
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;
    if (!enclLocks.isEmpty()) {
        // Inside a lock statement: the enclosing lock must also cover the global
        // variables this invocation depends on.
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }
    reorderArguments(invocation);
    rewriteExprs(invocation.requiredArgs);
    if (invocation.langLibInvocation && !invocation.requiredArgs.isEmpty()) {
        // For langlib calls the receiver is the (already rewritten) first required arg.
        invocation.expr = invocation.requiredArgs.get(0);
    } else {
        invocation.expr = rewriteExpr(invocation.expr);
    }
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    rewriteExprs(invocation.restArgs);
    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
            invocation.symbol.pkgID, invocation.symbol.owner, env);
    if (invocation.functionPointerInvocation) {
        // Function-pointer calls take a separate desugaring path entirely.
        visitFunctionPointerInvocation(invocation);
        return;
    }
    result = invRef;
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        // Dependently-typed return: rebuild the concrete return type, and wrap it in a
        // future when the call is async (start action).
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }
    if (invocation.expr == null) {
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        // Receiver was given only as a symbol (e.g. generated init): materialize a var ref.
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }
    switch (Types.getReferredType(invocation.expr.getBType()).tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!invocation.langLibInvocation) {
                // Method call on an object/record: prepend the receiver as the first
                // argument and replace the node with an attached-function invocation.
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                invocation.symbol, invocation.getBType(),
                                invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }
    if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
        // Object-constructor expressions with closures need the generated init invocation
        // recorded on the class's dynamic environment data (first one wins).
        BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
        BLangClassDefinition classDef = initializingObject.classDef;
        if (classDef.hasClosureVars) {
            OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
            if (oceEnvData.attachedFunctionInvocation == null) {
                oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
            }
        }
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
/**
 * For langlib invocations only: casts every required argument to the declared type of its
 * corresponding parameter so type-param substitution does not leak mismatched arg types.
 */
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BLangExpression converted = addConversionExprIfRequired(args.get(idx), paramSymbols.get(idx).type);
        args.set(idx, converted);
    }
}
/* This function is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments are the same invocation.
 * 2. Due to the current type-param logic, the type-param flag is put on the original type.
 * 3. For an error type involving the Cloneable type with the type-param flag set, this code
 *    changes the expression type.
 * 4. Using the error type is a problem because Cloneable is a type param (e.g. ExprBodiedFunctionTest);
 *    `never` was added to the CloneableType type param:
 * @typeParam type
 * CloneableType Cloneable|never;
 *
 */
/**
 * Restores the caller-visible type of a type-param-dependent invocation: the generated
 * invocation is retyped to the invokable's declared return type, then a conversion back to
 * the originally-inferred type is added around it when required.
 *
 * @param iExpr    the invocation whose symbol carries the declared return type
 * @param genIExpr the generated (possibly replaced) invocation node that becomes the result
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
    if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
        // Neither a langlib call nor a type-param-dependent return: nothing to fix.
        return;
    }
    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        // Async calls keep their future type; sync calls take the declared return type.
        genIExpr.setBType(returnTypeOfInvokable);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
/**
 * Adds a conversion on each required argument whose corresponding declared parameter is a
 * stream, so the argument's type matches the declared stream parameter type.
 */
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    if (paramSymbols.isEmpty()) {
        return;
    }
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BType paramType = paramSymbols.get(idx).type;
        if (Types.getReferredType(paramType).tag == TypeTags.STREAM) {
            args.set(idx, addConversionExprIfRequired(args.get(idx), paramType));
        }
    }
}
/** Creates a {@code ()} (nil) literal node typed as nil. */
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
/**
 * Desugars a {@code new} expression: stream constructors are turned into an internal
 * stream-construct invocation, all other type inits go through object init desugaring.
 */
public void visit(BLangTypeInit typeInitExpr) {
    boolean isStreamInit = Types.getReferredType(typeInitExpr.getBType()).tag == TypeTags.STREAM;
    if (isStreamInit) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}
/**
 * Desugars an object {@code new} expression into a statement expression:
 * <pre>
 *   T $obj$ = new;                       // object allocation
 *   var $temp$ = $obj$.$generated-init$(...);
 *   T|error $result$;
 *   if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
 *   // value of the whole expression: $result$
 * </pre>
 * When the init invocation's type is nil (init cannot fail), the error-check scaffolding
 * is skipped and the object variable itself is the resulting expression.
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    // $obj$ holds the newly allocated object instance.
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    // Redirect the init invocation at the generated initializer of the object type,
    // with $obj$ as its receiver.
    BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
    typeInitInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    typeInitInvocation.objectInitMethod = true;
    if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
        // Init cannot return an error: just invoke it and yield $obj$.
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitInvocation;
        typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }
    // $temp$ captures the init invocation's return value (object or error).
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
            typeInitInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
/**
 * Desugars {@code new stream<C, E>(...)} into an invocation of the internal
 * {@code constructStream} function, passing typedesc expressions for the constraint and
 * completion types plus the optional iterator argument.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    // typedesc argument for the stream's constraint type.
    BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
    BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
    constraintTdExpr.resolvedType = constraintType;
    constraintTdExpr.setBType(constraintTdType);
    // typedesc argument for the stream's completion type.
    BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
    BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
    completionTdExpr.resolvedType = completionType;
    completionTdExpr.setBType(completionTdType);
    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));
    if (!typeInitExpr.argsExpr.isEmpty()) {
        // Optional iterator object supplied to the stream constructor.
        args.add(typeInitExpr.argsExpr.get(0));
    }
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, args, symResolver);
    streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return streamConstructInvocation;
}
/**
 * Creates a simple variable definition named {@code name} of the given type, initialized
 * with {@code expr}. An existing symbol with the same name in the current scope is reused;
 * otherwise a fresh synthetic (VIRTUAL) symbol is created.
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    Name varName = names.fromString(name);
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, varName);
    if (varSym == null || varSym == symTable.notFoundSymbol) {
        varSym = new BVarSymbol(0, varName, this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(location);
    varDef.var = variable;
    varDef.setBType(variable.getBType());
    return varDef;
}
/**
 * Extracts the object type from the given type: the (referred) type itself when it is an
 * object, or the first object member of a union. Throws when the type is neither.
 */
private BType getObjectType(BType bType) {
    BType referredType = Types.getReferredType(bType);
    if (referredType.tag == TypeTags.OBJECT) {
        return referredType;
    }
    if (referredType.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) referredType).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + referredType.toString() +
            "' found in object init context");
}
/** Creates an error type node typed as {@code error}, positioned at the builtin location. */
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.pos = symTable.builtinPos;
    errorType.setBType(symTable.errorType);
    return errorType;
}
/**
 * Creates an error type node typed as {@code error?}.
 * Fix: also sets the builtin position, for consistency with {@link #getErrorTypeNode()} —
 * previously this node was left with a null position.
 */
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if (condExpr) {
     *    $result$ = thenExpr;
     * } else {
     *    $result$ = elseExpr;
     * }
     *
     * The whole construct becomes a statement expression yielding $result$.
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    // then branch: $result$ = thenExpr
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);
    // else branch: $result$ = elseExpr
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression. A multi-future wait ({@code wait f1 | f2 | ...}) arrives as a
 * binary-expression tree and is flattened into a list; a single-future wait is rewritten
 * and wrapped in a singleton list.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    }
    result = waitExpr;
}
/** Flattens a binary wait-expression tree into {@code exprs}, left subtree first. */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    BLangExpression leftOperand = binaryExpr.lhsExpr;
    BLangExpression rightOperand = binaryExpr.rhsExpr;
    visitBinaryExprOfWait(leftOperand, exprs);
    visitBinaryExprOfWait(rightOperand, exprs);
    return exprs;
}
/** Recurses into nested binary expressions; rewrites and collects leaf expressions. */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
/**
 * Desugars a multiple-wait ({@code wait {a: f1, b: f2}}) expression: rewrites each
 * key-value entry (the value expression when present, otherwise the key itself) and
 * replaces the node with a wait-literal carrying the same pairs and type.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    waitExpr.keyValuePairs.forEach(keyValue -> {
        boolean hasValueExpr = keyValue.valueExpr != null;
        if (hasValueExpr) {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        } else {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        }
    });
    BLangExpression waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
/**
 * Desugars a trap expression: rewrites the trapped expression, and — unless it is
 * nil-typed — adds a conversion so the value matches the trap expression's own type.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    BLangExpression trapped = rewriteExpr(trapExpr.expr);
    if (Types.getReferredType(trapped.getBType()).tag != TypeTags.NIL) {
        trapped = addConversionExprIfRequired(trapped, trapExpr.getBType());
    }
    trapExpr.expr = trapped;
    result = trapExpr;
}
/**
 * Desugars a binary expression. Special forms (nullable operands, int ranges, logical
 * and/or) take dedicated paths; everything else has its operands rewritten and then, in a
 * carefully ordered cascade, receives whatever operand casts the operator requires
 * (byte↔int, string↔xml, decimal/float widening, shift/arithmetic/relational casts).
 * The order of the checks below is significant.
 */
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    if (isNullableBinaryExpr(binaryExpr)) {
        // Nil-lifting: desugar into an if/else statement expression.
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        // a ..< b / a ... b: cast bounds to int and replace with an int-range invocation.
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
        rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            // Half-open range excludes the upper bound.
            rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
        return;
    }
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        // Short-circuiting operators get their own desugaring.
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        // byte operands with an expected int result need int conversions first.
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    // Equality between a byte and an integer-typed operand: widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            // Same operand types and no further cast requirement: done.
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // string + xml (either side): wrap the string side in an xml text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    // Numeric widening: decimal dominates float, float dominates the rest.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
}
/**
 * Desugars a binary expression with nullable operand(s) into a nil-lifting statement
 * expression.
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     *
     * int? $lhsExprVar$ = x;
     * int? $rhsExprVar$ = y;
     * if (lhsVar is () or rhsVar is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = $lhsExprVar$ + $rhsExprVar$;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    // First non-nil member of the expression's union type (the "real" operand type).
    BType nonNilType = exprBType.getMemberTypes().iterator().next();
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
    // For bitwise operators, each operand keeps its own (nil-stripped) type in the
    // else-branch cast; arithmetic/shift operators use the result's non-nil type.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    // $result$ holds the nil-or-value outcome of the whole expression.
    BLangSimpleVariableDef tempVarDef = createVarDef("result",
            binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Capture both operands once so each is evaluated exactly one time.
    BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
            binaryExpr.lhsExpr, binaryExpr.pos);
    BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
    blockStmt.addStatement(lhsVarDef);
    BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
            binaryExpr.rhsExpr, binaryExpr.pos);
    BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
    blockStmt.addStatement(rhsVarDef);
    // Condition: $lhsExprVar$ is () || $rhsExprVar$ is ()
    BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    // then: $result$ = ()
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else: $result$ = <lhsType>$lhsExprVar$ <op> <rhsType>$rhsExprVar$
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
            nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns true when the binary expression is an arithmetic, shift, or bitwise operation
 * with at least one nullable operand (and both operand types are known), i.e. it requires
 * the nil-lifting desugaring.
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts the operand casts an arithmetic binary expression needs so both sides agree:
 * matching int/string/xml operand pairs need nothing; a non-xml operand paired with an xml
 * operand is lifted to xml (string content becomes an xml text literal, anything else is
 * cast to xml); otherwise both operands are cast to the expression's own type.
 *
 * Fix: the xml text literal built for the LHS operand now carries the LHS expression's own
 * position — previously it was (incorrectly) given the RHS expression's position, unlike
 * the mirrored RHS branch.
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Use the LHS expression's own position (was binaryExpr.rhsExpr.pos).
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Casts each shift operand that is neither integer-typed nor byte to int, so both sides
 * of a shift expression are integer-compatible. int/byte operands are left untouched.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntCompatible = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntCompatible = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIsIntCompatible) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIsIntCompatible) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Inserts the operand casts a relational (comparison) expression needs. Mixed numeric
 * operands are unified in precedence order decimal > float > int, byte operands are lifted
 * to int (preserving nilability when either side is a nullable union), and mixed
 * string/non-string operands are unified to string. The order of checks matters.
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    BType lhsExprType = binaryExpr.lhsExpr.getBType();
    BType rhsExprType = binaryExpr.rhsExpr.getBType();
    if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&
            rhsExprTypeTag == TypeTags.BYTE)) {
        // Already same family: nothing to do.
        return;
    }
    // Numeric unification, decimal first then float.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        addTypeCastForBinaryExprB(binaryExpr, lhsExprType, rhsExprType);
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        addTypeCastForBinaryExprA(binaryExpr, rhsExprType, lhsExprType);
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        addTypeCastForBinaryExprB(binaryExpr, lhsExprType, rhsExprType);
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        addTypeCastForBinaryExprA(binaryExpr, rhsExprType, lhsExprType);
        return;
    }
    // One side integer: lift the other side to int.
    if (isLhsIntegerType && !isRhsIntegerType) {
        addTypeCastForBinaryExprB(binaryExpr, symTable.intType, rhsExprType);
        return;
    }
    if (!isLhsIntegerType && isRhsIntegerType) {
        addTypeCastForBinaryExprA(binaryExpr, symTable.intType, lhsExprType);
        return;
    }
    if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
        if ((lhsExprTypeTag == TypeTags.UNION && lhsExprType.isNullable()) ||
                (rhsExprTypeTag == TypeTags.UNION && rhsExprType.isNullable())) {
            // Nullable operand involved: cast both sides to int|() so nil survives.
            binaryExpr.lhsExpr = addNilType(symTable.intType, binaryExpr.lhsExpr);
            binaryExpr.rhsExpr = addNilType(symTable.intType, binaryExpr.rhsExpr);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    // String unification.
    boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);
    if (isLhsStringType && isRhsStringType) {
        return;
    }
    if (isLhsStringType && !isRhsStringType) {
        addTypeCastForBinaryExprB(binaryExpr, symTable.stringType, rhsExprType);
        return;
    }
    if (!isLhsStringType && isRhsStringType) {
        addTypeCastForBinaryExprA(binaryExpr, symTable.stringType, lhsExprType);
    }
}
/**
 * When the LHS is a nullable union, lifts the RHS to {@code rhsExprType|()}; otherwise
 * casts the LHS to {@code rhsExprType}.
 */
private void addTypeCastForBinaryExprA(BLangBinaryExpr binaryExpr, BType rhsExprType, BType lhsExprType) {
    boolean lhsIsNullableUnion = lhsExprType.tag == TypeTags.UNION && lhsExprType.isNullable();
    if (lhsIsNullableUnion) {
        binaryExpr.rhsExpr = addNilType(rhsExprType, binaryExpr.rhsExpr);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, rhsExprType);
}
/**
 * When the RHS is a nullable union, lifts the LHS to {@code lhsExprType|()}; otherwise
 * casts the RHS to {@code lhsExprType}.
 */
private void addTypeCastForBinaryExprB(BLangBinaryExpr binaryExpr, BType lhsExprType, BType rhsExprType) {
    boolean rhsIsNullableUnion = rhsExprType.tag == TypeTags.UNION && rhsExprType.isNullable();
    if (rhsIsNullableUnion) {
        binaryExpr.lhsExpr = addNilType(lhsExprType, binaryExpr.lhsExpr);
        return;
    }
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, lhsExprType);
}
/** Casts {@code expr} to the union {@code exprType|()}. */
private BLangExpression addNilType(BType exprType, BLangExpression expr) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(2);
    memberTypes.add(exprType);
    memberTypes.add(symTable.nilType);
    return createTypeCastExpr(expr, new BUnionType(null, memberTypes, true, false));
}
/** Builds an invocation of the internal createIntRange function over the two bounds. */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol rangeSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    List<BLangExpression> rangeArgs = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation =
            ASTBuilderUtil.createInvocationExprForMethod(location, rangeSymbol, rangeArgs, symResolver);
    rangeInvocation.setBType(symTable.intRangeType);
    return rangeInvocation;
}
/**
 * When an int result is expected from an operation involving byte operand(s), adds an int
 * conversion on each byte-typed side. No-op when no expected type is set, neither operand
 * is a byte, or the expected type is not int.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    int lhsTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    int rhsTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    boolean hasByteOperand = lhsTag == TypeTags.BYTE || rhsTag == TypeTags.BYTE;
    if (!hasByteOperand || binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression is a shift operation.
 * If it is, both the lhs and rhs of the binary expression will be converted to the 'int' type.
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where the binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars an elvis expression {@code lhs ?: rhs} into a block statement
 * expression of the form:
 * <pre>
 *   T _$result$_;
 *   T' $temp = lhs;                         // lhs evaluated exactly once
 *   if ($temp is ()) { _$result$_ = rhs; }
 *   else             { _$result$_ = &lt;T&gt;$temp; }
 *   =&gt; _$result$_
 * </pre>
 */
public void visit(BLangElvisExpr elvisExpr) {
    Location pos = elvisExpr.pos;
    String resultVarName = "_$result$_";
    BType resultType = elvisExpr.getBType();
    // Holder variable for the overall value of the elvis expression.
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Temp variable so the lhs is evaluated only once.
    String lhsResultVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable lhsResultVar =
            ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,
                    new BVarSymbol(0, names.fromString(lhsResultVarName),
                            this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),
                            this.env.scope.owner, elvisExpr.pos, VIRTUAL));
    BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);
    BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);
    // if-branch: lhs is nil -> result = rhs.
    BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(nilAssignment);
    // else-branch: lhs is non-nil -> result = <resultType>lhs.
    BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(lhsResultVarRef, resultType));
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
    elseBody.addStatement(notNilAssignment);
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, lhsResultVarRef, getNillTypeNode()), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(lhsResultVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Nullable +/-/~ goes through the nil-lifting statement-expression path.
    if (isNullableUnaryExpr(unaryExpr)) {
        BLangStatementExpression statementExpression = createStmtExprForNilableUnaryExpr(unaryExpr);
        result = rewrite(statementExpression, env);
        return;
    }
    // ~a is rewritten into a bitwise-xor binary expression.
    if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    // A unary +/- on a valid numeric literal with a finite expected type is
    // folded into a signed numeric literal.
    if (types.isExpressionInUnaryValid(unaryExpr.expr) && unaryExpr.expectedType.tag == TypeTags.FINITE) {
        result = rewriteExpr(Types.constructNumericLiteralFromUnaryExpr(unaryExpr));
        return;
    }
    OperatorKind opKind = unaryExpr.operator;
    if (opKind == OperatorKind.ADD || opKind == OperatorKind.SUB) {
        // Cast a non-integer operand of unary +/- to the expression's type.
        createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * Wraps the operand of a unary +/- in a cast to the unary expression's type,
 * unless the operand already has an integer type (where no widening cast is
 * needed).
 *
 * @param unaryExpr the unary +/- expression whose operand may be cast
 */
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    BLangExpression operand = unaryExpr.expr;
    if (!TypeTags.isIntegerTypeTag(operand.getBType().tag)) {
        unaryExpr.expr = createTypeCastExpr(operand, unaryExpr.getBType());
    }
}
/**
 * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.
 * Example : ~a -> a ^ -1;
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.opKind = OperatorKind.BITWISE_XOR;
    binaryExpr.lhsExpr = unaryExpr.expr;
    if (TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag) {
        // byte complement: a ^ 0xff keeps the result within 8 bits.
        binaryExpr.setBType(symTable.byteType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        // int complement: a ^ -1 flips all 64 bits.
        binaryExpr.setBType(symTable.intType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(binaryExpr);
}
/**
 * Desugars a unary expression with a nullable operand into a
 * nil-guarded statement expression (see the worked example below).
 *
 * @param unaryExpr the nullable unary expression to desugar
 * @return the equivalent statement expression
 */
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *      $result$ = ();
     *  } else {
     *      $result$ = +x;
     *  }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    // First member of the nullable union is the nil-lifted (non-nil) type.
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    // $result$ starts off as nil.
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Guard: operand is () ?
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    // if-branch: operand is nil -> $result$ = ().
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: apply the unary operator on the nil-lifted operand.
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns true when the unary expression has a nullable static type and its
 * operator is one of +, - or ~ — i.e. the cases desugared through the
 * nil-lifting statement-expression path.
 *
 * @param unaryExpr the unary expression to inspect
 * @return true if the expression needs nil-lifted desugaring
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    OperatorKind operator = unaryExpr.operator;
    return operator == OperatorKind.ADD
            || operator == OperatorKind.SUB
            || operator == OperatorKind.BITWISE_COMPLEMENT;
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion carrying only annotation attachments (no explicit target
    // type node) is a no-op cast: desugar it to the rewritten inner expression.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Note: the previous revision read conversionExpr.targetType into an
    // unused local; it was dead and has been removed.
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda with the enclosing package exactly once; the node
    // itself is already in desugared form.
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
/**
 * Desugars an arrow function (x =&gt; expr) into an ordinary lambda: a new
 * BLangFunction is synthesized around the arrow body, an invokable symbol is
 * created and defined for it, the arrow's parameters become the function's
 * required parameters, and the resulting lambda is registered with the
 * enclosing package.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type is taken from the arrow body's expression type.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    // Wrap the single-expression body into a block body with a return.
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);
    // Create and define the invokable symbol for the generated function.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                                                               new Name(funcNode.name.value),
                                                               new Name(funcNode.name.originalValue),
                                                               env.enclPkg.symbol.pkgID,
                                                               bLangArrowFunction.funcType,
                                                               env.enclEnv.enclVarSym, true,
                                                               bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Re-home each parameter symbol into the new function's scope.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    result = rewriteExpr(lambdaFunction);
}
/**
 * Attaches the given symbol to the invokable node and initializes a fresh
 * scope for it, shared with the invokable's symbol environment.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // Qualified names need no desugaring; pass through unchanged.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Rewrite the attribute's name and value expressions in place.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
/**
 * Desugars an XML element literal: namespace-declaration attributes are
 * turned into inline xmlns nodes (package- or local-scoped depending on the
 * owning scope), visible namespaces are propagated into nested query
 * contexts, and tag names plus children are rewritten.
 */
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Convert xmlns-declaring attributes into inline namespace nodes.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-owned elements get package-level xmlns nodes; others local.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    // Inside a query, namespaces visible at the query also apply here.
    List<BLangXMLNS> prevInlineNamespaces = this.inlineXMLNamespaces;
    if (isVisitingQuery && this.inlineXMLNamespaces != null) {
        xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);
    }
    this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    this.inlineXMLNamespaces = prevInlineNamespaces;
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        // NOTE(review): the rewritten node returned by rewriteExpr is
        // discarded here — presumably rewriting mutates the item in place;
        // confirm the items list never needs replacement nodes.
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single string-concat expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment's text fragments into one string-concat expression.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    // Fold the processing-instruction data fragments into one concat expr.
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Fold the quoted string's fragments into one string-concat expression.
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template desugars to a chain of string concatenations.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 *      string name = "Pubudu";
 *      'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 *      type RawTemplate$Impl$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 *
 *      
 *      'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
    // Generate the concrete object class for this literal's object type.
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();
    // Build the list constructor holding the interpolated insertions.
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;
    // new GeneratedClass([insertions...]) — the list feeds init()'s parameter.
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 *      type $anonType$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 * @param strings    The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos        The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    // Synthesize a class symbol/type mirroring the abstract object type.
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
                                                               env.enclPkg.packageID, null, env.enclPkg.symbol,
                                                               pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);
    // Create the class definition AST node for the synthesized type.
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
    // Initialize the 'strings' field with the literal's string portions.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;
    // Add the user-visible init(insertions) function.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
    // Add and analyze the compiler-generated init wrapper.
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                                                                      tempGeneratedInitFunction.symbol.scope, env);
    SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
    return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env       The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                                                                    names, Names.USER_DEFINED_INIT_SUFFIX,
                                                                    symTable, classDefn.getBType());
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
                                                       (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);
    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    // Each field without a default value becomes an init() parameter whose
    // value is assigned to the field in the init() body.
    for (BLangSimpleVariable field : classDefn.fields) {
        if (field.expr != null) {
            // Field already has a default value; no parameter needed.
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                                             initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                                                                  fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);
        // self.<field> = <param>;
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                                                            initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }
    return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Values sent between workers are cloned so workers never share state.
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Sync-send values are likewise cloned before crossing worker boundaries.
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Receive nodes need no desugaring; pass through unchanged.
    result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Collect the distinct worker names referenced by the cached send
    // statements (first-occurrence order preserved).
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // 'transactional' desugars to a call of the internal isTransactional()
    // function from the transaction module.
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
    result = ASTBuilderUtil
            .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
                                        Collections.emptyList(), symResolver);
}
@Override
public void visit(BLangCommitExpr commitExpr) {
    // Commit handling is delegated to the transaction desugar.
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a fail statement. Inside an on-fail scope (and not within a
 * query) the fail routes into the on-fail handler — nested when the handler
 * body itself contains a fail; otherwise the fail becomes a plain return of
 * the error value.
 */
@Override
public void visit(BLangFail failNode) {
    if (this.onFailClause != null && !this.isVisitingQuery) {
        if (this.onFailClause.bodyContainsFail) {
            result = rewriteNestedOnFail(this.onFailClause, failNode);
        } else {
            result = createOnFailInvocation(onFailClause, failNode);
        }
    } else {
        // No enclosing on-fail: return the (rewritten) error expression.
        BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        stmt.desugared = true;
        result = stmt;
    }
}
// The following nodes are produced by earlier desugaring phases and are
// already in final form; each visit is an identity pass-through.
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
    result = tableKeyAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // x.<a|b> desugars to the internal getElements(x, ...filters) lang-lib
    // call, with each filter expanded to its fully-qualified name.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}
/**
 * Expands XML element filters into string-literal arguments of fully
 * qualified names ({ns-uri}localName). A filter with a known prefix uses
 * that prefix's namespace; an unprefixed, non-wildcard filter picks up the
 * default namespace when one is in scope; otherwise the bare name is used.
 *
 * @param filters the element filters to expand
 * @return string literals suitable as lang-lib filter arguments
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            // No matching prefix: apply the default namespace unless the
            // filter is the wildcard "*".
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            // Prefix resolved: qualify the name with that namespace URI.
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}
/**
 * Builds a lang.xml lang-lib method invocation on the given expression, e.g.
 * {@code expr.functionName(args..., restArgs...)}; the receiver expression
 * becomes the first required argument.
 *
 * @param pos          position for the generated invocation
 * @param functionName lang.xml function to invoke
 * @param invokeOnExpr receiver expression (rewritten here)
 * @param args         additional required arguments
 * @param restArgs     rest arguments (rewritten here)
 * @return the fully populated invocation node
 */
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = invokeOnExpr;
    // Resolve the lang-lib method symbol against the xml type.
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);
    // The receiver is passed as the first required argument.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Desugars XML navigation ({@code x/**}{@code /<e>}, {@code x/*}, and
 * {@code x/<e>[i]}) into the corresponding internal lang.xml calls:
 * selectDescendants, children, or getFilteredChildrenFlat.
 */
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
        result = rewriteExpr(invocationNode);
    } else {
        // Filtered-children access: -1 signals "no index" to the runtime.
        BLangExpression childIndexExpr;
        if (xmlNavigation.childIndex == null) {
            childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
        } else {
            childIndexExpr = xmlNavigation.childIndex;
        }
        ArrayList<BLangExpression> args = new ArrayList<>();
        args.add(rewriteExpr(childIndexExpr));
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
        result = rewriteExpr(invocationNode);
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the lhs needs rewriting; the target type is static.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already in desugared form; pass through unchanged.
    result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // A rest-arg spread reduces to its underlying expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named argument reduces to its (rewritten) value expression.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // 'check' — errors propagate to the caller.
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // 'checkpanic' — errors panic instead of propagating.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Desugars {@code check}/{@code checkpanic} into a statement expression:
 * the checked expression is evaluated once into a temp var; if the temp
 * matches the success type it becomes the result (cast), otherwise the error
 * path runs (return/panic per {@code isCheckPanic}).
 *
 * @param checkedExpr  the check/checkpanic expression
 * @param isCheckPanic true for checkpanic (panic on error), false for check
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    // Checking already proven redundant reduces to the inner expression.
    if (checkedExpr.isRedundantChecking) {
        result = rewriteExpr(checkedExpr.expr);
        return;
    }
    Location pos = checkedExpr.pos;
    String resultVarName = "_$result$_";
    BType resultType = checkedExpr.getBType();
    // Holder for the success value of the checked expression.
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Temp variable so the checked expression is evaluated exactly once.
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BType checkedExprType = checkedExpr.expr.getBType();
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,
                    checkedExpr.expr, new BVarSymbol(0, names.fromString(checkedExprVarName),
                            this.env.scope.owner.pkgID, checkedExprType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(pos, checkedExprVar);
    BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);
    // if-branch: temp matches the success type -> result = <T>temp.
    BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(checkedExprVarRef, resultType));
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(successAssignment);
    // else-branch: handle the error (return to caller or panic).
    BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,
            checkedExpr.equivalentErrorTypeList, isCheckPanic);
    BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    checkedExprTypeNode.setBType(resultType);
    checkedExprTypeNode.typeKind = resultType.getKind();
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(checkedExprVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor becomes a plain 'new' of the service class.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
    // Desugar the anonymous class first, then reduce to its type-init.
    visit(bLangObjectConstructorExpression.classNode);
    bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
}
/**
 * Desugars an annotation access (expr.@annot) into a synthetic ANNOT_ACCESS
 * binary expression whose rhs is the annotation's bvm alias string literal.
 */
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    // Synthesize an operator symbol for the ANNOT_ACCESS pseudo-operator.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
                                              new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
                                                      binaryExpr.rhsExpr.getBType()),
                                                      annotAccessExpr.getBType(), null), null,
                                              symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}
/**
 * Desugars a type-test expression. Value-typed operands are first boxed via
 * a conversion to 'any'; a negated test ({@code !is}) is rewritten as the
 * logical NOT of the corresponding positive test.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.getBType())) {
        // Box value types so the runtime type check can inspect them.
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    if (typeTestExpr.isNegation) {
        // x !is T  ->  !(x is T)
        BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                typeTestExpr.expr, typeTestExpr.typeNode);
        BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        bLangGroupExpr.expression = bLangTypeTestExpr;
        bLangGroupExpr.setBType(typeTestExpr.getBType());
        BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                                                                  typeTestExpr.getBType(),
                                                                  OperatorKind.NOT, null);
        result = rewriteExpr(unaryExpr);
        return;
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    // Only the tested expression needs rewriting.
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    // Rewrite both the value expression and the preceding statement.
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Track that we are inside a query while the query desugarer runs, and
    // restore the previous flag afterwards (queries can nest).
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    // The query desugarer lowers the query into a statement expression,
    // propagating any visible XML namespace declarations into its body.
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));
    result = rewrite(stmtExpr, env);
    this.isVisitingQuery = prevIsVisitingQuery;
}
/**
 * Collects the statements that must be propagated into a desugared query
 * body for every XML namespace visible from the given environment.
 * Namespaces with no associated propagated statement are skipped.
 */
List<BLangStatement> getVisibleXMLNSStmts(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    List<BLangStatement> visibleStmts = new ArrayList<>();
    for (Name nsName : nameBXMLNSSymbolMap.keySet()) {
        BLangStatement stmt = this.stmtsToBePropagatedToQuery.get(nsName);
        if (stmt != null) {
            visibleStmts.add(stmt);
        }
    }
    return visibleStmts;
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Same shape as visit(BLangQueryExpr): set the in-query flag, lower the
    // query action to a statement expression, rewrite it, restore the flag.
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));
    result = rewrite(stmtExpr, env);
    this.isVisitingQuery = prevIsVisitingQuery;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    // Rewrite every member expression in place; the literal node survives.
    List<BLangExpression> members = jsonArrayLiteral.exprs;
    jsonArrayLiteral.exprs = rewriteExprs(members);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
BConstantSymbol constSymbol = constant.symbol;
BType refType = Types.getReferredType(constSymbol.literalType);
if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
constSymbol.value.value == null)) {
throw new IllegalStateException();
}
BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
constSymbol.value.value);
constant.expr = rewriteExpr(literal);
} else {
constant.expr = rewriteExpr(constant.expr);
}
constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Placeholder expression: nothing to desugar, pass through unchanged.
    result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    // Desugar the conditional argument before its guarding condition,
    // preserving the original evaluation order of the rewrites.
    BLangExpression loweredArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.conditionalArgument = loweredArgument;
    BLangExpression loweredCondition = rewriteExpr(dynamicParamExpr.condition);
    dynamicParamExpr.condition = loweredCondition;
    result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    // A constant reference collapses directly to a literal of its value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
/**
 * Builds {@code $iterator$ = collection.iterator()} — a variable definition
 * holding the iterator obtained from the given collection.
 *
 * @param pos source position for all synthesized nodes
 * @param collectionSymbol symbol of the collection being iterated
 * @param iteratorInvokableSymbol the `iterator()` function to invoke
 * @param isIteratorFuncFromLangLib whether that function is a langlib method
 * @return the synthesized `$iterator$` variable definition
 */
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    // The collection itself is the sole (receiver) argument.
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds {@code $result$ = $iterator$.next()} — a variable definition
 * capturing one step of iteration. The result type is nillable because
 * `next()` returns nil at end of iteration.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
/**
 * Synthesizes the {@code $iterator$.next()} invocation node, resolving the
 * attached `next` function from the iterator's object type.
 */
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // NOTE(review): getNextFunc may return null if the object has no `next`
    // method; callers are expected to pass a valid iterator object type.
    BInvokableSymbol nextFuncSymbol =
            getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    // The iterator itself is the receiver argument of `next`.
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}
/**
 * Finds the attached {@code next} function on the given iterator object
 * type, or returns {@code null} when the object declares no such method.
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if ("next".equals(attachedFunc.funcName.value)) {
            return attachedFunc;
        }
    }
    return null;
}
/**
 * Convenience wrapper: builds {@code $result$.value} field access with the
 * given value type.
 */
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                               BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
/**
 * Builds a field access expression {@code <resultSymbol>.<fieldName>} typed
 * as {@code varType}; originalType mirrors the assigned type.
 */
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.setBType(varType);
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
    return fieldBasedAccessExpression;
}
/**
 * Converts an arrow function's single-expression body into a block body
 * containing {@code return <expr>;}.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode bodyBlock = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bLangArrowFunction.body.expr.pos;
    returnStmt.setExpression(bLangArrowFunction.body.expr);
    bodyBlock.addStatement(returnStmt);
    return bodyBlock;
}
/**
 * Creates an invocation of a function resolved from the root scope (i.e. a
 * built-in), with the given positional arguments and return type.
 *
 * @param functionName name looked up in {@code symTable.rootScope}
 * @param args required (positional) arguments
 * @param retType type assigned to the invocation node
 */
protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Resolve from the root scope; assumes the built-in exists there.
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.setBType(retType);
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Creates a langlib method invocation {@code onExpr.functionName(args...)}.
 * The receiver is prepended to the required-args list, and the symbol is
 * resolved against the receiver's type.
 *
 * @param functionName langlib method name
 * @param onExpr receiver expression (also first required arg)
 * @param args remaining arguments
 * @param retType return type; when null, the resolved symbol's declared
 *                return type is used
 * @param pos position attached to the synthesized nodes
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);
    // Receiver first, then the explicit arguments.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Creates an invocation of a {@code lang.__internal} module function (no
 * receiver expression).
 *
 * @param functionName name of the internal langlib function
 * @param args positional arguments for the call
 * @param retType return type; when null, the resolved symbol's declared
 *                return type is used
 * @param pos position attached to the synthesized nodes
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    // Defensive copy so later mutation of the invocation's arg list (e.g.
    // moving args into restArgs) cannot alias the caller's list.
    invocationNode.requiredArgs = new ArrayList<>(args);
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Creates an empty, mutable array literal node typed {@code any[]}.
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.setBType(new BArrayType(symTable.anyType));
    return expr;
}
/**
 * Desugars an invocation through a function pointer. The callee is rebuilt
 * as a value expression — a simple var-ref when there is no receiver, or a
 * field access when the pointer is read off another expression — and the
 * whole call becomes a {@code BFunctionPointerInvocation}.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangValueExpression expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.setBType(iExpr.symbol.type);
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps the expression in a langlib {@code clone()} call, converting the
 * result to {@code lhsType} if needed. Value types and errors are returned
 * untouched — they need no cloning.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}
/**
 * Wraps the expression in a langlib {@code cloneReadOnly()} call, converting
 * the result to {@code lhsType} if needed. Value types and errors are
 * returned untouched — they are inherently immutable.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            exprType,
            expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}
/**
 * Core rewrite driver: visits {@code node} under the given environment and
 * returns the desugared replacement. Already-desugared nodes are returned
 * as-is, which also makes the rewrite idempotent.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    // Swap in the caller's environment for the duration of the visit and
    // restore the previous one afterwards (visits can nest).
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    // The visitor communicates its replacement through `this.result`.
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Expression-specific rewrite driver. If the node carries a pending
 * implicit conversion, the conversion wrapper (which contains the node) is
 * visited instead, and the pending marker is cleared so it is not applied
 * twice.
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Statement-specific rewrite: delegates to the generic node rewrite, with a
 * runtime check that the replacement is still a statement.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    return (E) (BLangStatement) rewrite((BLangNode) statement, env);
}
/**
 * Rewrites every statement in the list in place; returns the same list.
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
/**
 * Rewrites every node in the list in place; returns the same list.
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
/**
 * Rewrites every expression in the list in place; returns the same list.
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
/**
 * Builds a string literal node at the given position.
 */
private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
/**
 * Builds an int literal node (no position attached).
 */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.value = value;
    intLiteral.setBType(symTable.intType);
    return intLiteral;
}
/**
 * Builds a byte literal node. The signed Java byte is widened to its
 * unsigned int value, matching Ballerina's 0-255 byte semantics.
 */
private BLangLiteral createByteLiteral(Location pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}
/**
 * Wraps {@code expr} in an internal type-conversion node targeting
 * {@code targetType}; returns the expression unchanged when the types are
 * already the same.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.setBType(targetType);
    conversionExpr.targetType = targetType;
    // Mark as compiler-generated so later phases treat it accordingly.
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Strips nested array dimensions (following type references at each level)
 * and returns the innermost non-array type, un-dereferenced.
 */
private BType getElementType(BType bType) {
    BType current = bType;
    BType referred = Types.getReferredType(current);
    while (referred.tag == TypeTags.ARRAY) {
        current = ((BArrayType) referred).getElementType();
        referred = Types.getReferredType(current);
    }
    return current;
}
/**
 * Appends an implicit {@code return ();} to a block-bodied function whose
 * return type is nillable and whose body does not already end in a return.
 * Native functions and expression/external bodies are left alone.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        Location invPos = invokableNode.pos;
        Location returnStmtPos;
        // Synthesize a position at the end of the function, except for
        // generated init functions, which get no position.
        if (invPos != null && !invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().startLine().offset(),
                    invPos.lineRange().startLine().offset(), 0, 0);
        } else {
            returnStmtPos = null;
        }
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature.
 * Positional and named args are aligned to the parameter list, and rest
 * arguments are normalized into a single array value. A spread vararg
 * (`...expr`) is first captured into a temporary so it is evaluated once.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Temporary holding the spread vararg value, when one is present and
    // some of it maps onto required parameters.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // The vararg expression must be evaluated exactly once: bind it to a
        // synthesized `$vararg$N` variable and use references from here on.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Align positional/named args (and slices of the vararg) with the
        // declared parameter order.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no spread vararg at the end — pack any individual rest args
    // into a single array literal for the rest parameter.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: the only rest arg is a spread vararg.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            return;
        }
        // The vararg-capture block must run before any arg that reads the
        // temporary: fold it into the first non-rest argument as a
        // statement expression.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        // A record-typed vararg only feeds named params; the rest param
        // receives an empty array.
        if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        // Otherwise copy the tail of the vararg (past the slots consumed by
        // required params) into a fresh array via a desugared foreach.
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        BType refType = Types.getReferredType(varargVarType);
        if (refType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) refType;
            // For a closed array fully consumed by required params, index
            // accesses past the end would be ill-typed; use the rest
            // element type directly.
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                List.of(pushExpr),
                restParamType, pos);
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case 3: individual rest args followed by a spread vararg — build an
    // array from the leading args, then push the spread tail onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
            this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Rebuilds the required-args list in declared parameter order. For each
 * parameter the value comes from, in priority: the positional arg at that
 * index, a matching named arg, a synthesized record literal (for included
 * record params), an ignore expression (defaultable param, no vararg), or a
 * slice/field of the captured vararg. Leftover named args are distributed
 * into the included-record literals afterwards.
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
    // The one included-record param (if any) whose record type allows
    // additional fields; unmatched named args land there.
    BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = Types.getReferredType(varargRef.getBType());
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional argument supplied for this parameter.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            args.add(namedArgs.remove(param.name.value));
        } else if (param.getFlags().contains(Flag.INCLUDED)) {
            // Included record param: synthesize an (initially empty) record
            // literal; fields are filled in from leftover named args below.
            BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            BType paramType = param.type;
            recordLiteral.setBType(paramType);
            args.add(recordLiteral);
            incRecordLiterals.add(recordLiteral);
            if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
                incRecordParamAllowAdditionalFields = recordLiteral;
            }
        } else if (varargRef == null) {
            // No value provided and no vararg: use an ignore expression so
            // the default value kicks in downstream.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.setBType(param.type);
            args.add(expr);
        } else {
            if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
                if (param.isDefaultable) {
                    // record vararg + defaultable param: pass the field only
                    // when the record actually has the key, else ignore.
                    BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
                            List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
                    BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
                    BLangIndexBasedAccess memberAccessExpr =
                            ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
                    BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
                    BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
                            hasKeyInvocation, memberAccessExpr, ignoreExpr);
                    args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
                } else {
                    BLangFieldBasedAccess fieldBasedAccessExpression =
                            ASTBuilderUtil.createFieldAccessExpr(varargRef,
                                    ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
                    fieldBasedAccessExpression.setBType(param.type);
                    args.add(fieldBasedAccessExpression);
                }
            } else {
                // List-typed vararg: consume the next member by index.
                BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
                BType memberAccessExprType = tupleTypedVararg ?
                        ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
                args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
                        varargRef, indexExpr), param.type));
                varargIndex++;
            }
        }
    }
    if (namedArgs.size() > 0) {
        setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
    }
    iExpr.requiredArgs = args;
}
/**
 * Distributes leftover named args into the synthesized included-record
 * parameter literals. An arg matching a declared (non-never) field of some
 * included record becomes a field of that record's literal; anything else
 * becomes an additional field on the included record that permits them.
 *
 * @param namedArgs named args not consumed by any declared parameter
 * @param incRecordLiterals record literals synthesized for included params
 * @param incRecordParamAllowAdditionalFields literal of the included record
 *        param whose type allows additional fields; may be null when no
 *        such param exists (matches the original behavior — an NPE here
 *        would indicate a front-end bug)
 */
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    // Iterate entries directly to avoid a second map lookup per key.
    for (Map.Entry<String, BLangExpression> namedArg : namedArgs.entrySet()) {
        String name = namedArg.getKey();
        BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArg.getValue();
        boolean isAdditionalField = true;
        for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
            LinkedHashMap<String, BField> fields =
                    ((BRecordType) Types.getReferredType(recordLiteral.getBType())).fields;
            if (fields.containsKey(name) &&
                    Types.getReferredType(fields.get(name).type).tag != TypeTags.NEVER) {
                isAdditionalField = false;
                createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);
                break;
            }
        }
        if (isAdditionalField) {
            createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);
        }
    }
}
/**
 * Turns a named arg into a key-value field and appends it to the given
 * included-record literal.
 */
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    // The key is a var-ref carrying the named arg's name.
    BLangSimpleVarRef varRef = new BLangSimpleVarRef();
    varRef.variableName = expr.name;
    BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.
            createBLangRecordKeyValue(varRef, expr.expr);
    recordLiteral.fields.add(recordKeyValueField);
}
/**
 * Builds the error-handling block for a checked expression: binds the error
 * value to a `$t_failure` temp, then either fails (possibly returning the
 * error, when the enclosing function can return all equivalent error types)
 * or panics.
 *
 * @param location position for the synthesized nodes
 * @param ref reference holding the possibly-error value
 * @param invokableSymbol enclosing function's symbol (for its return type)
 * @param equivalentErrorTypes error types the checked expr may produce
 * @param isCheckPanicExpr true for `checkpanic` — always panic on error
 */
private BLangBlockStmt getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,
                                              BSymbol invokableSymbol,
                                              List<BType> equivalentErrorTypes,
                                              boolean isCheckPanicExpr) {
    BType enclosingFuncReturnType = Types.getReferredType(((BInvokableType) invokableSymbol.type).retType);
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<>() {{
                add(enclosingFuncReturnType);
            }};
    // The error can be returned only if every possible error type is
    // assignable to some member of the enclosing return type.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable errorVar =
            ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,
                    createTypeCastExpr(ref, symTable.errorType),
                    new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
                            this.env.scope.owner.pkgID, symTable.errorType,
                            this.env.scope.owner, location, VIRTUAL));
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);
    BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);
    blockStmt.addStatement(errorVarDef);
    BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);
    if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
        // `fail` so an enclosing on-fail clause can intercept; when the
        // error is directly returnable, attach the return to the fail node.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = location;
        failStmt.expr = errorVarRef;
        blockStmt.addStatement(failStmt);
        if (returnOnError && this.shouldReturnErrors) {
            BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));
            errorReturn.desugared = true;
            failStmt.exprStmt = errorReturn;
        }
    } else {
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = location;
        panicNode.expr = errorVarRef;
        blockStmt.addStatement(panicNode);
    }
    return blockStmt;
}
/**
 * Wraps {@code expr} in an unchecked type-conversion node targeting
 * {@code lhsType} when a conversion is actually needed. Returns the
 * expression unchanged when the types already match, when an implicit cast
 * was registered instead, or for the special widening cases below.
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.getBType();
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // Prefer an implicit cast if the type checker registers one.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        BLangExpression impConversionExpr = expr.impConversionExpr;
        expr.impConversionExpr = null;
        return impConversionExpr;
    }
    // nil is a valid json value — no conversion node needed.
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    // A tuple value is usable where an array is expected without a cast.
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.setBType(lhsType);
    conversionExpr.pos = expr.pos;
    // Unchecked, compiler-generated conversion.
    conversionExpr.checkTypes = false;
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Computes the (possibly synthesized) type of a structured binding pattern.
 * Tuple patterns become tuple types, record patterns become fresh anonymous
 * record types (with generated init functions and type definitions), error
 * patterns become fresh error types; simple variables just return their own
 * type.
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            // The rest binding has array type; the tuple's rest type is its
            // element type.
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }
    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        // Synthesize an anonymous record type, one field per binding, each
        // field's type recursively derived from its value pattern.
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                        env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
                        VIRTUAL);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
                    recordSymbol, bindingPatternVariable.pos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;
        // With a rest binding, inherit its rest field type; otherwise the
        // open record defaults to anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BRecordType) recordVariable.restParam.getBType()).restFieldType :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                recordVarType,
                bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);
        return recordVarType;
    }
    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null, errorVariable.pos, VIRTUAL);
        BType detailType;
        // Only a rest-detail binding: reuse the generic detail map type;
        // otherwise synthesize a record type for the named detail entries.
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            detailType = symTable.detailType;
        } else {
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                    errorVariable.pos);
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            recordTypeNode.initFunction = TypeDefBuilderHelper
                    .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
                    recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
        errorTypeSymbol.type = errorType;
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
                createErrorTypeNode(errorType), env);
        return errorType;
    }
    return bindingPatternVariable.getBType();
}
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
List<BLangSimpleVariable> fieldList = new ArrayList<>();
for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
BVarSymbol symbol = field.valueBindingPattern.symbol;
if (symbol == null) {
symbol = new BVarSymbol(Flags.PUBLIC, names.fromString(field.key.value + "$"),
this.env.enclPkg.packageID, symTable.pureType, null,
field.valueBindingPattern.pos, VIRTUAL);
}
BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
field.valueBindingPattern.pos,
symbol.name.value,
field.valueBindingPattern.getBType(),
field.valueBindingPattern.expr,
symbol);
fieldList.add(fieldVar);
}
return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
BLangSimpleVariable restDetail, int errorNo, Location pos) {
BRecordType detailRecordType = createAnonRecordType(pos);
if (restDetail == null) {
detailRecordType.sealed = true;
}
for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
Name fieldName = names.fromIdNode(detailEntry.key);
BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,
detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);
detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));
detailRecordType.tsymbol.scope.define(fieldName, fieldSym);
}
return detailRecordType;
}
private BRecordType createAnonRecordType(Location pos) {
BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
SymTag.RECORD,
Flags.PUBLIC,
names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
detailRecordTypeSymbol.scope.define(
names.fromString(detailRecordTypeSymbol.name.value + "." +
detailRecordTypeSymbol.initializerFunc.funcName.value),
detailRecordTypeSymbol.initializerFunc.symbol);
BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
detailRecordType.restFieldType = symTable.anydataType;
return detailRecordType;
}
private BAttachedFunction createRecordInitFunc() {
BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner,
false, symTable.builtinPos, VIRTUAL);
initFuncSymbol.retType = symTable.nilType;
return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, bInvokableType, symTable.builtinPos);
}
BLangErrorType createErrorTypeNode(BErrorType errorType) {
BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
errorTypeNode.setBType(errorType);
return errorTypeNode;
}
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
BLangExpression expression) {
BLangBinaryExpr binaryExpr;
if (NodeKind.GROUP_EXPR == expression.getKind()) {
return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
}
if (NodeKind.BINARY_EXPR == expression.getKind()) {
binaryExpr = (BLangBinaryExpr) expression;
BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
} else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
anyType.setBType(symTable.anyType);
anyType.typeKind = TypeKind.ANY;
return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
} else {
binaryExpr = ASTBuilderUtil
.createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
expression.getBType());
if (opSymbol == symTable.notFoundSymbol) {
opSymbol = symResolver
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
binaryExpr, env);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
}
return binaryExpr;
}
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
varRef.pos = variable.pos;
varRef.variableName = variable.name;
varRef.symbol = variable.symbol;
varRef.setBType(variable.getBType());
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = variable.expr;
assignmentStmt.pos = variable.pos;
assignmentStmt.setVariable(varRef);
return assignmentStmt;
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
BVarSymbol selfSymbol) {
return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
variable.name);
}
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
BLangIdentifier fieldName) {
BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);
fieldAccess.symbol = fieldSymbol;
fieldAccess.setBType(fieldType);
fieldAccess.isStoreOnCreation = true;
BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
assignmentStmt.expr = expr;
assignmentStmt.pos = function.pos;
assignmentStmt.setVariable(fieldAccess);
SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
return rewrite(assignmentStmt, initFuncEnv);
}
private boolean safeNavigate(BLangAccessExpression accessExpr) {
if (accessExpr.isLValue || accessExpr.expr == null) {
return false;
}
if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
return true;
}
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
return safeNavigate((BLangAccessExpression) accessExpr.expr);
}
return false;
}
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
BType originalExprType = accessExpr.getBType();
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar =
ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID, accessExpr.getBType(),
this.env.scope.owner, accessExpr.pos, VIRTUAL));
BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();
BLangBlockStmt blockStmt =
ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matchStmt));
BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
stmtExpression.setBType(originalExprType);
this.matchStmtStack = new Stack<>();
this.accessExprStack = new Stack<>();
this.successClause = null;
this.safeNavigationAssignment = null;
return stmtExpression;
}
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
if (accessExpr.expr == null) {
return;
}
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
}
if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
BType originalType = Types.getReferredType(accessExpr.originalType);
if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
} else {
accessExpr.setBType(originalType);
}
if (this.safeNavigationAssignment != null) {
this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
}
return;
}
/*
* If the field access is a safe navigation, create a match expression.
* Then chain the current expression as the success-pattern of the parent
* match expr, if available.
* eg:
* x but { <--- parent match expr
* error e => e,
* T t => t.y but { <--- current expr
* error e => e,
* R r => r.z
* }
* }
*/
BLangExpression matchExpr = accessExpr.expr;
BType matchExprType = accessExpr.expr.getBType();
Location pos = accessExpr.pos;
BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(matchExpr, pos);
boolean isAllTypesRecords = false;
LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
BType referredType = Types.getReferredType(matchExpr.getBType());
if (referredType.tag == TypeTags.UNION) {
memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
}
if (accessExpr.nilSafeNavigation) {
matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));
matchStmt.setBType(type);
memTypes.remove(symTable.nilType);
}
if (accessExpr.errorSafeNavigation) {
matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));
matchStmt.setBType(type);
matchStmt.pos = pos;
memTypes.remove(symTable.errorType);
}
BLangMatchClause successClause = null;
Name field = getFieldName(accessExpr);
if (field == Names.EMPTY) {
successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
pushToMatchStatementStack(matchStmt, successClause, pos);
return;
}
if (isAllTypesRecords) {
for (BType memberType : memTypes) {
BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
}
}
matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));
pushToMatchStatementStack(matchStmt, successClause, pos);
return;
}
successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
pushToMatchStatementStack(matchStmt, successClause, pos);
}
private boolean isMapJson(BType originalType) {
return originalType.tag == TypeTags.MAP && ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
private void pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,
Location pos) {
this.matchStmtStack.push(matchStmt);
if (this.successClause != null) {
this.successClause.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));
}
this.successClause = successClause;
}
private Name getFieldName(BLangAccessExpression accessExpr) {
Name field = Names.EMPTY;
if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
field = new Name(((BLangFieldBasedAccess) accessExpr).field.value);
} else if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangExpression indexBasedExpression = ((BLangIndexBasedAccess) accessExpr).indexExpr;
if (indexBasedExpression.getKind() == NodeKind.LITERAL) {
field = new Name(((BLangLiteral) indexBasedExpression).value.toString());
}
}
return field;
}
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
for (BType memType : memTypes) {
int typeTag = Types.getReferredType(memType).tag;
if (typeTag != TypeTags.RECORD && typeTag != TypeTags.ERROR && typeTag != TypeTags.NIL) {
return false;
}
}
return true;
}
private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
Location pos = matchExpr.pos;
BVarSymbol errorPatternVarSymbol = new BVarSymbol(0, Names.fromString(errorPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(errorPatternVarSymbol, errorPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, errorPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getErrorTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
Location pos = matchExpr.pos;
BVarSymbol nullPatternVarSymbol = new BVarSymbol(0, Names.fromString(nullPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(nullPatternVarSymbol, nullPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, nullPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getNillTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,
BLangSimpleVariable tempResultVar) {
Location pos = matchExpr.pos;
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, createLiteral(pos, symTable.nilType,
Names.NIL_VALUE));
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangWildCardMatchPattern wildCardMatchPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);
wildCardMatchPattern.setBType(symTable.anyType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardMatchPattern);
}
private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,
BLangAccessExpression accessExpr,
BLangSimpleVariable tempResultVar, boolean liftError) {
type = types.getSafeType(type, true, liftError);
String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
Location pos = accessExpr.pos;
BVarSymbol successPatternSymbol;
if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
} else {
successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
}
BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
type, null, successPatternSymbol);
BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,
successPatternVar.symbol);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
}
if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
}
tempAccessExpr.expr = addConversionExprIfRequired(successPatternVarRef, type);
tempAccessExpr.errorSafeNavigation = false;
tempAccessExpr.nilSafeNavigation = false;
accessExpr.cloneRef = null;
if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
symTable.nilType));
} else {
tempAccessExpr.setBType(accessExpr.originalType);
}
tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,
Lists.of(assignmentStmt));
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
BLangValueType getNillTypeNode() {
BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
nillTypeNode.typeKind = TypeKind.NIL;
nillTypeNode.setBType(symTable.nilType);
return nillTypeNode;
}
BLangValueType createTypeNode(BType type) {
BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
typeNode.typeKind = type.getKind();
typeNode.setBType(type);
return typeNode;
}
private BLangValueExpression cloneExpression(BLangExpression expr) {
switch (expr.getKind()) {
case SIMPLE_VARIABLE_REF:
return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
case FIELD_BASED_ACCESS_EXPR:
case INDEX_BASED_ACCESS_EXPR:
return cloneAccessExpr((BLangAccessExpression) expr);
default:
throw new IllegalStateException();
}
}
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
if (originalAccessExpr.expr == null) {
return originalAccessExpr;
}
BLangExpression varRef;
NodeKind kind = originalAccessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
} else {
varRef = cloneExpression(originalAccessExpr.expr);
}
varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
BLangAccessExpression accessExpr;
switch (originalAccessExpr.getKind()) {
case FIELD_BASED_ACCESS_EXPR:
accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
((BLangFieldBasedAccess) originalAccessExpr).field);
break;
case INDEX_BASED_ACCESS_EXPR:
accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
break;
default:
throw new IllegalStateException();
}
accessExpr.originalType = originalAccessExpr.originalType;
accessExpr.pos = originalAccessExpr.pos;
accessExpr.isLValue = originalAccessExpr.isLValue;
accessExpr.symbol = originalAccessExpr.symbol;
accessExpr.errorSafeNavigation = false;
accessExpr.nilSafeNavigation = false;
accessExpr.setBType(originalAccessExpr.originalType);
return accessExpr;
}
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
symTable.intType,
symTable.intType));
}
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
symTable.intType,
symTable.intType));
}
private BLangLiteral getBooleanLiteral(boolean value) {
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = value;
literal.setBType(symTable.booleanType);
literal.pos = symTable.builtinPos;
return literal;
}
private boolean isDefaultableMappingType(BType type) {
switch (types.getSafeType(type, true, false).tag) {
case TypeTags.JSON:
case TypeTags.MAP:
case TypeTags.RECORD:
return true;
case TypeTags.TYPEREFDESC:
return isDefaultableMappingType(Types.getReferredType(type));
default:
return false;
}
}
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
BType returnType = symTable.nilType;
if (classDefinition.initFunction != null) {
returnType = classDefinition.initFunction.getBType().getReturnType();
}
BLangFunction initFunction =
TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
env, names, GENERATED_INIT_SUFFIX,
classDefinition.getBType(), returnType);
BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
(BInvokableType) initFunction.getBType(), null);
classDefinition.generatedInitFunction = initFunction;
initFunction.returnTypeNode.setBType(returnType);
return rewrite(initFunction, env);
}
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
/*
* Desugar (lhsExpr && rhsExpr) to following if-else:
*
* logical AND:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = rhsExpr;
* } else {
* $result$ = false;
* }
*
* logical OR:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = true;
* } else {
* $result$ = rhsExpr;
* }
*
*/
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
symTable.builtinPos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
BLangExpression thenResult;
if (binaryExpr.opKind == OperatorKind.AND) {
thenResult = binaryExpr.rhsExpr;
} else {
thenResult = getBooleanLiteral(true);
}
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
thenBody.addStatement(thenAssignment);
BLangExpression elseResult;
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
if (binaryExpr.opKind == OperatorKind.AND) {
elseResult = getBooleanLiteral(false);
} else {
elseResult = binaryExpr.rhsExpr;
}
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
elseBody.addStatement(elseAssignment);
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(binaryExpr.getBType());
result = rewriteExpr(stmtExpr);
}
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
switch (expression.getKind()) {
case TYPE_INIT_EXPR:
case RECORD_LITERAL_EXPR:
case OBJECT_CTOR_EXPRESSION:
return true;
case CHECK_EXPR:
return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
case TYPE_CONVERSION_EXPR:
return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
default:
return false;
}
}
private BType getRestType(BInvokableSymbol invokableSymbol) {
if (invokableSymbol != null && invokableSymbol.restParam != null) {
return invokableSymbol.restParam.type;
}
return null;
}
private BType getRestType(BLangFunction function) {
if (function != null && function.restParam != null) {
return function.restParam.getBType();
}
return null;
}
private BVarSymbol getRestSymbol(BLangFunction function) {
if (function != null && function.restParam != null) {
return function.restParam.symbol;
}
return null;
}
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
if (!field.isKeyValueField()) {
return false;
}
return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
BType type = mappingConstructorExpr.getBType();
Location pos = mappingConstructorExpr.pos;
List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
for (RecordLiteralNode.RecordField field : fields) {
if (field.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
(BLangRecordLiteral.BLangRecordKeyValueField) field;
BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
BLangExpression origKey = key.expr;
BLangExpression keyExpr;
if (key.computedKey) {
keyExpr = origKey;
} else {
keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
((BLangLiteral) origKey);
}
BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
rewriteExpr(keyValueField.valueExpr));
rewrittenField.pos = keyValueField.pos;
rewrittenField.key.pos = key.pos;
rewrittenFields.add(rewrittenField);
} else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
rewriteExpr(createStringLiteral(pos,
StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
rewriteExpr(varRefField)));
} else {
BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
spreadOpField.expr = rewriteExpr(spreadOpField.expr);
rewrittenFields.add(spreadOpField);
}
}
fields.clear();
BType refType = Types.getReferredType(type);
return refType.tag == TypeTags.RECORD ?
new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
new BLangMapLiteral(pos, type, rewrittenFields);
}
protected void addTransactionInternalModuleImport() {
if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
List<BLangIdentifier> pkgNameComps = new ArrayList<>();
pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
importDcl.pkgNameComps = pkgNameComps;
importDcl.pos = env.enclPkg.symbol.pos;
importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
importDcl.symbol = symTable.internalTransactionModuleSymbol;
env.enclPkg.imports.add(importDcl);
env.enclPkg.symbol.imports.add(importDcl.symbol);
}
}
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
// Creates the generated initializer function for the given class definition.
// If the class has no user-defined init function the generated one is returned
// as-is; otherwise it is wired up to delegate to the user-defined init.
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
// No user-defined init: the generated function is the sole initializer.
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
// Wire the generated init up so it invokes the user-defined init function.
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
}
```suggestion errStream.println("Warning: Cache generation skipped for platform dependencies with 'provided' scope"); ``` | static boolean pullDependencyPackages(String orgName, String packageName, String version) {
Path ballerinaUserHomeDirPath = ProjectUtils.createAndGetHomeReposPath();
Path centralRepositoryDirPath = ballerinaUserHomeDirPath.resolve(ProjectConstants.REPOSITORIES_DIR)
.resolve(ProjectConstants.CENTRAL_REPOSITORY_CACHE_NAME);
Path balaDirPath = centralRepositoryDirPath.resolve(ProjectConstants.BALA_DIR_NAME);
Path balaPath = ProjectUtils.getPackagePath(balaDirPath, orgName, packageName, version);
String ballerinaShortVersion = RepoUtils.getBallerinaShortVersion();
Path cacheDir = centralRepositoryDirPath.resolve(
ProjectConstants.CACHES_DIR_NAME + "-" + ballerinaShortVersion);
ProjectEnvironmentBuilder defaultBuilder = ProjectEnvironmentBuilder.getDefaultBuilder();
defaultBuilder.addCompilationCacheFactory(new FileSystemCache.FileSystemCacheFactory(cacheDir));
BalaProject balaProject = BalaProject.loadProject(defaultBuilder, balaPath);
Path packageCacheDir = cacheDir.resolve(orgName).resolve(packageName).resolve(version);
if (packageCacheDir.toFile().exists()) {
deleteDirectory(packageCacheDir);
}
PackageCompilation packageCompilation = balaProject.currentPackage().getCompilation();
Collection<Diagnostic> resolutionDiagnostics = packageCompilation.getResolution()
.diagnosticResult().diagnostics();
if (!resolutionDiagnostics.isEmpty()) {
printDiagnostics(resolutionDiagnostics);
}
if (packageCompilation.getResolution().diagnosticResult().hasErrors()) {
return true;
}
if (!hasProvidedPlatformDeps(packageCompilation)) {
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JvmTarget.JAVA_17);
Collection<Diagnostic> backendDiagnostics = jBallerinaBackend.diagnosticResult().diagnostics(false);
if (!backendDiagnostics.isEmpty()) {
printDiagnostics(backendDiagnostics);
}
return jBallerinaBackend.diagnosticResult().hasErrors();
}
errStream.println("Warning: Cache generation skipped due to platform dependencies with 'provided' scope");
return false;
} | errStream.println("Warning: Cache generation skipped due to platform dependencies with 'provided' scope"); | static boolean pullDependencyPackages(String orgName, String packageName, String version) {
Path ballerinaUserHomeDirPath = ProjectUtils.createAndGetHomeReposPath();
Path centralRepositoryDirPath = ballerinaUserHomeDirPath.resolve(ProjectConstants.REPOSITORIES_DIR)
.resolve(ProjectConstants.CENTRAL_REPOSITORY_CACHE_NAME);
Path balaDirPath = centralRepositoryDirPath.resolve(ProjectConstants.BALA_DIR_NAME);
Path balaPath = ProjectUtils.getPackagePath(balaDirPath, orgName, packageName, version);
String ballerinaShortVersion = RepoUtils.getBallerinaShortVersion();
Path cacheDir = centralRepositoryDirPath.resolve(
ProjectConstants.CACHES_DIR_NAME + "-" + ballerinaShortVersion);
ProjectEnvironmentBuilder defaultBuilder = ProjectEnvironmentBuilder.getDefaultBuilder();
defaultBuilder.addCompilationCacheFactory(new FileSystemCache.FileSystemCacheFactory(cacheDir));
BalaProject balaProject = BalaProject.loadProject(defaultBuilder, balaPath);
Path packageCacheDir = cacheDir.resolve(orgName).resolve(packageName).resolve(version);
if (packageCacheDir.toFile().exists()) {
deleteDirectory(packageCacheDir);
}
PackageCompilation packageCompilation = balaProject.currentPackage().getCompilation();
Collection<Diagnostic> resolutionDiagnostics = packageCompilation.getResolution()
.diagnosticResult().diagnostics();
if (!resolutionDiagnostics.isEmpty()) {
printDiagnostics(resolutionDiagnostics);
}
if (packageCompilation.getResolution().diagnosticResult().hasErrors()) {
return true;
}
if (!hasProvidedPlatformDeps(packageCompilation)) {
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(packageCompilation, JvmTarget.JAVA_17);
Collection<Diagnostic> backendDiagnostics = jBallerinaBackend.diagnosticResult().diagnostics(false);
if (!backendDiagnostics.isEmpty()) {
printDiagnostics(backendDiagnostics);
}
return jBallerinaBackend.diagnosticResult().hasErrors();
}
errStream.println("Warning: Cache generation skipped due to platform dependencies with 'provided' scope");
return false;
} | class CommandUtil {
// Placeholder keys substituted into generated template files.
public static final String ORG_NAME = "ORG_NAME";
public static final String PKG_NAME = "PKG_NAME";
public static final String DIST_VERSION = "DIST_VERSION";
public static final String TOOL_ID = "TOOL_ID";
// System property holding the user's home directory.
public static final String USER_HOME = "user.home";
// Resource names for the default .gitignore / devcontainer files.
public static final String GITIGNORE = "gitignore";
public static final String DEVCONTAINER = "devcontainer";
// Classpath resource directories bundling defaults and templates for `bal new`.
public static final String NEW_CMD_DEFAULTS = "new_cmd_defaults";
public static final String CREATE_CMD_TEMPLATES = "create_cmd_templates";
// Directory name for copied platform/tool dependency jars.
public static final String LIBS_DIR = "libs";
// Built-in template identifiers.
public static final String DEFAULT_TEMPLATE = "default";
public static final String MAIN_TEMPLATE = "main";
public static final String FILE_STRING_SEPARATOR = ", ";
// Jar file system used to read templates when running from a jar; lazily created by initJarFs().
private static FileSystem jarFs;
private static Map<String, String> env;
// Output streams for error/normal CLI output (errStream is injected via setPrintStream()).
private static PrintStream errStream;
private static PrintStream outStream;
private static Path homeCache;
private static boolean exitWhenFinish;
// Platform of the most recently resolved bala; set by getPlatformSpecificBalaPath().
private static String platform;
/**
 * Sets the stream that this utility class writes errors to.
 *
 * @param errStream stream errors are written to
 */
static void setPrintStream(PrintStream errStream) {
    CommandUtil.errStream = errStream;
}
/**
 * Initializes the jar file system used to read the bundled command templates when they
 * are packaged inside a jar (the template resource URI then contains a "!" separator).
 * Safe to call multiple times; the file system is only created once.
 */
public static void initJarFs() {
    try {
        URI uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();
        if (uri.toString().contains("!")) {
            final String[] array = uri.toString().split("!");
            if (null == jarFs) {
                env = new HashMap<>();
                jarFs = FileSystems.newFileSystem(URI.create(array[0]), env);
            }
        }
    } catch (URISyntaxException | IOException e) {
        // Preserve the cause (was a bare AssertionError) so failures are debuggable.
        throw new AssertionError(e);
    }
}
/**
 * Print command errors with a standard format.
 *
 * @param stream error will be sent to this stream
 * @param error  error message
 * @param usage  usage if any
 * @param help   if the help message should be printed
 */
public static void printError(PrintStream stream, String error, String usage, boolean help) {
    stream.println("ballerina: " + error);
    if (usage != null) {
        stream.println();
        stream.println("USAGE:");
        stream.println("  " + usage);
    }
    if (help) {
        stream.println();
        stream.println("For more information try --help");
    }
}
/**
 * Exit with error code 1.
 *
 * @param exit Whether to exit or not.
 */
public static void exitError(boolean exit) {
    if (!exit) {
        return;
    }
    Runtime.getRuntime().exit(1);
}
/**
 * Applies a central template bala to a new project directory.
 * Prints an error (and optionally exits) when the bala cannot be found; on an I/O
 * failure the partially created project files are rolled back before reporting.
 *
 * @param orgName         organization of the template package
 * @param templatePkgName name of the template package
 * @param version         version of the template package
 * @param packageName     name of the new package being created
 * @param projectPath     path of the new project
 * @param balaCache       bala cache directory
 * @param filesInDir      pre-existing files to keep when rolling back
 */
static void applyTemplate(String orgName, String templatePkgName, String version, String packageName,
                          Path projectPath, Path balaCache, List<Path> filesInDir) {
    Path balaPath = getPlatformSpecificBalaPath(orgName, templatePkgName, version, balaCache);
    if (!Files.exists(balaPath)) {
        CommandUtil.printError(errStream, "unable to find the bala: " + balaPath, null, false);
        CommandUtil.exitError(exitWhenFinish);
    }
    try {
        addModules(balaPath, projectPath, packageName);
    } catch (IOException e) {
        // Roll back the partially created project before reporting the failure.
        ProjectUtils.deleteSelectedFilesInDirectory(projectPath, filesInDir);
        CommandUtil.printError(errStream,
                "error occurred while creating the package: " + e.getMessage(),
                null,
                false);
        CommandUtil.exitError(exitWhenFinish);
    }
}
// Materializes a new package at projectPath from the template bala at balaPath:
// reads the bala's package.json / dependency-graph.json / bal-tool.json, writes
// Ballerina.toml, Dependencies.toml and BalTool.toml, copies docs, modules
// (renamed after packageName), the icon, platform libraries and include files.
private static void addModules(Path balaPath, Path projectPath, String packageName)
throws IOException {
Gson gson = new Gson();
// Metadata files shipped inside the bala.
Path packageJsonPath = balaPath.resolve(PACKAGE_JSON);
Path dependencyGraphJsonPath = balaPath.resolve(DEPENDENCY_GRAPH_JSON);
Path balToolJsonPath = balaPath.resolve(TOOL_DIR).resolve(ProjectConstants.BAL_TOOL_JSON);
PackageJson templatePackageJson = null;
DependencyGraphJson templateDependencyGraphJson = null;
BalToolJson templateBalToolJson = null;
// package.json is mandatory; a read failure aborts the CLI with exit code 1.
try (InputStream inputStream = new FileInputStream(String.valueOf(packageJsonPath))) {
Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
templatePackageJson = gson.fromJson(fileReader, PackageJson.class);
} catch (IOException e) {
printError(errStream,
"Error while reading the package json file: " + e.getMessage(),
null,
false);
getRuntime().exit(1);
}
// dependency-graph.json and bal-tool.json are optional; only read when present.
if (dependencyGraphJsonPath.toFile().exists()) {
try (InputStream inputStream = new FileInputStream(String.valueOf(dependencyGraphJsonPath))) {
Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
templateDependencyGraphJson = gson.fromJson(fileReader, DependencyGraphJson.class);
} catch (IOException e) {
printError(errStream,
"Error while reading the dependency graph json file: " + e.getMessage(),
null,
false);
getRuntime().exit(1);
}
}
if (balToolJsonPath.toFile().exists()) {
try (InputStream inputStream = new FileInputStream(String.valueOf(balToolJsonPath))) {
Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
templateBalToolJson = gson.fromJson(fileReader, BalToolJson.class);
} catch (IOException e) {
printError(errStream,
"Error while reading the " + BAL_TOOL_JSON + " file: " + e.getMessage(),
null,
false);
getRuntime().exit(1);
}
}
// Only packages published as templates may be used here.
if (!templatePackageJson.getTemplate()) {
throw createLauncherException("unable to create the package: " +
"specified package is not a template");
}
// Write Ballerina.toml from the template metadata, under the new package name.
Path ballerinaToml = projectPath.resolve(ProjectConstants.BALLERINA_TOML);
Files.createDirectories(projectPath);
Files.createFile(ballerinaToml);
writeBallerinaToml(ballerinaToml, templatePackageJson, packageName, platform);
if (dependencyGraphJsonPath.toFile().exists()) {
Path dependenciesToml = projectPath.resolve(DEPENDENCIES_TOML);
Files.createFile(dependenciesToml);
writeDependenciesToml(projectPath, templateDependencyGraphJson, templatePackageJson);
}
if (balToolJsonPath.toFile().exists()) {
Path balToolToml = projectPath.resolve(BAL_TOOL_TOML);
Files.createFile(balToolToml);
writeBalToolToml(balToolToml, templateBalToolJson, packageName);
copyToolDependencies(projectPath, balaPath.resolve(TOOL_DIR).resolve(LIBS_DIR));
}
// Copy Package.md (when present) and seed default .gitignore / devcontainer files.
Path packageMDFilePath = balaPath.resolve("docs")
.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);
Path toPackageMdPath = projectPath.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);
if (Files.exists(packageMDFilePath)) {
Files.copy(packageMDFilePath, toPackageMdPath, StandardCopyOption.REPLACE_EXISTING);
}
createDefaultGitignore(projectPath);
createDefaultDevContainer(projectPath);
String templatePkgName = templatePackageJson.getName();
Path modulesRoot = balaPath.resolve(ProjectConstants.MODULES_ROOT);
Path moduleMdDirRoot = balaPath.resolve("docs").resolve(ProjectConstants.MODULES_ROOT);
List<Path> modulesList;
try (Stream<Path> pathStream = Files.list(modulesRoot)) {
modulesList = pathStream.collect(Collectors.toList());
}
// Copy each module: the root module goes to the project root, submodules are
// renamed from "<templatePkg>.<sub>" to "modules/<sub>".
for (Path moduleRoot : modulesList) {
Path moduleDir = Optional.of(moduleRoot.getFileName()).get();
Path destDir;
if (moduleDir.toString().equals(templatePkgName)) {
destDir = projectPath;
} else {
String moduleDirName = moduleDir.toString().split(templatePkgName + ProjectConstants.DOT, 2)[1];
destDir = projectPath.resolve(ProjectConstants.MODULES_ROOT).resolve(moduleDirName);
Files.createDirectories(destDir);
}
// FileUtils.Copy also rewrites templatePkgName -> packageName in copied sources.
Files.walkFileTree(moduleRoot, new FileUtils.Copy(moduleRoot, destDir, templatePkgName, packageName));
Path moduleMdSource = moduleMdDirRoot.resolve(moduleDir).resolve(ProjectConstants.MODULE_MD_FILE_NAME);
if (Files.exists(moduleMdSource)) {
Files.copy(moduleMdSource, destDir.resolve(ProjectConstants.MODULE_MD_FILE_NAME),
StandardCopyOption.REPLACE_EXISTING);
}
}
copyIcon(balaPath, projectPath);
copyPlatformLibraries(balaPath, projectPath);
copyIncludeFiles(balaPath, projectPath, templatePackageJson);
}
/**
 * Copies the package icon (first *.png directly under the bala docs directory, if any)
 * into the new project's docs directory. On an I/O failure an error is printed and the
 * CLI exits with code 1.
 *
 * @param balaPath    path of the template bala
 * @param projectPath path of the new project
 */
private static void copyIcon(Path balaPath, Path projectPath) {
    Path docsPath = balaPath.resolve(ProjectConstants.BALA_DOCS_DIR);
    try (Stream<Path> pathStream = Files.walk(docsPath, 1)) {
        List<Path> icon = pathStream
                .filter(FileSystems.getDefault().getPathMatcher("glob:**.png")::matches)
                .collect(Collectors.toList());
        if (icon.isEmpty()) {
            return;
        }
        Path projectDocsDir = projectPath.resolve(ProjectConstants.BALA_DOCS_DIR);
        Files.createDirectory(projectDocsDir);
        Path projectIconPath = projectDocsDir.resolve(Optional.of(icon.get(0).getFileName()).get());
        Files.copy(icon.get(0), projectIconPath, StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException e) {
        printError(errStream,
                "Error while retrieving the icon: " + e.getMessage(),
                null,
                false);
        getRuntime().exit(1);
    }
}
/**
 * Copies the platform-specific libraries bundled in the bala (for the platform most
 * recently resolved by {@code getPlatformSpecificBalaPath}) into the project's libs dir.
 *
 * @param balaPath    path of the template bala
 * @param projectPath path of the new project
 * @throws IOException when copying fails
 */
private static void copyPlatformLibraries(Path balaPath, Path projectPath) throws IOException {
    Path platformLibPath = balaPath.resolve("platform").resolve(platform);
    if (!Files.exists(platformLibPath)) {
        return;
    }
    Path libsDir = projectPath.resolve("libs");
    Files.createDirectories(libsDir);
    Files.walkFileTree(platformLibPath, new FileUtils.Copy(platformLibPath, libsDir));
}
/**
 * Copies files matched by the template package's {@code include} patterns into the new
 * project, renaming module directories from the template package name to the new one.
 *
 * @param balaPath            path of the template bala
 * @param projectPath         path of the new project
 * @param templatePackageJson package.json content of the template
 * @throws IOException when copying fails
 */
private static void copyIncludeFiles(Path balaPath, Path projectPath, PackageJson templatePackageJson)
        throws IOException {
    if (templatePackageJson.getInclude() == null) {
        return;
    }
    String templatePkgName = templatePackageJson.getName();
    List<Path> includePaths = ProjectUtils.getPathsMatchingIncludePatterns(
            templatePackageJson.getInclude(), balaPath);
    for (Path includePath : includePaths) {
        Path renamedIncludePath = updateModuleDirectoryNaming(includePath, balaPath, templatePkgName);
        Path source = balaPath.resolve(includePath);
        Path destination = projectPath.resolve(renamedIncludePath);
        // Only copy when the destination does not already exist.
        if (Files.notExists(destination)) {
            Files.createDirectories(destination);
            Files.walkFileTree(source, new FileUtils.Copy(source, destination));
        }
    }
}
/**
 * Rewrites an include path that lives under the bala's modules directory so the module
 * directory "<templatePkg>.<sub>" becomes "modules/<sub>"; other paths are returned unchanged.
 *
 * @param includePath     include path relative to the bala root
 * @param balaPath        path of the template bala
 * @param templatePkgName name of the template package
 * @return the rewritten path, or the original path when it is not under a module directory
 */
private static Path updateModuleDirectoryNaming(Path includePath, Path balaPath, String templatePkgName) {
    Path modulesDirPath = balaPath.resolve(ProjectConstants.MODULES_ROOT);
    Path absoluteIncludePath = balaPath.resolve(includePath);
    if (!absoluteIncludePath.startsWith(modulesDirPath)) {
        return includePath;
    }
    // First path element under modules/ is the "<templatePkg>.<sub>" directory.
    Path moduleRootPath = modulesDirPath.relativize(absoluteIncludePath).subpath(0, 1);
    String moduleDirName = Optional.of(moduleRootPath.getFileName()).get().toString();
    String destinationDirName = moduleDirName.split(templatePkgName + ProjectConstants.DOT, 2)[1];
    Path includePathRelativeToModuleRoot = modulesDirPath.resolve(moduleRootPath)
            .relativize(absoluteIncludePath);
    return Paths.get(ProjectConstants.MODULES_ROOT).resolve(destinationDirName)
            .resolve(includePathRelativeToModuleRoot);
}
/**
 * Find the bala path for a given template.
 *
 * @param template  template name in the form {@code org/package[:version]}
 * @param balaCache bala cache directory
 * @return the bala path, or {@code null} when the template has no version or no bala exists
 */
static Path findBalaTemplate(String template, Path balaCache) {
    String version = findPkgVersion(template);
    if (version == null) {
        return null;
    }
    Path balaPath = getPlatformSpecificBalaPath(findOrg(template), findPkgName(template), version, balaCache);
    return Files.exists(balaPath) ? balaPath : null;
}
// Initializes a new package from a template hosted on Ballerina Central.
// Pulls the template bala (falling back to the local filesystem cache when the
// package already exists locally or Central is unreachable), resolves the version
// to use, and applies the template to the project directory.
public static void initPackageFromCentral(Path balaCache, Path projectPath, String packageName, String template,
List<Path> filesInDir) {
System.setProperty(CentralClientConstants.ENABLE_OUTPUT_STREAM, "true");
String templatePackageName = findPkgName(template);
String orgName = findOrg(template);
String version = findPkgVersion(template);
Path pkgCacheParent = balaCache.resolve(orgName).resolve(templatePackageName);
try {
pullPackageFromRemote(orgName, templatePackageName, version, pkgCacheParent);
} catch (PackageAlreadyExistsException e) {
// Already cached locally: when no explicit version was asked for, use the latest cached one.
if (version == null) {
List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent);
PackageVersion latest = findLatest(packageVersions);
if (latest == null) {
throw createLauncherException("unable to find package in the filesystem cache." +
" This is an unexpected error : " + e.getMessage());
}
version = latest.toString();
}
} catch (CentralClientException e) {
// Central unreachable: fall back to whatever is in the filesystem cache.
errStream.println("Warning: Unable to pull the package from Ballerina Central: " + e.getMessage());
if (findBalaTemplate(template, balaCache) == null) {
List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent);
PackageVersion latest = findLatest(packageVersions);
if (latest == null) {
throw createLauncherException("template not found in filesystem cache.");
}
version = latest.toString();
}
}
// Version can still be null after a successful pull of "latest"; resolve it from the cache.
if (version == null) {
List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent);
PackageVersion latest = findLatest(packageVersions);
version = Objects.requireNonNull(latest).toString();
}
applyTemplate(orgName, templatePackageName, version, packageName, projectPath, balaCache, filesInDir);
}
/**
 * Pulls the given package from the Ballerina Central repository into the destination directory.
 *
 * @param orgName     organization name
 * @param packageName package name
 * @param version     package version ({@code null} pulls the latest)
 * @param destination directory the bala is pulled into
 * @throws CentralClientException when the pull fails
 */
private static void pullPackageFromRemote(String orgName, String packageName, String version, Path destination)
        throws CentralClientException {
    // All supported JVM target platform codes, comma separated.
    String supportedPlatform = Arrays.stream(JvmTarget.values())
            .map(JvmTarget::code)
            .collect(Collectors.joining(","));
    Settings settings;
    try {
        settings = readSettings();
    } catch (SettingsTomlException e) {
        // A malformed Settings.toml must not block the pull; fall back to defaults.
        settings = Settings.from();
    }
    CentralAPIClient client = new CentralAPIClient(RepoUtils.getRemoteRepoURL(),
            initializeProxy(settings.getProxy()), settings.getProxy().username(),
            settings.getProxy().password(),
            getAccessTokenOfCLI(settings), settings.getCentral().getConnectTimeout(),
            settings.getCentral().getReadTimeout(), settings.getCentral().getWriteTimeout(),
            settings.getCentral().getCallTimeout());
    // Removed a redundant try/catch that only rethrew CentralClientException unchanged.
    client.pullPackage(orgName, packageName, version, destination, supportedPlatform,
            RepoUtils.getBallerinaVersion(), false);
}
// Writes the Ballerina.toml of the new project from the template's package.json,
// substituting the template package name with the new package name and appending
// a platform dependency section when the template bundles platform libraries.
public static void writeBallerinaToml(Path balTomlPath, PackageJson packageJson,
String packageName, String platform)
throws IOException {
Files.writeString(balTomlPath, "[package]", StandardOpenOption.APPEND);
Files.writeString(balTomlPath, "\norg = \"" + packageJson.getOrganization() + "\"",
StandardOpenOption.APPEND);
Files.writeString(balTomlPath, "\nname = \"" + packageName + "\"", StandardOpenOption.APPEND);
Files.writeString(balTomlPath, "\nversion = \"" + packageJson.getVersion() + "\"",
StandardOpenOption.APPEND);
// Exported module names keep their suffixes but are re-rooted at the new package name.
List<String> newModuleNames = packageJson.getExport().stream().map(module ->
module.replaceFirst(packageJson.getName(), packageName)).collect(Collectors.toList());
StringJoiner stringJoiner = new StringJoiner(",");
for (String newModuleName : newModuleNames) {
stringJoiner.add("\"" + newModuleName + "\"");
}
Files.writeString(balTomlPath, "\nexport = [" + stringJoiner + "]"
.replaceFirst(packageJson.getName(), packageName), StandardOpenOption.APPEND);
Files.writeString(balTomlPath, "\ndistribution = \"" + packageJson.getBallerinaVersion()
+ "\"", StandardOpenOption.APPEND);
// Optional package attributes copied over only when present in the template.
writePackageAttributeArray(balTomlPath, packageJson.getLicenses(), "license");
writePackageAttributeArray(balTomlPath, packageJson.getAuthors(), "authors");
writePackageAttributeArray(balTomlPath, packageJson.getKeywords(), "keywords");
writePackageAttributeValue(balTomlPath, packageJson.getSourceRepository(), "repository");
writePackageAttributeValue(balTomlPath, packageJson.getVisibility(), "visibility");
writePackageAttributeValue(balTomlPath, packageJson.getIcon(), "icon");
Files.writeString(balTomlPath, "\n\n[build-options]", StandardOpenOption.APPEND);
Files.writeString(balTomlPath, "\nobservabilityIncluded = true\n", StandardOpenOption.APPEND);
JsonArray platformLibraries = packageJson.getPlatformDependencies();
if (platformLibraries == null) {
return;
}
// NOTE(review): the [[platform.<x>.dependency]] table header is written once before the
// loop, so multiple platform dependencies share a single table entry — confirm intended.
Files.writeString(balTomlPath, "\n[[platform." + platform + ".dependency]]", StandardOpenOption.APPEND);
for (Object dependencies : platformLibraries) {
JsonObject dependenciesObj = (JsonObject) dependencies;
// Only default-scope dependencies get a path entry; their jars were copied to libs/.
if (null == dependenciesObj.get("scope")) {
String libPath = dependenciesObj.get("path").getAsString();
Path libName = Optional.of(Paths.get(libPath).getFileName()).get();
Path libRelPath = Paths.get("libs", libName.toString());
Files.writeString(balTomlPath, "\npath = \"" + libRelPath + "\"", StandardOpenOption.APPEND);
}
if (dependenciesObj.get("artifactId") != null) {
String artifactId = dependenciesObj.get("artifactId").getAsString();
Files.writeString(balTomlPath, "\nartifactId = \"" + artifactId + "\"",
StandardOpenOption.APPEND);
}
if (dependenciesObj.get("groupId") != null) {
String groupId = dependenciesObj.get("groupId").getAsString();
Files.writeString(balTomlPath, "\ngroupId = \"" + groupId + "\"", StandardOpenOption.APPEND);
}
if (dependenciesObj.get("version") != null) {
String dependencyVersion = dependenciesObj.get("version").getAsString();
Files.writeString(balTomlPath, "\nversion = \"" + dependencyVersion + "\"\n",
StandardOpenOption.APPEND);
}
// Provided-scope dependencies have no bundled jar: record the scope and warn the user
// that they must fill in the path themselves.
// NOTE(review): the "scope = ..." write has no leading "\n"; it relies on the preceding
// version line ending with "\n" — confirm a scope entry always carries a version.
if (null != dependenciesObj.get("scope") && dependenciesObj.get("scope").getAsString().equals("provided")) {
String scope = dependenciesObj.get("scope").getAsString();
Files.writeString(balTomlPath, "scope = \"" + scope + "\"\n",
StandardOpenOption.APPEND);
String artifactId = dependenciesObj.get("artifactId").getAsString();
printError(errStream,
"WARNING: path for the platform dependency " + artifactId + " with provided scope " +
"should be specified in the Ballerina.toml",
null,
false);
}
}
}
public static void writeDependenciesToml(Path projectPath, DependencyGraphJson templateDependencyGraphJson,
PackageJson templatePackageJson)
throws IOException {
Path depsTomlPath = projectPath.resolve(DEPENDENCIES_TOML);
String autoGenCode = "
"\n" +
"
"
"\n";
Files.writeString(depsTomlPath, autoGenCode, StandardOpenOption.APPEND);
String balTomlVersion = "[ballerina]\n" +
"dependencies-toml-version = \"" + ProjectConstants.DEPENDENCIES_TOML_VERSION + "\"\n" +
"\n";
Files.writeString(depsTomlPath, balTomlVersion, StandardOpenOption.APPEND);
List<ModuleDependency> currentPkgModules = new ArrayList<>();
for (ModuleDependency module : templateDependencyGraphJson.getModuleDependencies()) {
if (module.getOrg().equals(templatePackageJson.getOrganization())
&& module.getPackageName().equals(templatePackageJson.getName())) {
List<ModuleDependency> currentPkgModuleDeps = module.getDependencies();
currentPkgModules.addAll(currentPkgModuleDeps);
}
}
StringBuilder pkgDesc = new StringBuilder();
for (Dependency packageDependency : templateDependencyGraphJson.getPackageDependencyGraph()) {
if (templatePackageJson.getOrganization().equals(packageDependency.getOrg())
&& templatePackageJson.getName().equals(packageDependency.getName())) {
pkgDesc.append("[[package]]\n")
.append("org = \"").append(packageDependency.getOrg()).append("\"\n")
.append("name = \"").append(ProjectUtils.defaultName(projectPath)).append("\"\n")
.append("version = \"").append(packageDependency.getVersion()).append("\"\n");
pkgDesc.append(getDependenciesArrayContent(packageDependency));
pkgDesc.append(getDependencyModulesArrayContent(
templateDependencyGraphJson.getModuleDependencies(), true, projectPath));
} else {
pkgDesc.append("[[package]]\n")
.append("org = \"").append(packageDependency.getOrg()).append("\"\n")
.append("name = \"").append(packageDependency.getName()).append("\"\n")
.append("version = \"").append(packageDependency.getVersion()).append("\"\n");
pkgDesc.append(getDependenciesArrayContent(packageDependency));
List<ModuleDependency> packageDependencyModules = new ArrayList<>();
for (ModuleDependency module : currentPkgModules) {
if (packageDependency.getOrg().equals(module.getOrg())
&& packageDependency.getName().equals(module.getPackageName())) {
packageDependencyModules.add(module);
}
}
if (!packageDependencyModules.isEmpty()) {
pkgDesc.append(getDependencyModulesArrayContent(packageDependencyModules, false, projectPath));
}
}
pkgDesc.append("\n");
}
Files.writeString(depsTomlPath, pkgDesc.toString(), StandardOpenOption.APPEND);
}
/**
 * Resolves the bala path for the given package, preferring the "any"-platform bala and
 * falling back to JVM-target-specific balas. Side effect: sets the static {@code platform}
 * field to the platform of the bala that was found (or the any-platform default).
 *
 * @param orgName   organization name
 * @param pkgName   package name
 * @param version   package version
 * @param balaCache bala cache directory
 * @return the first existing bala path, or the last candidate path when none exists
 */
public static Path getPlatformSpecificBalaPath(String orgName, String pkgName, String version,
                                               Path balaCache) {
    platform = ANY_PLATFORM;
    Path balaPath = balaCache.resolve(
            ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, null));
    if (Files.exists(balaPath)) {
        return balaPath;
    }
    for (JvmTarget supportedPlatform : JvmTarget.values()) {
        balaPath = balaCache.resolve(
                ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, supportedPlatform.code()));
        if (Files.exists(balaPath)) {
            platform = supportedPlatform.code();
            return balaPath;
        }
    }
    return balaPath;
}
/**
 * Write to BalTool.toml file.
 *
 * @param balToolTomlPath path to BalTool.toml
 * @param balToolJson     Bal-tool.json content
 * @param packageName     name used as the tool id
 * @throws IOException when writing to BalTool.toml fails
 */
public static void writeBalToolToml(Path balToolTomlPath, BalToolJson balToolJson, String packageName)
        throws IOException {
    Files.writeString(balToolTomlPath, "[tool]", StandardOpenOption.APPEND);
    Files.writeString(balToolTomlPath, "\nid = \"" + packageName + "\"\n",
            StandardOpenOption.APPEND);
    // One [[dependency]] table per bundled dependency jar.
    StringBuilder dependenciesContent = new StringBuilder();
    for (String dependencyPath : balToolJson.dependencyPaths()) {
        dependenciesContent.append("\n[[dependency]]\n").append("path = \"").append(dependencyPath).append("\"\n");
    }
    Files.writeString(balToolTomlPath, dependenciesContent.toString(), StandardOpenOption.APPEND);
}
/**
 * Copy dependency jars to new package from template package.
 *
 * @param projectPath  path to new project
 * @param toolsLibPath path to the tool/libs directory containing the dependencies
 * @throws IOException when creating directories or copying fails
 */
public static void copyToolDependencies(Path projectPath, Path toolsLibPath) throws IOException {
    Path toolDirectory = projectPath.resolve(TOOL_DIR);
    Path toolLibsDirectory = toolDirectory.resolve(LIBS_DIR);
    Files.createDirectory(toolDirectory);
    Files.createDirectory(toolLibsDirectory);
    Files.walkFileTree(toolsLibPath, new FileUtils.Copy(toolsLibPath, toolLibsDirectory));
}
/**
 * Get formatted dependencies array content for a Dependencies.toml dependency.
 *
 * @param packageDependency package dependency
 * @return formatted dependencies array content ("" when the dependency has none)
 */
private static String getDependenciesArrayContent(Dependency packageDependency) {
    List<Dependency> deps = packageDependency.getDependencies();
    if (deps.isEmpty()) {
        return "";
    }
    StringBuilder dependenciesContent = new StringBuilder();
    for (Dependency dependency : deps) {
        dependenciesContent.append("\t{org = \"").append(dependency.getOrg())
                .append("\", name = \"").append(dependency.getName())
                .append("\"},\n");
    }
    // Drop the leading tab and the trailing ",\n" of the last entry.
    String dependenciesPart = removeLastCharacter(trimStartingWhitespaces(dependenciesContent.toString()));
    return "dependencies = [\n" + dependenciesPart + "\n]\n";
}
/**
 * Get formatted modules array content for Dependencies.toml dependency.
 * <code>
 * modules = [
 * {org = "ballerinax", packageName = "redis", moduleName = "redis"}
 * ]
 * </code>
 *
 * @param dependencyModules modules of the given dependency package
 * @param isCurrentPackage  is modules array generating for current package
 * @param projectPath       project path
 * @return formatted modules array content
 */
private static String getDependencyModulesArrayContent(List<ModuleDependency> dependencyModules,
                                                       boolean isCurrentPackage, Path projectPath) {
    StringBuilder modulesContent = new StringBuilder();
    for (ModuleDependency module : dependencyModules) {
        String packageName;
        String moduleName;
        if (isCurrentPackage) {
            // Replace the template package prefix of each module with the new package name.
            packageName = ProjectUtils.defaultName(projectPath).value();
            String modulePkgPart = module.getModuleName().split("\\.")[0];
            moduleName = module.getModuleName().replaceFirst(modulePkgPart, packageName);
        } else {
            packageName = module.getPackageName();
            moduleName = module.getModuleName();
        }
        modulesContent.append("\t{org = \"").append(module.getOrg())
                .append("\", packageName = \"").append(packageName)
                .append("\", moduleName = \"").append(moduleName)
                .append("\"},\n");
    }
    // Drop the leading tab and the trailing ",\n" of the last entry.
    String modulesPart = removeLastCharacter(trimStartingWhitespaces(modulesContent.toString()));
    return "modules = [\n" + modulesPart + "\n]\n";
}
/**
 * Write Ballerina.toml package attribute array from template package.json to new project Ballerina.toml.
 *
 * @param balTomlPath    Ballerina.toml path of the new project
 * @param attributeArray package attribute values array
 * @param attributeName  package attribute name
 * @throws IOException when error occurs writing to the Ballerina.toml
 */
private static void writePackageAttributeArray(Path balTomlPath, List<String> attributeArray, String attributeName)
        throws IOException {
    if (attributeArray == null || attributeArray.isEmpty()) {
        // Attribute absent in the template; nothing to write.
        return;
    }
    StringJoiner joiner = new StringJoiner(",");
    for (String attributeElement : attributeArray) {
        joiner.add("\"" + attributeElement + "\"");
    }
    Files.writeString(balTomlPath, "\n" + attributeName + " = [" + joiner + "]",
            StandardOpenOption.APPEND);
}
/**
 * Write Ballerina.toml package attribute from template package.json to new project Ballerina.toml.
 *
 * @param balTomlPath    Ballerina.toml path of the new project
 * @param attributeValue package attribute value
 * @param attributeName  package attribute name
 * @throws IOException when error occurs writing to the Ballerina.toml
 */
private static void writePackageAttributeValue(Path balTomlPath, String attributeValue, String attributeName)
        throws IOException {
    if (attributeValue == null || attributeValue.isEmpty()) {
        // Attribute absent in the template; nothing to write.
        return;
    }
    Files.writeString(balTomlPath, "\n" + attributeName + " = \"" + attributeValue + "\"",
            StandardOpenOption.APPEND);
}
/**
 * Find the package name for a given template.
 *
 * @param template template name, in the form {@code org/package[:version]}
 * @return package name of the module ("" when the template has no package part)
 */
public static String findPkgName(String template) {
    String[] orgSplit = template.split("/");
    if (orgSplit.length < 2) {
        return "";
    }
    // Strip an optional ":version" suffix.
    return orgSplit[1].split(":")[0].trim();
}
/**
 * Find the organization for a given template.
 *
 * @param template template name, in the form {@code org/package[:version]}
 * @return orgName - org of the module
 */
public static String findOrg(String template) {
    return template.split("/")[0].trim();
}
/**
 * Find the package version for a given template.
 *
 * @param template template name, in the form {@code org/package[:version]}
 * @return version of the module, or {@code null} when the template carries no version
 */
public static String findPkgVersion(String template) {
    String[] orgSplit = template.split("/");
    String packagePart = orgSplit.length > 1 ? orgSplit[1] : "";
    String[] pkgSplit = packagePart.split(":");
    return pkgSplit.length > 1 ? pkgSplit[1].trim() : null;
}
/**
 * Initialize a new ballerina project in the given path.
 *
 * @param path project path
 * @param packageName name of the package
 * @param template package template
 * @param balFilesExist if bal files exist in the project
 * @throws IOException If any IO exception occurred
 * @throws URISyntaxException If any URISyntaxException occurred
 */
public static void initPackageByTemplate(Path path, String packageName, String template, boolean balFilesExist)
throws IOException, URISyntaxException {
// Copy the bundled template sources into the project first.
applyTemplate(path, template, balFilesExist);
if (template.equalsIgnoreCase(LIB_DIR)) {
initLibPackage(path, packageName);
// The lib template ships a generic "lib.bal"; rename it after the package.
Path source = path.resolve("lib.bal");
Files.move(source, source.resolveSibling(guessPkgName(packageName, template) + ".bal"),
StandardCopyOption.REPLACE_EXISTING);
} else if (template.equalsIgnoreCase(TOOL_DIR)) {
initToolPackage(path, packageName);
} else {
initPackage(path, packageName);
}
// Seed default .gitignore and devcontainer files for the new project.
createDefaultGitignore(path);
createDefaultDevContainer(path);
}
/**
 * Creates the project's .gitignore when missing and seeds it with the bundled default
 * content — but only when the file is empty, so user edits are never clobbered.
 *
 * @param path project path
 * @throws IOException when creating or writing the file fails
 */
private static void createDefaultGitignore(Path path) throws IOException {
    Path gitignore = path.resolve(ProjectConstants.GITIGNORE_FILE_NAME);
    if (Files.notExists(gitignore)) {
        Files.createFile(gitignore);
    }
    if (Files.size(gitignore) == 0) {
        String defaultContent = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + GITIGNORE);
        Files.write(gitignore, defaultContent.getBytes(StandardCharsets.UTF_8));
    }
}
/**
 * Create the devcontainer config if missing and seed an empty one with the
 * bundled default, pinning the image tag to the current Ballerina version.
 *
 * @param path project path
 * @throws IOException if the file cannot be created or written
 */
private static void createDefaultDevContainer(Path path) throws IOException {
    Path devContainerFile = path.resolve(ProjectConstants.DEVCONTAINER);
    if (Files.notExists(devContainerFile)) {
        Files.createFile(devContainerFile);
    }
    if (Files.size(devContainerFile) == 0) {
        // Replace the template's `latest` tag with the concrete version.
        String content = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + DEVCONTAINER)
                .replace("latest", RepoUtils.getBallerinaVersion());
        Files.write(devContainerFile, content.getBytes(StandardCharsets.UTF_8));
    }
}
/**
 * Get the list of built-in templates bundled with the distribution.
 *
 * @return list of template names; empty on any lookup failure
 */
public static List<String> getTemplates() {
    try {
        Path templateDir = getTemplatePath();
        List<String> templates;
        // Files.walk returns a lazily-populated stream backed by open directory
        // handles; close it to avoid a resource leak.
        try (Stream<Path> walk = Files.walk(templateDir, 1)) {
            templates = walk.filter(Files::isDirectory)
                    .filter(directory -> !templateDir.equals(directory))
                    .filter(directory -> directory.getFileName() != null)
                    .map(directory -> directory.getFileName().toString())
                    .collect(Collectors.toList());
        }
        if (null != jarFs) {
            // Inside the tool jar the entries carry the jar FS separator; strip it.
            return templates.stream()
                    .map(t -> t.replace(jarFs.getSeparator(), ""))
                    .collect(Collectors.toList());
        }
        return templates;
    } catch (IOException | URISyntaxException e) {
        // Template listing is best-effort; fall back to "no templates".
        return new ArrayList<>();
    }
}
/**
 * Resolve the directory that holds the `create` command templates.
 *
 * @return path of the template directory (a jar-FS path when running from a jar)
 * @throws URISyntaxException if the resource URI is malformed
 */
private static Path getTemplatePath() throws URISyntaxException {
    URI templatesUri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();
    String uriText = templatesUri.toString();
    if (uriText.contains("!")) {
        // jar:file:...!/create_cmd_templates — everything after '!' is the in-jar path.
        return jarFs.getPath(uriText.split("!")[1]);
    }
    return Paths.get(templatesUri);
}
/**
 * Apply the template to the created module.
 *
 * @param modulePath path to the module
 * @param template template name
 * @param balFilesExist if bal files exist in the project
 * @throws IOException if any IOException occurred
 * @throws URISyntaxException if any URISyntaxException occurred
 */
public static void applyTemplate(Path modulePath, String template, boolean balFilesExist)
        throws IOException, URISyntaxException {
    Path templateDir = getTemplatePath().resolve(template);
    if (template.equalsIgnoreCase(MAIN_TEMPLATE)) {
        // The main template is layered on top of the default template's files.
        templateDir = getTemplatePath().resolve(DEFAULT_TEMPLATE);
        Path tempDirTest = getTemplatePath().resolve(MAIN_TEMPLATE);
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        Files.walkFileTree(tempDirTest, new FileUtils.Copy(tempDirTest, modulePath));
    } else if (template.equalsIgnoreCase(DEFAULT_TEMPLATE)) {
        // Don't copy the default template into a directory that already has .bal sources.
        if (!balFilesExist) {
            Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        }
    } else {
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
    }
}
/**
 * Initialize a new ballerina project in the given path.
 *
 * @param path Project path
 * @param packageName name of the package
 * @throws IOException If any IO exception occurred
 */
public static void initPackage(Path path, String packageName) throws IOException {
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    // Fill the manifest template placeholders: org, package name, distribution version.
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml");
    defaultManifest = defaultManifest
            .replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, "app"))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    Files.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));
}
/**
 * Initialize a new ballerina library package in the given path.
 *
 * @param path project path
 * @param packageName name of the package
 * @throws IOException If any IO exception occurred
 */
private static void initLibPackage(Path path, String packageName) throws IOException {
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    // Fill the library manifest placeholders: org, package name, distribution version.
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-lib.toml");
    defaultManifest = defaultManifest.replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, "lib"))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));
    // Library packages additionally ship a Package.md.
    String packageMd = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/Package.md");
    write(path.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME), packageMd.getBytes(StandardCharsets.UTF_8));
}
/**
 * Initialize a new ballerina tool package in the given path.
 *
 * @param path Project path
 * @param packageName package name
 * @throws IOException If any IO exception occurred
 */
private static void initToolPackage(Path path, String packageName) throws IOException {
    // Ballerina.toml: standard app manifest with the placeholders substituted.
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    String manifestContent = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml")
            .replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, TOOL_DIR))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    Files.write(ballerinaToml, manifestContent.getBytes(StandardCharsets.UTF_8));
    // BalTool.toml: tool descriptor carrying the tool id.
    Path balToolToml = path.resolve(ProjectConstants.BAL_TOOL_TOML);
    Files.createFile(balToolToml);
    String toolManifestContent = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-tool.toml")
            .replaceAll(TOOL_ID, guessPkgName(packageName, TOOL_DIR));
    write(balToolToml, toolManifestContent.getBytes(StandardCharsets.UTF_8));
}
/**
 * Pick the latest version out of the given list.
 *
 * @param packageVersions candidate versions
 * @return the latest version, or {@code null} for an empty list
 */
protected static PackageVersion findLatest(List<PackageVersion> packageVersions) {
    if (packageVersions.isEmpty()) {
        return null;
    }
    PackageVersion best = packageVersions.get(0);
    for (int i = 1; i < packageVersions.size(); i++) {
        best = getLatest(best, packageVersions.get(i));
    }
    return best;
}
/**
 * Compare two package versions, preferring stable releases over pre-releases.
 *
 * @param v1 first version
 * @param v2 second version
 * @return the later of the two ({@code v1} on ties)
 */
protected static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) {
    SemanticVersion semVer1 = v1.value();
    SemanticVersion semVer2 = v2.value();
    // When exactly one side is a pre-release, the stable side wins outright.
    if (semVer1.isPreReleaseVersion() != semVer2.isPreReleaseVersion()) {
        return semVer1.isPreReleaseVersion() ? v2 : v1;
    }
    return semVer1.greaterThanOrEqualTo(semVer2) ? v1 : v2;
}
/**
 * List the versions available in the local bala cache for a package.
 *
 * @param balaPackagePath cache directory of the package (e.g. {@code <cache>/<org>/<name>})
 * @return the parsable versions found under that directory
 */
public static List<PackageVersion> getPackageVersions(Path balaPackagePath) {
    List<Path> versions = new ArrayList<>();
    if (Files.exists(balaPackagePath)) {
        // Files.list keeps a directory handle open until the stream is closed;
        // use try-with-resources to avoid leaking it.
        try (Stream<Path> collectVersions = Files.list(balaPackagePath)) {
            versions.addAll(collectVersions.collect(Collectors.toList()));
        } catch (IOException e) {
            // Preserve the cause so the failure is diagnosable.
            throw new RuntimeException("Error while accessing Distribution cache: " + e.getMessage(), e);
        }
    }
    return pathToVersions(versions);
}
/**
 * Convert cache directory paths into package versions, silently skipping
 * entries that do not parse as a valid version.
 *
 * @param versions version directory paths
 * @return the versions that parsed successfully
 */
protected static List<PackageVersion> pathToVersions(List<Path> versions) {
    List<PackageVersion> availableVersions = new ArrayList<>();
    for (Path path : versions) {
        String versionText = Optional.ofNullable(path)
                .map(Path::getFileName)
                .map(Path::toString)
                .orElse("0.0.0");
        try {
            availableVersions.add(PackageVersion.from(versionText));
        } catch (ProjectException ignored) {
            // Not a valid version directory name; skip it.
        }
    }
    return availableVersions;
}
/**
 * Remove trailing whitespace from a string.
 * <p>
 * NOTE(review): despite the method name, the possessive regex {@code \s++$}
 * anchors at the end of input, so this strips <em>trailing</em> whitespace
 * only. Callers rely on that (e.g. dropping a final newline before a trailing
 * comma is removed), so only the documentation is corrected here.
 *
 * @param str given string
 * @return string with trailing whitespace removed
 */
private static String trimStartingWhitespaces(String str) {
    return str.replaceFirst("\\s++$", "");
}
/**
 * Remove the last character of a string.
 *
 * @param str given string
 * @return the string without its final character; an empty string is returned unchanged
 */
private static String removeLastCharacter(String str) {
    // Guard against StringIndexOutOfBoundsException on empty input.
    if (str.isEmpty()) {
        return str;
    }
    return str.substring(0, str.length() - 1);
}
/**
 * Check if files of the given template already exist in a given path.
 *
 * @param template given template name
 * @param packagePath given path
 * @return comma-separated list of clashing (non-.bal) file names; empty when none clash
 * @throws URISyntaxException if URI syntax exception occurred
 * @throws IOException if IO exception occurred
 */
public static String checkTemplateFilesExists(String template, Path packagePath) throws URISyntaxException,
        IOException {
    Path templateDir = getTemplatePath().resolve(template);
    List<Path> templateFilePathList;
    // Close the Files.list stream to avoid leaking the directory handle.
    try (Stream<Path> paths = Files.list(templateDir)) {
        templateFilePathList = paths.collect(Collectors.toList());
    }
    StringBuilder existingFiles = new StringBuilder();
    for (Path path : templateFilePathList) {
        Optional<String> fileNameOptional = Optional.ofNullable(path.getFileName()).map(Path::toString);
        if (fileNameOptional.isPresent()) {
            String fileName = fileNameOptional.get();
            // .bal sources are allowed to coexist; anything else is reported as a clash.
            if (!fileName.endsWith(ProjectConstants.BLANG_SOURCE_EXT)
                    && Files.exists(packagePath.resolve(fileName))) {
                existingFiles.append(fileName).append(FILE_STRING_SEPARATOR);
            }
        }
    }
    return existingFiles.toString();
}
/**
 * Check if common files of a package already exist in a given path.
 *
 * @param packagePath given path
 * @return comma-separated list of the files that already exist; empty when none do
 */
public static String checkPackageFilesExists(Path packagePath) {
    String[] packageFiles = {DEPENDENCIES_TOML, BAL_TOOL_TOML, ProjectConstants.PACKAGE_MD_FILE_NAME,
            ProjectConstants.MODULE_MD_FILE_NAME, ProjectConstants.MODULES_ROOT, ProjectConstants.TEST_DIR_NAME};
    return Arrays.stream(packageFiles)
            .filter(file -> Files.exists(packagePath.resolve(file)))
            .map(file -> file + FILE_STRING_SEPARATOR)
            .collect(Collectors.joining());
}
/**
 * Check if .bal files exist directly inside a given path.
 *
 * @param packagePath given path
 * @return true if at least one top-level entry ends with the .bal extension
 * @throws IOException if the directory cannot be listed
 */
public static boolean balFilesExists(Path packagePath) throws IOException {
    // Close the Files.list stream to avoid leaking the directory handle;
    // anyMatch short-circuits on the first hit.
    try (Stream<Path> entries = Files.list(packagePath)) {
        return entries.anyMatch(path -> path.toString().endsWith(ProjectConstants.BLANG_SOURCE_EXT));
    }
}
/**
 * Get the latest version from a given list of version strings.
 *
 * @param versions the list of version strings (must be non-empty)
 * @return the latest version
 */
static String getLatestVersion(List<String> versions) {
    String latestVersion = versions.get(0);
    for (int i = 1; i < versions.size(); i++) {
        String candidate = versions.get(i);
        if (SemanticVersion.from(candidate).greaterThan(SemanticVersion.from(latestVersion))) {
            latestVersion = candidate;
        }
    }
    return latestVersion;
}
/**
 * Print all given diagnostics to the error stream in the standard error format.
 * <p>
 * NOTE(review): the previous javadoc here ("Pull the dependencies of a given
 * package from central", with org/name/version params and a boolean return)
 * belonged to a different method and did not match this signature.
 *
 * @param diagnostics diagnostics to print
 */
private static void printDiagnostics(Collection<Diagnostic> diagnostics) {
    for (Diagnostic diagnostic: diagnostics) {
        CommandUtil.printError(errStream, diagnostic.toString(), null, false);
    }
}
/**
 * Check whether any resolved dependency of the compilation declares a platform
 * dependency with {@code scope = "provided"}.
 *
 * @param packageCompilation compilation whose resolved dependency graph is inspected
 * @return true if at least one provided-scope platform dependency exists
 */
private static boolean hasProvidedPlatformDeps(PackageCompilation packageCompilation) {
    // anyMatch short-circuits; there is no need to materialize the matches into
    // a Set just to test it for emptiness.
    return packageCompilation.getResolution().allDependencies()
            .stream()
            .map(ResolvedPackageDependency::packageInstance)
            .map(Package::manifest)
            .flatMap(pkgManifest -> pkgManifest.platforms().values().stream())
            .filter(Objects::nonNull)
            .flatMap(pkgPlatform -> pkgPlatform.dependencies().stream())
            .anyMatch(dependency -> "provided".equals(dependency.get("scope")));
}
} | class CommandUtil {
// Placeholder tokens substituted into the bundled manifest templates.
public static final String ORG_NAME = "ORG_NAME";
public static final String PKG_NAME = "PKG_NAME";
public static final String DIST_VERSION = "DIST_VERSION";
public static final String TOOL_ID = "TOOL_ID";
public static final String USER_HOME = "user.home";
// Resource names of the bundled defaults and template directories.
public static final String GITIGNORE = "gitignore";
public static final String DEVCONTAINER = "devcontainer";
public static final String NEW_CMD_DEFAULTS = "new_cmd_defaults";
public static final String CREATE_CMD_TEMPLATES = "create_cmd_templates";
public static final String LIBS_DIR = "libs";
public static final String DEFAULT_TEMPLATE = "default";
public static final String MAIN_TEMPLATE = "main";
// Separator used when joining file names in "already exists" messages.
public static final String FILE_STRING_SEPARATOR = ", ";
// File system view into the tool jar; initialized lazily by initJarFs().
private static FileSystem jarFs;
private static Map<String, String> env;
private static PrintStream errStream;
private static PrintStream outStream;
private static Path homeCache;
private static boolean exitWhenFinish;
// Platform of the most recently resolved bala; set by getPlatformSpecificBalaPath().
private static String platform;
// Inject the error stream used by the static print helpers (package-private test hook).
static void setPrintStream(PrintStream errStream) {
    CommandUtil.errStream = errStream;
}
/**
 * Mount the tool jar as a file system so the bundled templates can be read when
 * the CLI runs from a packaged distribution. No-op when running from exploded
 * classes (resource URI contains no {@code !}).
 */
public static void initJarFs() {
    try {
        URI uri = CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES).toURI();
        // A "jar:file:...!/path" URI means we are running from inside a jar.
        if (uri.toString().contains("!")) {
            final String[] array = uri.toString().split("!");
            if (null == jarFs) {
                env = new HashMap<>();
                jarFs = FileSystems.newFileSystem(URI.create(array[0]), env);
            }
        }
    } catch (URISyntaxException | IOException e) {
        // Keep the cause: a bare AssertionError made this failure undiagnosable.
        throw new AssertionError(e);
    }
}
/**
 * Print command errors with a standard format.
 *
 * @param stream error will be sent to this stream
 * @param error error message
 * @param usage usage if any; the USAGE section is skipped when {@code null}
 * @param help if the help message should be printed
 */
public static void printError(PrintStream stream, String error, String usage, boolean help) {
    stream.println("ballerina: " + error);
    if (usage != null) {
        stream.println();
        stream.println("USAGE:");
        stream.println("    " + usage);
    }
    if (help) {
        stream.println();
        stream.println("For more information try --help");
    }
}
/**
 * Exit with error code 1.
 *
 * @param exit Whether to exit or not.
 */
public static void exitError(boolean exit) {
    if (!exit) {
        return;
    }
    Runtime.getRuntime().exit(1);
}
// Apply a template pulled from Central: locate the bala in the local cache and
// copy its content into the project directory. On failure the CLI prints the
// error and exits (honoring exitWhenFinish).
static void applyTemplate(String orgName, String templatePkgName, String version, String packageName,
                          Path projectPath, Path balaCache, List<Path> filesInDir) {
    Path balaPath = getPlatformSpecificBalaPath(orgName, templatePkgName, version, balaCache);
    if (!Files.exists(balaPath)) {
        CommandUtil.printError(errStream,
                "unable to find the bala: " + balaPath,
                null,
                false);
        CommandUtil.exitError(exitWhenFinish);
    }
    try {
        addModules(balaPath, projectPath, packageName);
    } catch (IOException e) {
        // Roll back via ProjectUtils.deleteSelectedFilesInDirectory — presumably
        // removing the files created by this run (filesInDir is the pre-existing
        // snapshot); TODO confirm against ProjectUtils.
        ProjectUtils.deleteSelectedFilesInDirectory(projectPath, filesInDir);
        CommandUtil.printError(errStream,
                "error occurred while creating the package: " + e.getMessage(),
                null,
                false);
        CommandUtil.exitError(exitWhenFinish);
    }
}
// Copy the template bala's content (manifests, docs, modules, icon, platform
// libs, include files) into the new project, renaming the template package to
// `packageName` along the way.
private static void addModules(Path balaPath, Path projectPath, String packageName)
        throws IOException {
    Gson gson = new Gson();
    Path packageJsonPath = balaPath.resolve(PACKAGE_JSON);
    Path dependencyGraphJsonPath = balaPath.resolve(DEPENDENCY_GRAPH_JSON);
    Path balToolJsonPath = balaPath.resolve(TOOL_DIR).resolve(ProjectConstants.BAL_TOOL_JSON);
    PackageJson templatePackageJson = null;
    DependencyGraphJson templateDependencyGraphJson = null;
    BalToolJson templateBalToolJson = null;
    // package.json is mandatory; a read failure aborts the CLI with exit(1).
    try (InputStream inputStream = new FileInputStream(String.valueOf(packageJsonPath))) {
        Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
        templatePackageJson = gson.fromJson(fileReader, PackageJson.class);
    } catch (IOException e) {
        printError(errStream,
                "Error while reading the package json file: " + e.getMessage(),
                null,
                false);
        getRuntime().exit(1);
    }
    // dependency-graph.json is optional; only read when the bala ships one.
    if (dependencyGraphJsonPath.toFile().exists()) {
        try (InputStream inputStream = new FileInputStream(String.valueOf(dependencyGraphJsonPath))) {
            Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
            templateDependencyGraphJson = gson.fromJson(fileReader, DependencyGraphJson.class);
        } catch (IOException e) {
            printError(errStream,
                    "Error while reading the dependency graph json file: " + e.getMessage(),
                    null,
                    false);
            getRuntime().exit(1);
        }
    }
    // bal-tool.json is optional; present only for tool packages.
    if (balToolJsonPath.toFile().exists()) {
        try (InputStream inputStream = new FileInputStream(String.valueOf(balToolJsonPath))) {
            Reader fileReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
            templateBalToolJson = gson.fromJson(fileReader, BalToolJson.class);
        } catch (IOException e) {
            printError(errStream,
                    "Error while reading the " + BAL_TOOL_JSON + " file: " + e.getMessage(),
                    null,
                    false);
            getRuntime().exit(1);
        }
    }
    // Only packages published as templates may be used with `bal new -t`.
    if (!templatePackageJson.getTemplate()) {
        throw createLauncherException("unable to create the package: " +
                "specified package is not a template");
    }
    // Generate Ballerina.toml (and Dependencies.toml / BalTool.toml when the
    // corresponding descriptors exist in the bala).
    Path ballerinaToml = projectPath.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createDirectories(projectPath);
    Files.createFile(ballerinaToml);
    writeBallerinaToml(ballerinaToml, templatePackageJson, packageName, platform);
    if (dependencyGraphJsonPath.toFile().exists()) {
        Path dependenciesToml = projectPath.resolve(DEPENDENCIES_TOML);
        Files.createFile(dependenciesToml);
        writeDependenciesToml(projectPath, templateDependencyGraphJson, templatePackageJson);
    }
    if (balToolJsonPath.toFile().exists()) {
        Path balToolToml = projectPath.resolve(BAL_TOOL_TOML);
        Files.createFile(balToolToml);
        writeBalToolToml(balToolToml, templateBalToolJson, packageName);
        copyToolDependencies(projectPath, balaPath.resolve(TOOL_DIR).resolve(LIBS_DIR));
    }
    // Copy Package.md when the template ships one.
    Path packageMDFilePath = balaPath.resolve("docs")
            .resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);
    Path toPackageMdPath = projectPath.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME);
    if (Files.exists(packageMDFilePath)) {
        Files.copy(packageMDFilePath, toPackageMdPath, StandardCopyOption.REPLACE_EXISTING);
    }
    createDefaultGitignore(projectPath);
    createDefaultDevContainer(projectPath);
    // Copy each module directory: the root module goes to the project root; a
    // submodule `<templatePkg>.<mod>` goes to `modules/<mod>`.
    String templatePkgName = templatePackageJson.getName();
    Path modulesRoot = balaPath.resolve(ProjectConstants.MODULES_ROOT);
    Path moduleMdDirRoot = balaPath.resolve("docs").resolve(ProjectConstants.MODULES_ROOT);
    List<Path> modulesList;
    try (Stream<Path> pathStream = Files.list(modulesRoot)) {
        modulesList = pathStream.collect(Collectors.toList());
    }
    for (Path moduleRoot : modulesList) {
        Path moduleDir = Optional.of(moduleRoot.getFileName()).get();
        Path destDir;
        if (moduleDir.toString().equals(templatePkgName)) {
            destDir = projectPath;
        } else {
            String moduleDirName = moduleDir.toString().split(templatePkgName + ProjectConstants.DOT, 2)[1];
            destDir = projectPath.resolve(ProjectConstants.MODULES_ROOT).resolve(moduleDirName);
            Files.createDirectories(destDir);
        }
        Files.walkFileTree(moduleRoot, new FileUtils.Copy(moduleRoot, destDir, templatePkgName, packageName));
        // Bring the module's Module.md over when it exists in the docs tree.
        Path moduleMdSource = moduleMdDirRoot.resolve(moduleDir).resolve(ProjectConstants.MODULE_MD_FILE_NAME);
        if (Files.exists(moduleMdSource)) {
            Files.copy(moduleMdSource, destDir.resolve(ProjectConstants.MODULE_MD_FILE_NAME),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    }
    copyIcon(balaPath, projectPath);
    copyPlatformLibraries(balaPath, projectPath);
    copyIncludeFiles(balaPath, projectPath, templatePackageJson);
}
// Copy the package icon (the first .png directly under the bala's docs dir, if
// any) into the new project's docs dir; any IO failure aborts the CLI.
private static void copyIcon(Path balaPath, Path projectPath) {
    Path docsPath = balaPath.resolve(ProjectConstants.BALA_DOCS_DIR);
    try (Stream<Path> pathStream = Files.walk(docsPath, 1)) {
        Optional<Path> icon = pathStream
                .filter(FileSystems.getDefault().getPathMatcher("glob:**.png")::matches)
                .findFirst();
        if (icon.isPresent()) {
            Path projectDocsDir = projectPath.resolve(ProjectConstants.BALA_DOCS_DIR);
            Files.createDirectory(projectDocsDir);
            Path projectIconPath = projectDocsDir.resolve(Optional.of(icon.get().getFileName()).get());
            Files.copy(icon.get(), projectIconPath, StandardCopyOption.REPLACE_EXISTING);
        }
    } catch (IOException e) {
        printError(errStream,
                "Error while retrieving the icon: " + e.getMessage(),
                null,
                false);
        getRuntime().exit(1);
    }
}
// Copy the bala's platform-specific jars (platform/<target>) into the new
// project's libs/ directory; no-op when the bala ships none for this platform.
private static void copyPlatformLibraries(Path balaPath, Path projectPath) throws IOException {
    Path platformLibsSource = balaPath.resolve("platform").resolve(platform);
    if (!Files.exists(platformLibsSource)) {
        return;
    }
    Path libsTarget = projectPath.resolve("libs");
    Files.createDirectories(libsTarget);
    Files.walkFileTree(platformLibsSource, new FileUtils.Copy(platformLibsSource, libsTarget));
}
// Copy the files matched by the template's `include` patterns into the new
// project, remapping module directory names via updateModuleDirectoryNaming.
private static void copyIncludeFiles(Path balaPath, Path projectPath, PackageJson templatePackageJson)
        throws IOException {
    if (templatePackageJson.getInclude() != null) {
        String templatePkgName = templatePackageJson.getName();
        List<Path> includePaths = ProjectUtils.getPathsMatchingIncludePatterns(
                templatePackageJson.getInclude(), balaPath);
        for (Path includePath : includePaths) {
            Path moduleNameUpdatedIncludePath = updateModuleDirectoryNaming(includePath, balaPath, templatePkgName);
            Path fromIncludeFilePath = balaPath.resolve(includePath);
            Path toIncludeFilePath = projectPath.resolve(moduleNameUpdatedIncludePath);
            // Only copy when the target does not already exist; never overwrite.
            if (Files.notExists(toIncludeFilePath)) {
                Files.createDirectories(toIncludeFilePath);
                Files.walkFileTree(fromIncludeFilePath, new FileUtils.Copy(fromIncludeFilePath, toIncludeFilePath));
            }
        }
    }
}
// Rewrite an include path that points inside `modules/<templatePkg>.<mod>/...`
// to `modules/<mod>/...` (the template package prefix is dropped from the
// module directory name); paths outside modules/ are returned unchanged.
private static Path updateModuleDirectoryNaming(Path includePath, Path balaPath, String templatePkgName) {
    Path modulesDirPath = balaPath.resolve(ProjectConstants.MODULES_ROOT);
    Path absoluteIncludePath = balaPath.resolve(includePath);
    if (absoluteIncludePath.startsWith(modulesDirPath)) {
        // First path element under modules/ is the `<pkg>.<module>` directory.
        Path moduleRootPath = modulesDirPath.relativize(absoluteIncludePath).subpath(0, 1);
        String moduleDirName = Optional.of(moduleRootPath.getFileName()).get().toString();
        // Keep only the module suffix after `<templatePkg>.`.
        String destinationDirName = moduleDirName.split(templatePkgName + ProjectConstants.DOT, 2)[1];
        Path includePathRelativeToModuleRoot = modulesDirPath.resolve(moduleRootPath)
                .relativize(absoluteIncludePath);
        Path updatedIncludePath = Paths.get(ProjectConstants.MODULES_ROOT).resolve(destinationDirName)
                .resolve(includePathRelativeToModuleRoot);
        return updatedIncludePath;
    }
    return includePath;
}
/**
 * Find the bala path for a given template.
 *
 * @param template template name ({@code <org>/<name>:<version>})
 * @param balaCache root of the local bala cache
 * @return the cached bala path, or {@code null} when no version was given or
 *         the bala does not exist locally
 */
static Path findBalaTemplate(String template, Path balaCache) {
    String packageName = findPkgName(template);
    String orgName = findOrg(template);
    String version = findPkgVersion(template);
    if (version == null) {
        return null;
    }
    Path balaPath = getPlatformSpecificBalaPath(orgName, packageName, version, balaCache);
    return Files.exists(balaPath) ? balaPath : null;
}
// Create a new package from a template pulled from Ballerina Central, falling
// back to the local filesystem cache when the pull cannot complete.
public static void initPackageFromCentral(Path balaCache, Path projectPath, String packageName, String template,
                                          List<Path> filesInDir) {
    System.setProperty(CentralClientConstants.ENABLE_OUTPUT_STREAM, "true");
    String templatePackageName = findPkgName(template);
    String orgName = findOrg(template);
    String version = findPkgVersion(template);
    Path pkgCacheParent = balaCache.resolve(orgName).resolve(templatePackageName);
    try {
        pullPackageFromRemote(orgName, templatePackageName, version, pkgCacheParent);
    } catch (PackageAlreadyExistsException e) {
        // Already cached locally; when no explicit version was requested, use the
        // latest version found in the cache.
        if (version == null) {
            List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent);
            PackageVersion latest = findLatest(packageVersions);
            if (latest == null) {
                throw createLauncherException("unable to find package in the filesystem cache." +
                        " This is an unexpected error : " + e.getMessage());
            }
            version = latest.toString();
        }
    } catch (CentralClientException e) {
        // Network/Central failure: warn and fall back to whatever is cached.
        errStream.println("Warning: Unable to pull the package from Ballerina Central: " + e.getMessage());
        if (findBalaTemplate(template, balaCache) == null) {
            List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent);
            PackageVersion latest = findLatest(packageVersions);
            if (latest == null) {
                throw createLauncherException("template not found in filesystem cache.");
            }
            version = latest.toString();
        }
    }
    // Fresh pull without an explicit version: resolve it from the cache now.
    if (version == null) {
        List<PackageVersion> packageVersions = getPackageVersions(pkgCacheParent);
        PackageVersion latest = findLatest(packageVersions);
        version = Objects.requireNonNull(latest).toString();
    }
    applyTemplate(orgName, templatePackageName, version, packageName, projectPath, balaCache, filesInDir);
}
/**
 * Download a package from Ballerina Central into {@code destination}.
 *
 * @param orgName org name of the package
 * @param packageName name of the package
 * @param version version to pull; may be {@code null} (Central resolves latest)
 * @param destination local cache directory to pull into
 * @throws CentralClientException when the pull fails
 */
private static void pullPackageFromRemote(String orgName, String packageName, String version, Path destination)
        throws CentralClientException {
    // Advertise every JVM target this distribution supports.
    String supportedPlatform = Arrays.stream(JvmTarget.values())
            .map(JvmTarget::code)
            .collect(Collectors.joining(","));
    Settings settings;
    try {
        settings = readSettings();
    } catch (SettingsTomlException e) {
        // A malformed Settings.toml falls back to defaults (anonymous access).
        settings = Settings.from();
    }
    CentralAPIClient client = new CentralAPIClient(RepoUtils.getRemoteRepoURL(),
            initializeProxy(settings.getProxy()), settings.getProxy().username(),
            settings.getProxy().password(),
            getAccessTokenOfCLI(settings), settings.getCentral().getConnectTimeout(),
            settings.getCentral().getReadTimeout(), settings.getCentral().getWriteTimeout(),
            settings.getCentral().getCallTimeout());
    // The previous try { ... } catch (CentralClientException e) { throw e; }
    // was a no-op: the exception is already declared, so just let it propagate.
    client.pullPackage(orgName, packageName, version, destination, supportedPlatform,
            RepoUtils.getBallerinaVersion(), false);
}
// Generate the new project's Ballerina.toml from the template bala's
// package.json, substituting the new package name and re-declaring the
// template's platform libraries relative to the copied libs/ directory.
public static void writeBallerinaToml(Path balTomlPath, PackageJson packageJson,
                                      String packageName, String platform)
        throws IOException {
    Files.writeString(balTomlPath, "[package]", StandardOpenOption.APPEND);
    Files.writeString(balTomlPath, "\norg = \"" + packageJson.getOrganization() + "\"",
            StandardOpenOption.APPEND);
    Files.writeString(balTomlPath, "\nname = \"" + packageName + "\"", StandardOpenOption.APPEND);
    Files.writeString(balTomlPath, "\nversion = \"" + packageJson.getVersion() + "\"",
            StandardOpenOption.APPEND);
    // Rename exported modules from the template package name to the new one.
    List<String> newModuleNames = packageJson.getExport().stream().map(module ->
            module.replaceFirst(packageJson.getName(), packageName)).collect(Collectors.toList());
    StringJoiner stringJoiner = new StringJoiner(",");
    for (String newModuleName : newModuleNames) {
        stringJoiner.add("\"" + newModuleName + "\"");
    }
    // NOTE(review): `.replaceFirst` binds only to the "]" literal here (operator
    // precedence), so it is effectively a no-op; the names were already replaced
    // when building newModuleNames above.
    Files.writeString(balTomlPath, "\nexport = [" + stringJoiner + "]"
            .replaceFirst(packageJson.getName(), packageName), StandardOpenOption.APPEND);
    Files.writeString(balTomlPath, "\ndistribution = \"" + packageJson.getBallerinaVersion()
            + "\"", StandardOpenOption.APPEND);
    // Optional package metadata carried over from the template.
    writePackageAttributeArray(balTomlPath, packageJson.getLicenses(), "license");
    writePackageAttributeArray(balTomlPath, packageJson.getAuthors(), "authors");
    writePackageAttributeArray(balTomlPath, packageJson.getKeywords(), "keywords");
    writePackageAttributeValue(balTomlPath, packageJson.getSourceRepository(), "repository");
    writePackageAttributeValue(balTomlPath, packageJson.getVisibility(), "visibility");
    writePackageAttributeValue(balTomlPath, packageJson.getIcon(), "icon");
    Files.writeString(balTomlPath, "\n\n[build-options]", StandardOpenOption.APPEND);
    Files.writeString(balTomlPath, "\nobservabilityIncluded = true\n", StandardOpenOption.APPEND);
    JsonArray platformLibraries = packageJson.getPlatformDependencies();
    if (platformLibraries == null) {
        return;
    }
    Files.writeString(balTomlPath, "\n[[platform." + platform + ".dependency]]", StandardOpenOption.APPEND);
    for (Object dependencies : platformLibraries) {
        JsonObject dependenciesObj = (JsonObject) dependencies;
        if (null == dependenciesObj.get("scope")) {
            // Default-scope jars are shipped in the bala; point at the local copy under libs/.
            String libPath = dependenciesObj.get("path").getAsString();
            Path libName = Optional.of(Paths.get(libPath).getFileName()).get();
            Path libRelPath = Paths.get("libs", libName.toString());
            Files.writeString(balTomlPath, "\npath = \"" + libRelPath + "\"", StandardOpenOption.APPEND);
        }
        if (dependenciesObj.get("artifactId") != null) {
            String artifactId = dependenciesObj.get("artifactId").getAsString();
            Files.writeString(balTomlPath, "\nartifactId = \"" + artifactId + "\"",
                    StandardOpenOption.APPEND);
        }
        if (dependenciesObj.get("groupId") != null) {
            String groupId = dependenciesObj.get("groupId").getAsString();
            Files.writeString(balTomlPath, "\ngroupId = \"" + groupId + "\"", StandardOpenOption.APPEND);
        }
        if (dependenciesObj.get("version") != null) {
            String dependencyVersion = dependenciesObj.get("version").getAsString();
            Files.writeString(balTomlPath, "\nversion = \"" + dependencyVersion + "\"\n",
                    StandardOpenOption.APPEND);
        }
        if (null != dependenciesObj.get("scope") && dependenciesObj.get("scope").getAsString().equals("provided")) {
            // "provided" jars are not shipped in the bala; the user must fill in the path.
            String scope = dependenciesObj.get("scope").getAsString();
            Files.writeString(balTomlPath, "scope = \"" + scope + "\"\n",
                    StandardOpenOption.APPEND);
            String artifactId = dependenciesObj.get("artifactId").getAsString();
            printError(errStream,
                    "WARNING: path for the platform dependency " + artifactId + " with provided scope " +
                            "should be specified in the Ballerina.toml",
                    null,
                    false);
        }
    }
}
public static void writeDependenciesToml(Path projectPath, DependencyGraphJson templateDependencyGraphJson,
PackageJson templatePackageJson)
throws IOException {
Path depsTomlPath = projectPath.resolve(DEPENDENCIES_TOML);
String autoGenCode = "
"\n" +
"
"
"\n";
Files.writeString(depsTomlPath, autoGenCode, StandardOpenOption.APPEND);
String balTomlVersion = "[ballerina]\n" +
"dependencies-toml-version = \"" + ProjectConstants.DEPENDENCIES_TOML_VERSION + "\"\n" +
"\n";
Files.writeString(depsTomlPath, balTomlVersion, StandardOpenOption.APPEND);
List<ModuleDependency> currentPkgModules = new ArrayList<>();
for (ModuleDependency module : templateDependencyGraphJson.getModuleDependencies()) {
if (module.getOrg().equals(templatePackageJson.getOrganization())
&& module.getPackageName().equals(templatePackageJson.getName())) {
List<ModuleDependency> currentPkgModuleDeps = module.getDependencies();
currentPkgModules.addAll(currentPkgModuleDeps);
}
}
StringBuilder pkgDesc = new StringBuilder();
for (Dependency packageDependency : templateDependencyGraphJson.getPackageDependencyGraph()) {
if (templatePackageJson.getOrganization().equals(packageDependency.getOrg())
&& templatePackageJson.getName().equals(packageDependency.getName())) {
pkgDesc.append("[[package]]\n")
.append("org = \"").append(packageDependency.getOrg()).append("\"\n")
.append("name = \"").append(ProjectUtils.defaultName(projectPath)).append("\"\n")
.append("version = \"").append(packageDependency.getVersion()).append("\"\n");
pkgDesc.append(getDependenciesArrayContent(packageDependency));
pkgDesc.append(getDependencyModulesArrayContent(
templateDependencyGraphJson.getModuleDependencies(), true, projectPath));
} else {
pkgDesc.append("[[package]]\n")
.append("org = \"").append(packageDependency.getOrg()).append("\"\n")
.append("name = \"").append(packageDependency.getName()).append("\"\n")
.append("version = \"").append(packageDependency.getVersion()).append("\"\n");
pkgDesc.append(getDependenciesArrayContent(packageDependency));
List<ModuleDependency> packageDependencyModules = new ArrayList<>();
for (ModuleDependency module : currentPkgModules) {
if (packageDependency.getOrg().equals(module.getOrg())
&& packageDependency.getName().equals(module.getPackageName())) {
packageDependencyModules.add(module);
}
}
if (!packageDependencyModules.isEmpty()) {
pkgDesc.append(getDependencyModulesArrayContent(packageDependencyModules, false, projectPath));
}
}
pkgDesc.append("\n");
}
Files.writeString(depsTomlPath, pkgDesc.toString(), StandardOpenOption.APPEND);
}
/**
 * Resolve the cached bala path for a package: the "any" platform layout is
 * tried first, then each supported JVM target in turn.
 * <p>
 * Side effect: sets the static {@code platform} field to the platform of the
 * bala that was found (it stays {@code ANY_PLATFORM} when no platform-specific
 * bala exists). When nothing is found at all, the returned path is the last
 * probed non-existing JVM-target path — callers must check {@code Files.exists}.
 *
 * @param orgName organization name
 * @param pkgName package name
 * @param version package version
 * @param balaCache root of the local bala cache
 * @return path to the bala directory (may not exist)
 */
public static Path getPlatformSpecificBalaPath(String orgName, String pkgName, String version,
                                               Path balaCache) {
    Path balaPath = balaCache.resolve(
            ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, null));
    platform = ANY_PLATFORM;
    if (!Files.exists(balaPath)) {
        for (JvmTarget supportedPlatform : JvmTarget.values()) {
            balaPath = balaCache.resolve(
                    ProjectUtils.getRelativeBalaPath(orgName, pkgName, version, supportedPlatform.code()));
            if (Files.exists(balaPath)) {
                platform = supportedPlatform.code();
                break;
            }
        }
    }
    return balaPath;
}
/**
 * Write to BalTool.toml file.
 *
 * @param balToolTomlPath path to BalTool.toml
 * @param balToolJson Bal-tool.json content
 * @param packageName name used as the tool id
 * @throws IOException when the file cannot be written
 */
public static void writeBalToolToml(Path balToolTomlPath, BalToolJson balToolJson, String packageName)
        throws IOException {
    // Assemble the whole document first, then append it in a single write; the
    // resulting file content is identical to writing each fragment separately.
    StringBuilder content = new StringBuilder("[tool]");
    content.append("\nid = \"").append(packageName).append("\"\n");
    for (String dependencyPath : balToolJson.dependencyPaths()) {
        content.append("\n[[dependency]]\n").append("path = \"").append(dependencyPath).append("\"\n");
    }
    Files.writeString(balToolTomlPath, content.toString(), StandardOpenOption.APPEND);
}
/**
 * Copy dependency jars to new package from template package.
 *
 * @param projectPath path to new project
 * @param toolsLibPath Path to /tool/libs directory containing dependencies
 * @throws IOException when the directories cannot be created or copied into
 */
public static void copyToolDependencies(Path projectPath, Path toolsLibPath) throws IOException {
    Path destToolDir = projectPath.resolve(TOOL_DIR);
    Path destLibsDir = destToolDir.resolve(LIBS_DIR);
    Files.createDirectory(destToolDir);
    Files.createDirectory(destLibsDir);
    Files.walkFileTree(toolsLibPath, new FileUtils.Copy(toolsLibPath, destLibsDir));
}
/**
 * Render the {@code dependencies} array for a Dependencies.toml package entry.
 * Each direct dependency becomes one `{org = ..., name = ...}` row; rows are
 * comma-separated with the last row carrying no trailing comma.
 *
 * @param packageDependency package dependency
 * @return formatted dependencies array content; empty string when there are none
 */
private static String getDependenciesArrayContent(Dependency packageDependency) {
    if (packageDependency.getDependencies().isEmpty()) {
        return "";
    }
    StringJoiner entries = new StringJoiner(",\n");
    for (Dependency dependency : packageDependency.getDependencies()) {
        entries.add("\t{org = \"" + dependency.getOrg()
                + "\", name = \"" + dependency.getName() + "\"}");
    }
    return "dependencies = [\n" + entries + "\n]\n";
}
/**
 * Get formatted modules array content for Dependencies.toml dependency.
 * <code>
 * modules = [
 *     {org = "ballerinax", packageName = "redis", moduleName = "redis"}
 * ]
 * </code>
 *
 * @param dependencyModules modules of the given dependency package
 * @param isCurrentPackage  is modules array generating for current package
 * @param projectPath       project path
 * @return formatted modules array content
 */
private static String getDependencyModulesArrayContent(List<ModuleDependency> dependencyModules,
boolean isCurrentPackage, Path projectPath) {
    StringBuilder modulesContent = new StringBuilder();
    if (isCurrentPackage) {
        for (ModuleDependency module : dependencyModules) {
            // For the current package, the template package-name prefix of each module name
            // is replaced with the new package's default name.
            String currentPkgName = ProjectUtils.defaultName(projectPath).value();
            String modulePkgPart = module.getModuleName().split("\\.")[0];
            // NOTE(review): replaceFirst treats modulePkgPart as a regex; fine for plain
            // identifier-like package names — confirm names can never contain regex chars.
            String currentPkgModuleName = module.getModuleName().replaceFirst(modulePkgPart, currentPkgName);
            modulesContent.append("\t{org = \"").append(module.getOrg())
                    .append("\", packageName = \"").append(currentPkgName)
                    .append("\", moduleName = \"").append(currentPkgModuleName)
                    .append("\"},\n");
        }
    } else {
        for (ModuleDependency module : dependencyModules) {
            modulesContent.append("\t{org = \"").append(module.getOrg())
                    .append("\", packageName = \"").append(module.getPackageName())
                    .append("\", moduleName = \"").append(module.getModuleName())
                    .append("\"},\n");
        }
    }
    // Strip the trailing newline then the trailing comma before closing the array.
    String modulesPart = modulesContent.toString();
    modulesPart = removeLastCharacter(trimStartingWhitespaces(modulesPart));
    return "modules = [\n" + modulesPart + "\n]\n";
}
/**
 * Write Ballerina.toml package attribute array from template package.json to new project Ballerina.toml.
 *
 * @param balTomlPath    Ballerina.toml path of the new project
 * @param attributeArray package attribute values array
 * @param attributeName  package attribute name
 * @throws IOException when error occurs writing to the Ballerina.toml
 */
private static void writePackageAttributeArray(Path balTomlPath, List<String> attributeArray, String attributeName)
        throws IOException {
    // Nothing to append for a missing or empty attribute array.
    if (attributeArray == null || attributeArray.isEmpty()) {
        return;
    }
    StringJoiner quotedValues = new StringJoiner(",");
    for (String attributeElement : attributeArray) {
        quotedValues.add("\"" + attributeElement + "\"");
    }
    Files.writeString(balTomlPath, "\n" + attributeName + " = [" + quotedValues + "]",
            StandardOpenOption.APPEND);
}
/**
 * Write Ballerina.toml package attribute from template package.json to new project Ballerina.toml.
 *
 * @param balTomlPath    Ballerina.toml path of the new project
 * @param attributeValue package attribute value
 * @param attributeName  package attribute name
 * @throws IOException when error occurs writing to the Ballerina.toml
 */
private static void writePackageAttributeValue(Path balTomlPath, String attributeValue, String attributeName)
        throws IOException {
    // Skip missing or empty attributes entirely.
    if (attributeValue == null || attributeValue.isEmpty()) {
        return;
    }
    Files.writeString(balTomlPath, "\n" + attributeName + " = \"" + attributeValue + "\"",
            StandardOpenOption.APPEND);
}
/**
 * Find the package name for a given template.
 * Templates have the form {@code <org>/<package>[:<version>]}.
 *
 * @param template template name
 * @return package name of the module; empty string when the template has no '/' part
 */
public static String findPkgName(String template) {
    String[] orgSplit = template.split("/");
    // Everything after the '/' and before an optional ':<version>' is the package name.
    String packagePart = (orgSplit.length > 1) ? orgSplit[1] : "";
    return packagePart.split(":")[0].trim();
}
/**
 * Find the organization for a given template.
 * The organization is the segment before the first '/'.
 *
 * @param template template name
 * @return org of the module
 */
public static String findOrg(String template) {
    String[] parts = template.split("/");
    return parts[0].trim();
}
/**
 * Find the package version for a given template.
 * Templates have the form {@code <org>/<package>[:<version>]}.
 *
 * @param template template name
 * @return version of the module, or null when the template carries no version
 */
public static String findPkgVersion(String template) {
    String[] slashParts = template.split("/");
    String packagePart = (slashParts.length > 1) ? slashParts[1] : "";
    String[] colonParts = packagePart.split(":");
    return (colonParts.length > 1) ? colonParts[1].trim() : null;
}
/**
 * Initialize a new ballerina project in the given path.
 *
 * @param path          project path
 * @param packageName   name of the package
 * @param template      package template
 * @param balFilesExist if bal files exist in the project
 * @throws IOException        If any IO exception occurred
 * @throws URISyntaxException If any URISyntaxException occurred
 */
public static void initPackageByTemplate(Path path, String packageName, String template, boolean balFilesExist)
        throws IOException, URISyntaxException {
    // Copy template files first, then generate the manifest matching the template kind.
    applyTemplate(path, template, balFilesExist);
    if (template.equalsIgnoreCase(LIB_DIR)) {
        initLibPackage(path, packageName);
        // The lib template ships a placeholder "lib.bal"; rename it after the package.
        Path source = path.resolve("lib.bal");
        Files.move(source, source.resolveSibling(guessPkgName(packageName, template) + ".bal"),
                StandardCopyOption.REPLACE_EXISTING);
    } else if (template.equalsIgnoreCase(TOOL_DIR)) {
        initToolPackage(path, packageName);
    } else {
        initPackage(path, packageName);
    }
    createDefaultGitignore(path);
    createDefaultDevContainer(path);
}
/**
 * Ensure a .gitignore exists in the project root, seeding it with the bundled defaults
 * only when the file is missing or empty; existing non-empty content is preserved.
 *
 * @param path project root directory
 * @throws IOException when the file cannot be created, read or written
 */
private static void createDefaultGitignore(Path path) throws IOException {
    Path gitignore = path.resolve(ProjectConstants.GITIGNORE_FILE_NAME);
    if (Files.notExists(gitignore)) {
        Files.createFile(gitignore);
    }
    // Only populate an empty file so user edits are never overwritten.
    if (Files.size(gitignore) == 0) {
        String defaultGitignore = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + GITIGNORE);
        Files.write(gitignore, defaultGitignore.getBytes(StandardCharsets.UTF_8));
    }
}
/**
 * Ensure a devcontainer file exists in the project root, seeding it with the bundled
 * default (with "latest" replaced by the current Ballerina version) only when the file
 * is missing or empty.
 *
 * @param path project root directory
 * @throws IOException when the file cannot be created, read or written
 */
private static void createDefaultDevContainer(Path path) throws IOException {
    Path devContainer = path.resolve(ProjectConstants.DEVCONTAINER);
    if (Files.notExists(devContainer)) {
        Files.createFile(devContainer);
    }
    // Only populate an empty file so user edits are never overwritten.
    if (Files.size(devContainer) == 0) {
        String defaultDevContainer = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + DEVCONTAINER);
        // Pin the container image to the running Ballerina version instead of "latest".
        defaultDevContainer = defaultDevContainer.replace("latest", RepoUtils.getBallerinaVersion());
        Files.write(devContainer, defaultDevContainer.getBytes(StandardCharsets.UTF_8));
    }
}
/**
 * Get the list of templates.
 *
 * @return list of template directory names; empty list when the lookup fails
 */
public static List<String> getTemplates() {
    try {
        Path templateDir = getTemplatePath();
        // Files.walk keeps directory handles open until the stream is closed:
        // use try-with-resources to avoid a resource leak.
        List<String> templates;
        try (Stream<Path> walk = Files.walk(templateDir, 1)) {
            templates = walk.filter(Files::isDirectory)
                    .filter(directory -> !templateDir.equals(directory))
                    .map(Path::getFileName)
                    .filter(Objects::nonNull)
                    .map(Path::toString)
                    .collect(Collectors.toList());
        }
        // Inside a jar file system the names carry the separator; strip it.
        if (null != jarFs) {
            return templates.stream()
                    .map(t -> t.replace(jarFs.getSeparator(), ""))
                    .collect(Collectors.toList());
        }
        return templates;
    } catch (IOException | URISyntaxException e) {
        // Templates are optional; degrade to an empty list on any lookup failure.
        return new ArrayList<>();
    }
}
/**
 * Get the path to the given template.
 *
 * @return path of the given template
 * @throws URISyntaxException if any URISyntaxException occurred
 */
private static Path getTemplatePath() throws URISyntaxException {
    // Fail with a descriptive message instead of a bare NPE when the templates
    // resource is missing from the distribution.
    URI uri = Objects.requireNonNull(
            CommandUtil.class.getClassLoader().getResource(CREATE_CMD_TEMPLATES),
            "create command templates are missing from the distribution").toURI();
    // A '!' in the URI means the templates live inside a jar; resolve via the jar FS.
    if (uri.toString().contains("!")) {
        final String[] array = uri.toString().split("!");
        return jarFs.getPath(array[1]);
    } else {
        return Paths.get(uri);
    }
}
/**
 * Apply the template to the created module.
 *
 * @param modulePath    path to the module
 * @param template      template name
 * @param balFilesExist if bal files exist in the project
 * @throws IOException        if any IOException occurred
 * @throws URISyntaxException if any URISyntaxException occurred
 */
public static void applyTemplate(Path modulePath, String template, boolean balFilesExist)
        throws IOException, URISyntaxException {
    Path templateDir = getTemplatePath().resolve(template);
    if (template.equalsIgnoreCase(MAIN_TEMPLATE)) {
        // "main" is layered: copy the default template first, then the main files on top.
        templateDir = getTemplatePath().resolve(DEFAULT_TEMPLATE);
        Path tempDirTest = getTemplatePath().resolve(MAIN_TEMPLATE);
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        Files.walkFileTree(tempDirTest, new FileUtils.Copy(tempDirTest, modulePath));
    } else if (template.equalsIgnoreCase(DEFAULT_TEMPLATE)) {
        // Avoid clobbering user sources: skip the default template when .bal files exist.
        if (!balFilesExist) {
            Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
        }
    } else {
        Files.walkFileTree(templateDir, new FileUtils.Copy(templateDir, modulePath));
    }
}
/**
 * Initialize a new ballerina project in the given path.
 *
 * @param path        Project path
 * @param packageName name used to derive the package name in Ballerina.toml
 * @throws IOException If any IO exception occurred
 */
public static void initPackage(Path path, String packageName) throws IOException {
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml");
    // NOTE(review): replaceAll interprets '$' and '\' in the replacement specially;
    // org/package names are expected to be plain identifiers — confirm if that changes.
    defaultManifest = defaultManifest
            .replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, "app"))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    Files.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));
}
/**
 * Initialize a new ballerina library package in the given path: generate Ballerina.toml
 * from the lib manifest template and seed Package.md from the bundled default.
 *
 * @param path        Project path
 * @param packageName name used to derive the package name in Ballerina.toml
 * @throws IOException If any IO exception occurred
 */
private static void initLibPackage(Path path, String packageName) throws IOException {
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-lib.toml");
    defaultManifest = defaultManifest.replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, "lib"))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    // NOTE(review): `write` is presumably a statically imported Files.write — confirm;
    // initPackage uses the qualified Files.write form.
    write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));
    String packageMd = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/Package.md");
    write(path.resolve(ProjectConstants.PACKAGE_MD_FILE_NAME), packageMd.getBytes(StandardCharsets.UTF_8));
}
/**
 * Initialize a new ballerina tool package in the given path.
 *
 * @param path        Project path
 * @param packageName package name
 * @throws IOException If any IO exception occurred
 */
private static void initToolPackage(Path path, String packageName) throws IOException {
    // Ballerina.toml is generated from the app manifest template.
    Path ballerinaToml = path.resolve(ProjectConstants.BALLERINA_TOML);
    Files.createFile(ballerinaToml);
    String defaultManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-app.toml");
    defaultManifest = defaultManifest
            .replaceAll(ORG_NAME, ProjectUtils.guessOrgName())
            .replaceAll(PKG_NAME, guessPkgName(packageName, TOOL_DIR))
            .replaceAll(DIST_VERSION, RepoUtils.getBallerinaShortVersion());
    Files.write(ballerinaToml, defaultManifest.getBytes(StandardCharsets.UTF_8));
    // Tool packages additionally carry a BalTool.toml with the tool id.
    Path balToolToml = path.resolve(ProjectConstants.BAL_TOOL_TOML);
    Files.createFile(balToolToml);
    String balToolManifest = FileUtils.readFileAsString(NEW_CMD_DEFAULTS + "/" + "manifest-tool.toml");
    balToolManifest = balToolManifest.replaceAll(TOOL_ID, guessPkgName(packageName, TOOL_DIR));
    // NOTE(review): `write` appears to be a statically imported Files.write — confirm.
    write(balToolToml, balToolManifest.getBytes(StandardCharsets.UTF_8));
}
/**
 * Pick the latest of the given package versions.
 *
 * @param packageVersions candidate versions
 * @return the latest version, or null when the list is empty
 */
protected static PackageVersion findLatest(List<PackageVersion> packageVersions) {
    if (packageVersions.isEmpty()) {
        return null;
    }
    // Fold the list pairwise through getLatest, seeded with the first element.
    PackageVersion best = packageVersions.get(0);
    for (int i = 1; i < packageVersions.size(); i++) {
        best = getLatest(best, packageVersions.get(i));
    }
    return best;
}
/**
 * Compare two package versions: a stable release always beats a pre-release;
 * otherwise the semantically greater wins, with ties favoring the first argument.
 *
 * @param v1 first version
 * @param v2 second version
 * @return the later of the two versions
 */
protected static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) {
    SemanticVersion semVer1 = v1.value();
    SemanticVersion semVer2 = v2.value();
    // Exactly one side being a pre-release decides immediately for the stable one.
    if (semVer1.isPreReleaseVersion() != semVer2.isPreReleaseVersion()) {
        return semVer1.isPreReleaseVersion() ? v2 : v1;
    }
    return semVer1.greaterThanOrEqualTo(semVer2) ? v1 : v2;
}
/**
 * List the versions available under the given bala package directory.
 *
 * @param balaPackagePath directory whose children are version directories
 * @return package versions parsed from the child directory names
 * @throws RuntimeException when the distribution cache cannot be listed
 */
public static List<PackageVersion> getPackageVersions(Path balaPackagePath) {
    List<Path> versions = new ArrayList<>();
    if (Files.exists(balaPackagePath)) {
        // Files.list holds an open directory handle; close it via try-with-resources.
        try (Stream<Path> contents = Files.list(balaPackagePath)) {
            versions.addAll(contents.collect(Collectors.toList()));
        } catch (IOException e) {
            // Keep the original message but attach the cause for debuggability.
            throw new RuntimeException("Error while accessing Distribution cache: " + e.getMessage(), e);
        }
    }
    return pathToVersions(versions);
}
/**
 * Convert directory paths to package versions, skipping entries whose names do not
 * parse as valid versions.
 *
 * @param versions paths whose file names are candidate version strings
 * @return the successfully parsed versions
 */
protected static List<PackageVersion> pathToVersions(List<Path> versions) {
    List<PackageVersion> availableVersions = new ArrayList<>();
    // Plain loop instead of a stream with a side-effecting forEach.
    for (Path path : versions) {
        String version = Optional.ofNullable(path)
                .map(Path::getFileName)
                .map(Path::toString)
                .orElse("0.0.0");
        try {
            availableVersions.add(PackageVersion.from(version));
        } catch (ProjectException ignored) {
            // Directory names that are not valid versions are deliberately skipped.
        }
    }
    return availableVersions;
}
/**
 * Remove trailing whitespace of a string.
 * <p>
 * NOTE(review): despite the method name, the {@code \s++$} pattern is anchored at the
 * END of the string, so this strips trailing (not leading) whitespace. Callers rely on
 * this to drop the final newline before {@link #removeLastCharacter(String)} removes
 * the trailing comma — consider renaming to trimTrailingWhitespaces.
 *
 * @param str given string
 * @return string with trailing whitespace removed
 */
private static String trimStartingWhitespaces(String str) {
    return str.replaceFirst("\\s++$", "");
}
/**
 * Remove last character of a string.
 * <p>
 * Precondition: {@code str} must be non-empty; an empty string raises
 * {@code StringIndexOutOfBoundsException}. Existing callers only pass freshly built,
 * non-empty content.
 *
 * @param str given string
 * @return last character removed string
 */
private static String removeLastCharacter(String str) {
    return str.substring(0, str.length() - 1);
}
/**
 * Check if files of the given template exist in a given path.
 *
 * @param template    given string
 * @param packagePath given path
 * @return names of already-existing non-.bal template files, each followed by
 *         FILE_STRING_SEPARATOR; empty string when none clash
 * @throws URISyntaxException if URI syntax exception occurred
 * @throws IOException        if IO exception occurred
 */
public static String checkTemplateFilesExists(String template, Path packagePath) throws URISyntaxException,
        IOException {
    Path templateDir = getTemplatePath().resolve(template);
    // Files.list must be closed to release the directory handle (resource leak otherwise).
    List<Path> templateFilePathList;
    try (Stream<Path> paths = Files.list(templateDir)) {
        templateFilePathList = paths.collect(Collectors.toList());
    }
    StringBuilder existingFiles = new StringBuilder();
    for (Path path : templateFilePathList) {
        Optional<String> fileNameOptional = Optional.ofNullable(path.getFileName()).map(Path::toString);
        if (fileNameOptional.isPresent()) {
            String fileName = fileNameOptional.get();
            // .bal sources are handled separately; only flag other clashing files.
            if (!fileName.endsWith(ProjectConstants.BLANG_SOURCE_EXT)
                    && Files.exists(packagePath.resolve(fileName))) {
                existingFiles.append(fileName).append(FILE_STRING_SEPARATOR);
            }
        }
    }
    return existingFiles.toString();
}
/**
 * Check if common files of a package exist in a given path.
 *
 * @param packagePath given path
 * @return names of the existing files, each followed by FILE_STRING_SEPARATOR;
 *         empty string when none exist
 */
public static String checkPackageFilesExists(Path packagePath) {
    // Standard package artifacts that would clash with a generated package.
    String[] packageFiles = {DEPENDENCIES_TOML, BAL_TOOL_TOML, ProjectConstants.PACKAGE_MD_FILE_NAME,
            ProjectConstants.MODULE_MD_FILE_NAME, ProjectConstants.MODULES_ROOT, ProjectConstants.TEST_DIR_NAME};
    StringBuilder existingFiles = new StringBuilder();
    for (String file : packageFiles) {
        if (Files.exists(packagePath.resolve(file))) {
            existingFiles.append(file).append(FILE_STRING_SEPARATOR);
        }
    }
    return existingFiles.toString();
}
/**
 * Check if .bal files exist in a given path.
 *
 * @param packagePath given path
 * @return true when the directory directly contains at least one .bal file
 * @throws IOException if the directory cannot be listed
 */
public static boolean balFilesExists(Path packagePath) throws IOException {
    // Files.list must be closed to release the directory handle (resource leak otherwise).
    try (Stream<Path> files = Files.list(packagePath)) {
        return files.anyMatch(path -> path.toString().endsWith(ProjectConstants.BLANG_SOURCE_EXT));
    }
}
/**
 * Get the latest version from a given list of versions.
 * <p>
 * Precondition: {@code versions} must be non-empty.
 *
 * @param versions the list of semver strings
 * @return the latest version
 */
static String getLatestVersion(List<String> versions) {
    String latestVersion = versions.get(0);
    // Parse the current best once instead of re-parsing it on every iteration.
    SemanticVersion latestSemVer = SemanticVersion.from(latestVersion);
    for (String version : versions) {
        SemanticVersion candidate = SemanticVersion.from(version);
        if (candidate.greaterThan(latestSemVer)) {
            latestVersion = version;
            latestSemVer = candidate;
        }
    }
    return latestVersion;
}
/**
 * Print the given diagnostics to the error stream.
 * (The previous javadoc here described an unrelated "pull dependencies" operation
 * with parameters this method does not have; corrected.)
 *
 * @param diagnostics diagnostics to print
 */
private static void printDiagnostics(Collection<Diagnostic> diagnostics) {
    for (Diagnostic diagnostic: diagnostics) {
        CommandUtil.printError(errStream, diagnostic.toString(), null, false);
    }
}
/**
 * Check whether any resolved dependency declares a platform dependency with
 * {@code scope = "provided"}.
 *
 * @param packageCompilation compilation whose resolved dependency graph is inspected
 * @return true when at least one provided-scope platform dependency exists
 */
private static boolean hasProvidedPlatformDeps(PackageCompilation packageCompilation) {
    // anyMatch short-circuits on the first provided-scope dependency, unlike the
    // previous collect-into-a-set-then-isEmpty approach.
    return packageCompilation.getResolution().allDependencies()
            .stream()
            .map(ResolvedPackageDependency::packageInstance)
            .map(Package::manifest)
            .flatMap(pkgManifest -> pkgManifest.platforms().values().stream())
            .filter(Objects::nonNull)
            .flatMap(pkgPlatform -> pkgPlatform.dependencies().stream())
            .anyMatch(dependency -> "provided".equals(dependency.get("scope")));
}
} |
It might also be better for the new constructor to just take a long and we calculate the share usage in bytes in the constructor. | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)),
response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)), | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics(response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
 * Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
 * endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
 * {@code azureFileStorageClient}.
 *
 * @param client         Client that interacts with the service interfaces
 * @param shareName      Name of the share
 * @param snapshot       Snapshot id the client is scoped to; may be null
 * @param accountName    Name of the storage account
 * @param serviceVersion Service version the client uses
 * @throws NullPointerException if {@code shareName} is null
 */
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
    Objects.requireNonNull(shareName, "'shareName' cannot be null.");
    this.shareName = shareName;
    this.snapshot = snapshot;
    this.accountName = accountName;
    this.azureFileStorageClient = client;
    this.serviceVersion = serviceVersion;
}
/**
 * Get the url of the storage share client.
 *
 * @return the url of the Storage Share.
 */
public String getShareUrl() {
    // Base endpoint plus the share name; a snapshot, when set, rides along as a query parameter.
    String shareUrl = azureFileStorageClient.getUrl() + "/" + shareName;
    if (snapshot != null) {
        shareUrl = shareUrl + "?snapshot=" + snapshot;
    }
    return shareUrl;
}
/**
 * Gets the service version the client is using.
 * The value is fixed at construction time.
 *
 * @return the service version the client is using.
 */
public ShareServiceVersion getServiceVersion() {
    return serviceVersion;
}
/**
 * Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
 *
 * <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
 * azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
 *
 * @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
 */
public ShareDirectoryAsyncClient getRootDirectoryClient() {
    // The root directory is addressed by the empty directory name.
    return getDirectoryClient("");
}
/**
 * Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
 *
 * <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
 * azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
 *
 * @param directoryName Name of the directory
 * @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
 */
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
    // The directory client shares this client's pipeline, share name, snapshot,
    // account and service version.
    return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
            serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
 * Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
 * endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
 * {@code azureFileStorageClient}.
 *
 * @param client         Client that interacts with the service interfaces
 * @param shareName      Name of the share
 * @param snapshot       Snapshot id the client is scoped to; may be null
 * @param accountName    Name of the storage account
 * @param serviceVersion Service version the client uses
 * @throws NullPointerException if {@code shareName} is null
 */
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
    Objects.requireNonNull(shareName, "'shareName' cannot be null.");
    this.shareName = shareName;
    this.snapshot = snapshot;
    this.accountName = accountName;
    this.azureFileStorageClient = client;
    this.serviceVersion = serviceVersion;
}
/**
 * Get the url of the storage share client.
 *
 * @return the url of the Storage Share.
 */
public String getShareUrl() {
    // Base endpoint plus the share name; a snapshot, when set, rides along as a query parameter.
    StringBuilder shareUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/").append(shareName);
    if (snapshot != null) {
        shareUrlString.append("?snapshot=").append(snapshot);
    }
    return shareUrlString.toString();
}
/**
 * Gets the service version the client is using.
 * The value is fixed at construction time.
 *
 * @return the service version the client is using.
 */
public ShareServiceVersion getServiceVersion() {
    return serviceVersion;
}
/**
 * Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
 *
 * <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
 * azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
 *
 * @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
 */
public ShareDirectoryAsyncClient getRootDirectoryClient() {
    // The root directory is addressed by the empty directory name.
    return getDirectoryClient("");
}
/**
 * Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
 *
 * <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
 * azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
 *
 * @param directoryName Name of the directory
 * @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
 */
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
    // The directory client shares this client's pipeline, share name, snapshot,
    // account and service version.
    return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
            serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient |
> `#maybeCreate` already handles potential existing one. That's the "maybe" part. This code path is hit multiple times but if this code is allowed to run multiple times it will create duplicate sets of metadata. > TBH, I'm not sure why these get registered instead of created anyway. register and create do the same thing, but create is eager whereas register is deferred. Deferring doesn't buy us much yet as Quarkus digs into these Configurations almost immediately right now. | private void setUpDeploymentConfiguration() {
if (project.getConfigurations().findByName(this.deploymentConfigurationName) == null) {
project.getConfigurations().register(this.deploymentConfigurationName, configuration -> {
Configuration enforcedPlatforms = this.getPlatformConfiguration();
configuration.extendsFrom(enforcedPlatforms);
configuration.getDependencies().addAllLater(project.provider(() -> {
ConditionalDependenciesEnabler cdEnabler =
new ConditionalDependenciesEnabler(project, mode, enforcedPlatforms);
final Collection<ExtensionDependency> allExtensions = cdEnabler.getAllExtensions();
Set<ExtensionDependency> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(), allExtensions);
for (ExtensionDependency knownExtension : allExtensions) {
if (knownExtension.isConditional()) {
extensions.add(knownExtension);
}
}
final Set<ModuleVersionIdentifier> alreadyProcessed = new HashSet<>(extensions.size());
final DependencyHandler dependencies = project.getDependencies();
final Set<Dependency> deploymentDependencies = new HashSet<>();
for (ExtensionDependency extension : extensions) {
if (extension instanceof LocalExtensionDependency) {
LocalExtensionDependency localExtensionDependency = (LocalExtensionDependency) extension;
deploymentDependencies.add(
dependencies.project(Collections.singletonMap("path", localExtensionDependency.findDeploymentModulePath())));
} else {
if (!alreadyProcessed.add(extension.getExtensionId())) {
continue;
}
deploymentDependencies.add(dependencies.create(
extension.getDeploymentModule().getGroupId() + ":"
+ extension.getDeploymentModule().getArtifactId() + ":"
+ extension.getDeploymentModule().getVersion()));
}
}
return deploymentDependencies;
}));
});
}
} | project.getConfigurations().register(this.deploymentConfigurationName, configuration -> { | private void setUpDeploymentConfiguration() {
if (project.getConfigurations().findByName(this.deploymentConfigurationName) == null) {
project.getConfigurations().create(this.deploymentConfigurationName, configuration -> {
Configuration enforcedPlatforms = this.getPlatformConfiguration();
configuration.extendsFrom(enforcedPlatforms);
configuration.getDependencies().addAllLater(project.provider(() -> {
ConditionalDependenciesEnabler cdEnabler = new ConditionalDependenciesEnabler(project, mode,
enforcedPlatforms);
final Collection<ExtensionDependency> allExtensions = cdEnabler.getAllExtensions();
Set<ExtensionDependency> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(),
allExtensions);
for (ExtensionDependency knownExtension : allExtensions) {
if (knownExtension.isConditional()) {
extensions.add(knownExtension);
}
}
final Set<ModuleVersionIdentifier> alreadyProcessed = new HashSet<>(extensions.size());
final DependencyHandler dependencies = project.getDependencies();
final Set<Dependency> deploymentDependencies = new HashSet<>();
for (ExtensionDependency extension : extensions) {
if (extension instanceof LocalExtensionDependency) {
LocalExtensionDependency localExtensionDependency = (LocalExtensionDependency) extension;
deploymentDependencies.add(
dependencies.project(Collections.singletonMap("path",
localExtensionDependency.findDeploymentModulePath())));
} else {
if (!alreadyProcessed.add(extension.getExtensionId())) {
continue;
}
deploymentDependencies.add(dependencies.create(
extension.getDeploymentModule().getGroupId() + ":"
+ extension.getDeploymentModule().getArtifactId() + ":"
+ extension.getDeploymentModule().getVersion()));
}
}
return deploymentDependencies;
}));
});
}
} | class ApplicationDeploymentClasspathBuilder {
/**
 * Build the runtime classpath configuration name for a launch mode:
 * {@code quarkus<Dev|Test|Prod>[Base]RuntimeClasspathConfiguration}.
 *
 * @param mode launch mode the configuration belongs to
 * @param base whether this is the "base" variant of the configuration
 * @return the configuration name
 */
private static String getRuntimeConfigName(LaunchMode mode, boolean base) {
    final String modePart;
    if (mode == LaunchMode.DEVELOPMENT) {
        modePart = "Dev";
    } else if (mode == LaunchMode.TEST) {
        modePart = "Test";
    } else {
        modePart = "Prod";
    }
    return "quarkus" + modePart + (base ? "Base" : "") + "RuntimeClasspathConfiguration";
}
/**
 * Name of the "base" runtime classpath configuration for the given launch mode.
 *
 * @param mode launch mode
 * @return the base runtime configuration name
 */
public static String getBaseRuntimeConfigName(LaunchMode mode) {
    return getRuntimeConfigName(mode, true);
}
/**
 * Name of the final (non-base) runtime classpath configuration for the given launch mode.
 *
 * @param mode launch mode
 * @return the final runtime configuration name
 */
public static String getFinalRuntimeConfigName(LaunchMode mode) {
    return getRuntimeConfigName(mode, false);
}
/**
 * Eagerly creates the Quarkus dev-mode configuration and the per-launch-mode base
 * runtime configurations, wiring them to the standard Java plugin configurations,
 * and registers the Panache annotation processor when quarkus-panache-common is on
 * the compile classpath.
 *
 * @param project the Gradle project whose configurations are initialized
 */
public static void initConfigurations(Project project) {
    final ConfigurationContainer configContainer = project.getConfigurations();
    // Dev-mode configuration tracks everything declared as "implementation".
    configContainer.create(ToolingUtils.DEV_MODE_CONFIGURATION_NAME)
            .extendsFrom(configContainer.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));
    // Base runtime configurations per launch mode, each extending the matching
    // Java plugin classpath configuration(s).
    configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.TEST))
            .extendsFrom(configContainer.getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME));
    configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.NORMAL))
            .extendsFrom(configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
    configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.DEVELOPMENT))
            .extendsFrom(
                    configContainer.getByName(ToolingUtils.DEV_MODE_CONFIGURATION_NAME),
                    configContainer.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME),
                    configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
    // When quarkus-panache-common is on the compile classpath, add the matching
    // annotation processor at the same version.
    configContainer.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME)
            .withDependencies(annotationProcessors -> {
                Set<ResolvedArtifact> compileClasspathArtifacts = DependencyUtils
                        .duplicateConfiguration(project, configContainer
                                .getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME))
                        .getResolvedConfiguration()
                        .getResolvedArtifacts();
                for (ResolvedArtifact artifact : compileClasspathArtifacts) {
                    if ("quarkus-panache-common".equals(artifact.getName())
                            && "io.quarkus".equals(artifact.getModuleVersion().getId().getGroup())) {
                        project.getDependencies().add(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME,
                                "io.quarkus:quarkus-panache-common:" + artifact.getModuleVersion().getId().getVersion());
                    }
                }
            });
}
// Gradle project this builder configures.
private final Project project;
// Launch mode the configuration names are derived from.
private final LaunchMode mode;
// Derived configuration names for this launch mode (see getRuntimeConfigName).
private final String runtimeConfigurationName;
private final String platformConfigurationName;
private final String deploymentConfigurationName;
/**
 * The platform configuration updates the PlatformImports, but since the PlatformImports don't
 * have a place to be stored in the project, they're stored here. The way that extensions are
 * tracked and conditional dependencies needs some attention, which will likely resolve this.
 */
private static final HashMap<String, PlatformImportsImpl> platformImports = new HashMap<>();
/**
 * The key used to look up the correct PlatformImports that matches the platformConfigurationName
 */
private final String platformImportName;
/**
 * Derives the platform/runtime/deployment configuration names for the given launch
 * mode and immediately sets up the three configurations. The setup order matters:
 * the platform configuration is referenced by the other two.
 *
 * @param project Gradle project to configure
 * @param mode    launch mode the configurations are built for
 */
public ApplicationDeploymentClasspathBuilder(Project project, LaunchMode mode) {
    this.project = project;
    this.mode = mode;
    this.runtimeConfigurationName = getFinalRuntimeConfigName(mode);
    this.platformConfigurationName = ToolingUtils.toPlatformConfigurationName(this.runtimeConfigurationName);
    this.deploymentConfigurationName = ToolingUtils.toDeploymentConfigurationName(this.runtimeConfigurationName);
    // Key into the static platformImports map; scoped by project path to stay
    // unique across a multi-project build.
    this.platformImportName = project.getPath() + ":" + this.platformConfigurationName;
    setUpPlatformConfiguration();
    setUpRuntimeConfiguration();
    setUpDeploymentConfiguration();
}
private void setUpPlatformConfiguration() {
if (project.getConfigurations().findByName(this.platformConfigurationName) == null) {
PlatformImportsImpl platformImports =
ApplicationDeploymentClasspathBuilder.platformImports.computeIfAbsent(this.platformImportName, (ignored) -> new PlatformImportsImpl());
project.getConfigurations().register(this.platformConfigurationName, configuration -> {
configuration.getDependencies().addAllLater(project.provider(() ->
project.getConfigurations()
.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME)
.getAllDependencies()
.stream()
.filter(dependency ->
dependency instanceof ModuleDependency &&
ToolingUtils.isEnforcedPlatform((ModuleDependency) dependency))
.collect(Collectors.toList())
));
configuration.getResolutionStrategy().eachDependency(d -> {
ModuleIdentifier identifier = d.getTarget().getModule();
final String group = identifier.getGroup();
final String name = identifier.getName();
if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) {
platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), "json",
d.getTarget().getVersion());
} else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) {
final DefaultDependencyArtifact dep = new DefaultDependencyArtifact();
dep.setExtension("properties");
dep.setType("properties");
dep.setName(name);
final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(
group, name, d.getTarget().getVersion(), null);
gradleDep.addArtifact(dep);
for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep)
.getResolvedConfiguration().getResolvedArtifacts()) {
if (a.getName().equals(name)) {
try {
platformImports.addPlatformProperties(group, name, null, "properties", d.getTarget().getVersion(),
a.getFile().toPath());
} catch (AppModelResolverException e) {
throw new GradleException("Failed to import platform properties " + a.getFile(), e);
}
break;
}
}
}
});
});
}
}
private void setUpRuntimeConfiguration() {
if (project.getConfigurations().findByName(this.runtimeConfigurationName) == null) {
project.getConfigurations().register(this.runtimeConfigurationName, configuration ->
configuration.extendsFrom(
project.getConfigurations()
.getByName(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(mode))));
}
}
public Configuration getPlatformConfiguration() {
return project.getConfigurations().getByName(this.platformConfigurationName);
}
private Configuration getRawRuntimeConfiguration() {
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
/**
* Forces deployment configuration to resolve to discover conditional dependencies.
*/
public Configuration getRuntimeConfiguration() {
this.getDeploymentConfiguration().resolve();
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
public Configuration getDeploymentConfiguration() {
return project.getConfigurations().getByName(this.deploymentConfigurationName);
}
/**
* Forces the platform configuration to resolve and then uses that to populate platform imports.
*/
public PlatformImports getPlatformImports() {
this.getPlatformConfiguration().getResolvedConfiguration();
return platformImports.get(this.platformImportName);
}
private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration,
Collection<ExtensionDependency> knownExtensions) {
Set<ExtensionDependency> firstLevelExtensions = new HashSet<>();
Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration()
.getFirstLevelModuleDependencies();
Set<String> visitedArtifacts = new HashSet<>();
for (ResolvedDependency firstLevelModuleDependency : firstLevelModuleDependencies) {
firstLevelExtensions
.addAll(collectQuarkusExtensions(firstLevelModuleDependency, visitedArtifacts, knownExtensions));
}
return firstLevelExtensions;
}
private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts,
Collection<ExtensionDependency> knownExtensions) {
String artifactKey = String.format("%s:%s", dependency.getModuleGroup(), dependency.getModuleName());
if (!visitedArtifacts.add(artifactKey)) {
return Collections.emptySet();
}
Set<ExtensionDependency> extensions = new LinkedHashSet<>();
ExtensionDependency extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(),
dependency.getModuleVersion(), knownExtensions);
if (extension != null) {
extensions.add(extension);
} else {
for (ResolvedDependency child : dependency.getChildren()) {
extensions.addAll(collectQuarkusExtensions(child, visitedArtifacts, knownExtensions));
}
}
return extensions;
}
private ExtensionDependency getExtensionOrNull(String group, String artifact, String version,
Collection<ExtensionDependency> knownExtensions) {
for (ExtensionDependency knownExtension : knownExtensions) {
if (group.equals(knownExtension.getGroup()) && artifact.equals(knownExtension.getName())
&& version.equals(knownExtension.getVersion())) {
return knownExtension;
}
}
return null;
}
} | class ApplicationDeploymentClasspathBuilder {
private static String getRuntimeConfigName(LaunchMode mode, boolean base) {
final StringBuilder sb = new StringBuilder();
sb.append("quarkus");
if (mode == LaunchMode.DEVELOPMENT) {
sb.append("Dev");
} else if (mode == LaunchMode.TEST) {
sb.append("Test");
} else {
sb.append("Prod");
}
if (base) {
sb.append("Base");
}
sb.append("RuntimeClasspathConfiguration");
return sb.toString();
}
public static String getBaseRuntimeConfigName(LaunchMode mode) {
return getRuntimeConfigName(mode, true);
}
public static String getFinalRuntimeConfigName(LaunchMode mode) {
return getRuntimeConfigName(mode, false);
}
public static void initConfigurations(Project project) {
final ConfigurationContainer configContainer = project.getConfigurations();
configContainer.create(ToolingUtils.DEV_MODE_CONFIGURATION_NAME)
.extendsFrom(configContainer.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.TEST))
.extendsFrom(configContainer.getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.NORMAL))
.extendsFrom(configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.DEVELOPMENT))
.extendsFrom(
configContainer.getByName(ToolingUtils.DEV_MODE_CONFIGURATION_NAME),
configContainer.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME),
configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME)
.withDependencies(annotationProcessors -> {
Set<ResolvedArtifact> compileClasspathArtifacts = DependencyUtils
.duplicateConfiguration(project, configContainer
.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME))
.getResolvedConfiguration()
.getResolvedArtifacts();
for (ResolvedArtifact artifact : compileClasspathArtifacts) {
if ("quarkus-panache-common".equals(artifact.getName())
&& "io.quarkus".equals(artifact.getModuleVersion().getId().getGroup())) {
project.getDependencies().add(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME,
"io.quarkus:quarkus-panache-common:" + artifact.getModuleVersion().getId().getVersion());
}
}
});
}
private final Project project;
private final LaunchMode mode;
private final String runtimeConfigurationName;
private final String platformConfigurationName;
private final String deploymentConfigurationName;
/**
* The platform configuration updates the PlatformImports, but since the PlatformImports don't
* have a place to be stored in the project, they're stored here. The way that extensions are
* tracked and conditional dependencies needs some attention, which will likely resolve this.
*/
private static final HashMap<String, PlatformImportsImpl> platformImports = new HashMap<>();
/**
* The key used to look up the correct PlatformImports that matches the platformConfigurationName
*/
private final String platformImportName;
public ApplicationDeploymentClasspathBuilder(Project project, LaunchMode mode) {
this.project = project;
this.mode = mode;
this.runtimeConfigurationName = getFinalRuntimeConfigName(mode);
this.platformConfigurationName = ToolingUtils.toPlatformConfigurationName(this.runtimeConfigurationName);
this.deploymentConfigurationName = ToolingUtils.toDeploymentConfigurationName(this.runtimeConfigurationName);
this.platformImportName = project.getPath() + ":" + this.platformConfigurationName;
setUpPlatformConfiguration();
setUpRuntimeConfiguration();
setUpDeploymentConfiguration();
}
private void setUpPlatformConfiguration() {
if (project.getConfigurations().findByName(this.platformConfigurationName) == null) {
PlatformImportsImpl platformImports = ApplicationDeploymentClasspathBuilder.platformImports
.computeIfAbsent(this.platformImportName, (ignored) -> new PlatformImportsImpl());
project.getConfigurations().create(this.platformConfigurationName, configuration -> {
configuration.getDependencies().addAllLater(project.provider(() -> project.getConfigurations()
.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME)
.getAllDependencies()
.stream()
.filter(dependency -> dependency instanceof ModuleDependency &&
ToolingUtils.isEnforcedPlatform((ModuleDependency) dependency))
.collect(Collectors.toList())));
configuration.getResolutionStrategy().eachDependency(d -> {
ModuleIdentifier identifier = d.getTarget().getModule();
final String group = identifier.getGroup();
final String name = identifier.getName();
if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) {
platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), "json",
d.getTarget().getVersion());
} else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) {
final DefaultDependencyArtifact dep = new DefaultDependencyArtifact();
dep.setExtension("properties");
dep.setType("properties");
dep.setName(name);
final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(
group, name, d.getTarget().getVersion(), null);
gradleDep.addArtifact(dep);
for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep)
.getResolvedConfiguration().getResolvedArtifacts()) {
if (a.getName().equals(name)) {
try {
platformImports.addPlatformProperties(group, name, null, "properties",
d.getTarget().getVersion(),
a.getFile().toPath());
} catch (AppModelResolverException e) {
throw new GradleException("Failed to import platform properties " + a.getFile(), e);
}
break;
}
}
}
});
});
}
}
private void setUpRuntimeConfiguration() {
if (project.getConfigurations().findByName(this.runtimeConfigurationName) == null) {
project.getConfigurations().create(this.runtimeConfigurationName, configuration -> configuration.extendsFrom(
project.getConfigurations()
.getByName(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(mode))));
}
}
public Configuration getPlatformConfiguration() {
return project.getConfigurations().getByName(this.platformConfigurationName);
}
private Configuration getRawRuntimeConfiguration() {
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
/**
* Forces deployment configuration to resolve to discover conditional dependencies.
*/
public Configuration getRuntimeConfiguration() {
this.getDeploymentConfiguration().resolve();
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
public Configuration getDeploymentConfiguration() {
return project.getConfigurations().getByName(this.deploymentConfigurationName);
}
/**
* Forces the platform configuration to resolve and then uses that to populate platform imports.
*/
public PlatformImports getPlatformImports() {
this.getPlatformConfiguration().getResolvedConfiguration();
return platformImports.get(this.platformImportName);
}
private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration,
Collection<ExtensionDependency> knownExtensions) {
Set<ExtensionDependency> firstLevelExtensions = new HashSet<>();
Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration()
.getFirstLevelModuleDependencies();
Set<String> visitedArtifacts = new HashSet<>();
for (ResolvedDependency firstLevelModuleDependency : firstLevelModuleDependencies) {
firstLevelExtensions
.addAll(collectQuarkusExtensions(firstLevelModuleDependency, visitedArtifacts, knownExtensions));
}
return firstLevelExtensions;
}
private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts,
Collection<ExtensionDependency> knownExtensions) {
String artifactKey = String.format("%s:%s", dependency.getModuleGroup(), dependency.getModuleName());
if (!visitedArtifacts.add(artifactKey)) {
return Collections.emptySet();
}
Set<ExtensionDependency> extensions = new LinkedHashSet<>();
ExtensionDependency extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(),
dependency.getModuleVersion(), knownExtensions);
if (extension != null) {
extensions.add(extension);
} else {
for (ResolvedDependency child : dependency.getChildren()) {
extensions.addAll(collectQuarkusExtensions(child, visitedArtifacts, knownExtensions));
}
}
return extensions;
}
private ExtensionDependency getExtensionOrNull(String group, String artifact, String version,
Collection<ExtensionDependency> knownExtensions) {
for (ExtensionDependency knownExtension : knownExtensions) {
if (group.equals(knownExtension.getGroup()) && artifact.equals(knownExtension.getName())
&& version.equals(knownExtension.getVersion())) {
return knownExtension;
}
}
return null;
}
} |
Why do we need to get it again? We are still inside the lock | private boolean deployTo(Move move) {
ApplicationId application = move.node.allocation().get().owner();
try (MaintenanceDeployment deployment = new MaintenanceDeployment(application, deployer, nodeRepository())) {
if ( ! deployment.isValid()) return false;
boolean couldMarkRetiredNow = markWantToRetire(Optional.of(move.node), true);
if ( ! couldMarkRetiredNow) return false;
Optional<Node> expectedNewNode = Optional.empty();
try {
if ( ! deployment.prepare()) return false;
expectedNewNode =
nodeRepository().getNodes(application, Node.State.reserved).stream()
.filter(node -> node.hasParent(move.toHost.hostname()))
.filter(node -> node.allocation().get().membership().cluster().id().equals(move.node.allocation().get().membership().cluster().id()))
.findAny();
if (expectedNewNode.isEmpty()) return false;
if ( ! deployment.activate()) return false;
log.info("Rebalancer redeployed " + application + " to " + move);
return true;
}
finally {
markWantToRetire(nodeRepository().getNode(move.node.hostname()), false);
if (expectedNewNode.isPresent()) {
Optional<Node> reservedNewNode = nodeRepository().getNode(expectedNewNode.get().hostname(), Node.State.reserved);
reservedNewNode.ifPresent(reserved -> nodeRepository().setDirty(reserved, Agent.system, "Expired by Rebalancer"));
}
}
}
} | Optional<Node> reservedNewNode = nodeRepository().getNode(expectedNewNode.get().hostname(), Node.State.reserved); | private boolean deployTo(Move move) {
ApplicationId application = move.node.allocation().get().owner();
try (MaintenanceDeployment deployment = new MaintenanceDeployment(application, deployer, nodeRepository())) {
if ( ! deployment.isValid()) return false;
boolean couldMarkRetiredNow = markWantToRetire(move.node, true);
if ( ! couldMarkRetiredNow) return false;
Optional<Node> expectedNewNode = Optional.empty();
try {
if ( ! deployment.prepare()) return false;
expectedNewNode =
nodeRepository().getNodes(application, Node.State.reserved).stream()
.filter(node -> node.hasParent(move.toHost.hostname()))
.filter(node -> node.allocation().get().membership().cluster().id().equals(move.node.allocation().get().membership().cluster().id()))
.findAny();
if (expectedNewNode.isEmpty()) return false;
if ( ! deployment.activate()) return false;
log.info("Rebalancer redeployed " + application + " to " + move);
return true;
}
finally {
markWantToRetire(move.node, false);
if (expectedNewNode.isPresent()) {
Optional<Node> reservedNewNode = nodeRepository().getNode(expectedNewNode.get().hostname(), Node.State.reserved);
reservedNewNode.ifPresent(reserved -> nodeRepository().setDirty(reserved, Agent.system, "Expired by Rebalancer"));
}
}
}
} | class Rebalancer extends Maintainer {
private final Deployer deployer;
private final HostResourcesCalculator hostResourcesCalculator;
private final Optional<HostProvisioner> hostProvisioner;
private final Metric metric;
private final Clock clock;
public Rebalancer(Deployer deployer,
NodeRepository nodeRepository,
HostResourcesCalculator hostResourcesCalculator,
Optional<HostProvisioner> hostProvisioner,
Metric metric,
Clock clock,
Duration interval) {
super(nodeRepository, interval);
this.deployer = deployer;
this.hostResourcesCalculator = hostResourcesCalculator;
this.hostProvisioner = hostProvisioner;
this.metric = metric;
this.clock = clock;
}
@Override
protected void maintain() {
if (hostProvisioner.isPresent()) return;
NodeList allNodes = nodeRepository().list();
updateSkewMetric(allNodes);
if ( ! zoneIsStable(allNodes)) return;
Move bestMove = findBestMove(allNodes);
if (bestMove == Move.none) return;
deployTo(bestMove);
}
/** We do this here rather than in MetricsReporter because it is expensive and frequent updates are unnecessary */
private void updateSkewMetric(NodeList allNodes) {
DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
double totalSkew = 0;
int hostCount = 0;
for (Node host : allNodes.nodeType((NodeType.host)).state(Node.State.active)) {
hostCount++;
totalSkew += Node.skew(host.flavor().resources(), capacity.freeCapacityOf(host));
}
metric.set("hostedVespa.docker.skew", totalSkew/hostCount, null);
}
private boolean zoneIsStable(NodeList allNodes) {
NodeList active = allNodes.state(Node.State.active);
if (active.stream().anyMatch(node -> node.allocation().get().membership().retired())) return false;
if (active.stream().anyMatch(node -> node.status().wantToRetire())) return false;
return true;
}
/**
* Find the best move to reduce allocation skew and returns it.
* Returns Move.none if no moves can be made to reduce skew.
*/
private Move findBestMove(NodeList allNodes) {
DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
Move bestMove = Move.none;
for (Node node : allNodes.nodeType(NodeType.tenant).state(Node.State.active)) {
if (node.parentHostname().isEmpty()) continue;
if (node.allocation().get().owner().instance().isTester()) continue;
for (Node toHost : allNodes.nodeType(NodeType.host).state(NodePrioritizer.ALLOCATABLE_HOST_STATES)) {
if (toHost.hostname().equals(node.parentHostname().get())) continue;
if ( ! capacity.freeCapacityOf(toHost).satisfies(node.flavor().resources())) continue;
double skewReductionAtFromHost = skewReductionByRemoving(node, allNodes.parentOf(node).get(), capacity);
double skewReductionAtToHost = skewReductionByAdding(node, toHost, capacity);
double netSkewReduction = skewReductionAtFromHost + skewReductionAtToHost;
if (netSkewReduction > bestMove.netSkewReduction)
bestMove = new Move(node, toHost, netSkewReduction);
}
}
return bestMove;
}
/** Returns true only if this operation changes the state of the wantToRetire flag */
private boolean markWantToRetire(Optional<Node> node, boolean wantToRetire) {
if (node.isEmpty()) return false;
try (Mutex lock = nodeRepository().lock(node.get())) {
Optional<Node> nodeToMove = nodeRepository().getNode(node.get().hostname());
if (nodeToMove.isEmpty()) return false;
if (nodeToMove.get().state() != Node.State.active) return false;
if (nodeToMove.get().status().wantToRetire() == wantToRetire) return false;
nodeRepository().write(nodeToMove.get().withWantToRetire(wantToRetire, Agent.system, clock.instant()), lock);
return true;
}
}
/**
* Try a redeployment to effect the chosen move.
* If it can be done, that's ok; we'll try this or another move later.
*
* @return true if the move was done, false if it couldn't be
*/
private double skewReductionByRemoving(Node node, Node fromHost, DockerHostCapacity capacity) {
NodeResources freeHostCapacity = capacity.freeCapacityOf(fromHost);
double skewBefore = Node.skew(fromHost.flavor().resources(), freeHostCapacity);
double skewAfter = Node.skew(fromHost.flavor().resources(), freeHostCapacity.add(node.flavor().resources().justNumbers()));
return skewBefore - skewAfter;
}
private double skewReductionByAdding(Node node, Node toHost, DockerHostCapacity capacity) {
NodeResources freeHostCapacity = capacity.freeCapacityOf(toHost);
double skewBefore = Node.skew(toHost.flavor().resources(), freeHostCapacity);
double skewAfter = Node.skew(toHost.flavor().resources(), freeHostCapacity.subtract(node.flavor().resources().justNumbers()));
return skewBefore - skewAfter;
}
private static class Move {
static final Move none = new Move(null, null, 0);
final Node node;
final Node toHost;
final double netSkewReduction;
Move(Node node, Node toHost, double netSkewReduction) {
this.node = node;
this.toHost = toHost;
this.netSkewReduction = netSkewReduction;
}
@Override
public String toString() {
return "move " +
( node == null ? "none" :
(node.hostname() + " to " + toHost + " [skew reduction " + netSkewReduction + "]"));
}
}
private static class MaintenanceDeployment implements Closeable {
private static final Logger log = Logger.getLogger(MaintenanceDeployment.class.getName());
private final ApplicationId application;
private final Optional<Mutex> lock;
private final Optional<Deployment> deployment;
public MaintenanceDeployment(ApplicationId application, Deployer deployer, NodeRepository nodeRepository) {
this.application = application;
lock = tryLock(application, nodeRepository);
deployment = tryDeployment(lock, application, deployer, nodeRepository);
}
/** Return whether this is - as yet - functional and can be used to carry out the deployment */
public boolean isValid() {
return deployment.isPresent();
}
private Optional<Mutex> tryLock(ApplicationId application, NodeRepository nodeRepository) {
try {
return Optional.of(nodeRepository.lock(application, Duration.ofSeconds(1)));
}
catch (ApplicationLockException e) {
return Optional.empty();
}
}
private Optional<Deployment> tryDeployment(Optional<Mutex> lock,
ApplicationId application,
Deployer deployer,
NodeRepository nodeRepository) {
if (lock.isEmpty()) return Optional.empty();
if (nodeRepository.getNodes(application, Node.State.active).isEmpty()) return Optional.empty();
return deployer.deployFromLocalActive(application);
}
public boolean prepare() {
return doStep(() -> deployment.get().prepare());
}
public boolean activate() {
return doStep(() -> deployment.get().activate());
}
private boolean doStep(Runnable action) {
if ( ! isValid()) return false;
try {
action.run();
return true;
} catch (TransientException e) {
log.log(LogLevel.INFO, "Failed to deploy " + application + " with a transient error: " +
Exceptions.toMessageString(e));
return false;
} catch (RuntimeException e) {
log.log(LogLevel.WARNING, "Exception on maintenance deploy of " + application, e);
return false;
}
}
@Override
public void close() {
lock.ifPresent(l -> l.close());
}
}
} | class Rebalancer extends Maintainer {
private final Deployer deployer;
private final HostResourcesCalculator hostResourcesCalculator;
private final Optional<HostProvisioner> hostProvisioner;
private final Metric metric;
private final Clock clock;
public Rebalancer(Deployer deployer,
NodeRepository nodeRepository,
HostResourcesCalculator hostResourcesCalculator,
Optional<HostProvisioner> hostProvisioner,
Metric metric,
Clock clock,
Duration interval) {
super(nodeRepository, interval);
this.deployer = deployer;
this.hostResourcesCalculator = hostResourcesCalculator;
this.hostProvisioner = hostProvisioner;
this.metric = metric;
this.clock = clock;
}
@Override
protected void maintain() {
if (hostProvisioner.isPresent()) return;
NodeList allNodes = nodeRepository().list();
updateSkewMetric(allNodes);
if ( ! zoneIsStable(allNodes)) return;
Move bestMove = findBestMove(allNodes);
if (bestMove == Move.none) return;
deployTo(bestMove);
}
/** We do this here rather than in MetricsReporter because it is expensive and frequent updates are unnecessary */
private void updateSkewMetric(NodeList allNodes) {
DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
double totalSkew = 0;
int hostCount = 0;
for (Node host : allNodes.nodeType((NodeType.host)).state(Node.State.active)) {
hostCount++;
totalSkew += Node.skew(host.flavor().resources(), capacity.freeCapacityOf(host));
}
metric.set("hostedVespa.docker.skew", totalSkew/hostCount, null);
}
private boolean zoneIsStable(NodeList allNodes) {
NodeList active = allNodes.state(Node.State.active);
if (active.stream().anyMatch(node -> node.allocation().get().membership().retired())) return false;
if (active.stream().anyMatch(node -> node.status().wantToRetire())) return false;
return true;
}
/**
* Find the best move to reduce allocation skew and returns it.
* Returns Move.none if no moves can be made to reduce skew.
*/
private Move findBestMove(NodeList allNodes) {
DockerHostCapacity capacity = new DockerHostCapacity(allNodes, hostResourcesCalculator);
Move bestMove = Move.none;
for (Node node : allNodes.nodeType(NodeType.tenant).state(Node.State.active)) {
if (node.parentHostname().isEmpty()) continue;
if (node.allocation().get().owner().instance().isTester()) continue;
for (Node toHost : allNodes.nodeType(NodeType.host).state(NodePrioritizer.ALLOCATABLE_HOST_STATES)) {
if (toHost.hostname().equals(node.parentHostname().get())) continue;
if ( ! capacity.freeCapacityOf(toHost).satisfies(node.flavor().resources())) continue;
double skewReductionAtFromHost = skewReductionByRemoving(node, allNodes.parentOf(node).get(), capacity);
double skewReductionAtToHost = skewReductionByAdding(node, toHost, capacity);
double netSkewReduction = skewReductionAtFromHost + skewReductionAtToHost;
if (netSkewReduction > bestMove.netSkewReduction)
bestMove = new Move(node, toHost, netSkewReduction);
}
}
return bestMove;
}
/** Returns true only if this operation changes the state of the wantToRetire flag */
private boolean markWantToRetire(Node node, boolean wantToRetire) {
try (Mutex lock = nodeRepository().lock(node)) {
Optional<Node> nodeToMove = nodeRepository().getNode(node.hostname());
if (nodeToMove.isEmpty()) return false;
if (nodeToMove.get().state() != Node.State.active) return false;
if (nodeToMove.get().status().wantToRetire() == wantToRetire) return false;
nodeRepository().write(nodeToMove.get().withWantToRetire(wantToRetire, Agent.system, clock.instant()), lock);
return true;
}
}
/**
* Try a redeployment to effect the chosen move.
* If it can be done, that's ok; we'll try this or another move later.
*
* @return true if the move was done, false if it couldn't be
*/
private double skewReductionByRemoving(Node node, Node fromHost, DockerHostCapacity capacity) {
NodeResources freeHostCapacity = capacity.freeCapacityOf(fromHost);
double skewBefore = Node.skew(fromHost.flavor().resources(), freeHostCapacity);
double skewAfter = Node.skew(fromHost.flavor().resources(), freeHostCapacity.add(node.flavor().resources().justNumbers()));
return skewBefore - skewAfter;
}
private double skewReductionByAdding(Node node, Node toHost, DockerHostCapacity capacity) {
NodeResources freeHostCapacity = capacity.freeCapacityOf(toHost);
double skewBefore = Node.skew(toHost.flavor().resources(), freeHostCapacity);
double skewAfter = Node.skew(toHost.flavor().resources(), freeHostCapacity.subtract(node.flavor().resources().justNumbers()));
return skewBefore - skewAfter;
}
private static class Move {
static final Move none = new Move(null, null, 0);
final Node node;
final Node toHost;
final double netSkewReduction;
Move(Node node, Node toHost, double netSkewReduction) {
this.node = node;
this.toHost = toHost;
this.netSkewReduction = netSkewReduction;
}
@Override
public String toString() {
return "move " +
( node == null ? "none" :
(node.hostname() + " to " + toHost + " [skew reduction " + netSkewReduction + "]"));
}
}
private static class MaintenanceDeployment implements Closeable {
private static final Logger log = Logger.getLogger(MaintenanceDeployment.class.getName());
private final ApplicationId application;
private final Optional<Mutex> lock;
private final Optional<Deployment> deployment;
public MaintenanceDeployment(ApplicationId application, Deployer deployer, NodeRepository nodeRepository) {
this.application = application;
lock = tryLock(application, nodeRepository);
deployment = tryDeployment(lock, application, deployer, nodeRepository);
}
/** Return whether this is - as yet - functional and can be used to carry out the deployment */
public boolean isValid() {
return deployment.isPresent();
}
private Optional<Mutex> tryLock(ApplicationId application, NodeRepository nodeRepository) {
try {
return Optional.of(nodeRepository.lock(application, Duration.ofSeconds(1)));
}
catch (ApplicationLockException e) {
return Optional.empty();
}
}
private Optional<Deployment> tryDeployment(Optional<Mutex> lock,
ApplicationId application,
Deployer deployer,
NodeRepository nodeRepository) {
if (lock.isEmpty()) return Optional.empty();
if (nodeRepository.getNodes(application, Node.State.active).isEmpty()) return Optional.empty();
return deployer.deployFromLocalActive(application);
}
public boolean prepare() {
return doStep(() -> deployment.get().prepare());
}
public boolean activate() {
return doStep(() -> deployment.get().activate());
}
private boolean doStep(Runnable action) {
if ( ! isValid()) return false;
try {
action.run();
return true;
} catch (TransientException e) {
log.log(LogLevel.INFO, "Failed to deploy " + application + " with a transient error: " +
Exceptions.toMessageString(e));
return false;
} catch (RuntimeException e) {
log.log(LogLevel.WARNING, "Exception on maintenance deploy of " + application, e);
return false;
}
}
@Override
public void close() {
lock.ifPresent(l -> l.close());
}
}
} |
This is because `getKeyColumns` is not stable. I've fixed this by using `fullSchema`, PTAL. ``` /** * NOTE: The result key columns are not in the creating order because `nameToColumn` * uses unordered hashmap to keeps name to column's mapping. */ public List<Column> getKeyColumns() { return getColumns().stream().filter(Column::isKey).collect(Collectors.toList()); } ``` | public void testCreateMaterializedViewWithoutSortKeys_Partitioned_1() {
String sql = "create materialized view test_mv_sort_key1 " +
"partition by c_1_3 " +
"distributed by hash(c_1_3, c_1_0) buckets 10 " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"" +
") " +
"as select c_1_3, c_1_0, c_1_4, c_1_5 from t1";
List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
Assert.assertTrue(keyColumns.get(0).getName().equals("c_1_0"));
Assert.assertTrue(keyColumns.get(1).getName().equals("c_1_3"));
} | Assert.assertTrue(keyColumns.get(0).getName().equals("c_1_0")); | public void testCreateMaterializedViewWithoutSortKeys_Partitioned_1() {
String sql = "create materialized view test_mv_sort_key1 " +
"partition by c_1_3 " +
"distributed by hash(c_1_3, c_1_0) buckets 10 " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"" +
") " +
"as select c_1_3, c_1_0, c_1_4, c_1_5 from t1";
List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
Assert.assertTrue(keyColumns.get(0).getName().equals("c_1_3"));
Assert.assertTrue(keyColumns.get(1).getName().equals("c_1_0"));
} | class CreateMaterializedViewTest {
private static final Logger LOG = LogManager.getLogger(CreateMaterializedViewTest.class);
// Lets individual tests declare expected exceptions declaratively.
@Rule
public ExpectedException expectedException = ExpectedException.none();
// Exposes the currently running test's name (useful for diagnostics).
@Rule
public TestName name = new TestName();
// Scratch directory for the mocked cluster; cleaned up after the class finishes.
@ClassRule
public static TemporaryFolder temp = new TemporaryFolder();
// Shared session/helper handles, initialized once in beforeClass().
private static ConnectContext connectContext;
private static StarRocksAssert starRocksAssert;
private static Database testDb;
private static GlobalStateMgr currentState;
@BeforeClass
public static void beforeClass() throws Exception {
    // Bootstrap a minimal single-node cluster in a temp dir, and tune config knobs
    // so background schedulers run fast enough for these tests.
    ConnectorPlanTestBase.doInit(temp.newFolder().toURI().toString());
    Config.alter_scheduler_interval_millisecond = 100;
    Config.dynamic_partition_enable = true;
    Config.dynamic_partition_check_interval_seconds = 1;
    Config.enable_experimental_mv = true;
    UtFrameUtils.createMinStarRocksCluster();
    connectContext = UtFrameUtils.createDefaultCtx();
    starRocksAssert = new StarRocksAssert(connectContext);
    // Some analysis paths require the statistics tables to exist.
    if (!starRocksAssert.databaseExist("_statistics_")) {
        StatisticsMetaManager m = new StatisticsMetaManager();
        m.createStatisticsTablesForTest();
    }
    // Base tables and views the individual tests build MVs on top of.
    starRocksAssert.withDatabase("test").useDatabase("test")
            // Date-range-partitioned aggregate table; main MV base table.
            .withTable("CREATE TABLE test.tbl1\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values [('2020-01-01'),('2020-02-01')),\n" +
                    " PARTITION p2 values [('2020-02-01'),('2020-03-01'))\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Upper-case twin of tbl1 for case-sensitivity tests.
            .withTable("CREATE TABLE test.TBL1 \n" +
                    "(\n" +
                    " K1 date,\n" +
                    " K2 int,\n" +
                    " V1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(K1)\n" +
                    "(\n" +
                    " PARTITION p1 values [('2020-01-01'),('2020-02-01')),\n" +
                    " PARTITION p2 values [('2020-02-01'),('2020-03-01'))\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(K2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Aggregate table covering MAX/MIN/SUM/HLL/BITMAP/PERCENTILE columns.
            .withTable("CREATE TABLE `aggregate_table_with_null` (\n" +
                    "`k1` date,\n" +
                    "`v2` datetime MAX,\n" +
                    "`v3` char(20) MIN,\n" +
                    "`v4` bigint SUM,\n" +
                    "`v8` bigint SUM,\n" +
                    "`v5` HLL HLL_UNION,\n" +
                    "`v6` BITMAP BITMAP_UNION,\n" +
                    "`v7` PERCENTILE PERCENTILE_UNION\n" +
                    ") ENGINE=OLAP\n" +
                    "AGGREGATE KEY(`k1`)\n" +
                    "COMMENT \"OLAP\"\n" +
                    "DISTRIBUTED BY HASH(`k1`) BUCKETS 3\n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ");")
            .withView("CREATE VIEW v1 AS SELECT * FROM aggregate_table_with_null;")
            // Int-range-partitioned table (partition key is k2, not a date).
            .withTable("CREATE TABLE test.tbl2\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k2)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('10'),\n" +
                    " PARTITION p2 values less than('20')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Unpartitioned table.
            .withTable("CREATE TABLE test.tbl3\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Multi-column range partitioning (k2, k3).
            .withTable("CREATE TABLE test.tbl4\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " k3 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k2,k3)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('20','30'),\n" +
                    " PARTITION p2 values less than('40','50')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Wide-typed duplicate-key table used by the sort-key MV tests.
            .withTable("CREATE TABLE `t1` (\n" +
                    " `c_1_0` decimal128(30, 4) NOT NULL COMMENT \"\",\n" +
                    " `c_1_1` boolean NOT NULL COMMENT \"\",\n" +
                    " `c_1_2` date NULL COMMENT \"\",\n" +
                    " `c_1_3` date NOT NULL COMMENT \"\",\n" +
                    " `c_1_4` double NULL COMMENT \"\",\n" +
                    " `c_1_5` double NULL COMMENT \"\",\n" +
                    " `c_1_6` datetime NULL COMMENT \"\",\n" +
                    " `c_1_7` ARRAY<int(11)> NULL COMMENT \"\",\n" +
                    " `c_1_8` smallint(6) NULL COMMENT \"\",\n" +
                    " `c_1_9` bigint(20) NOT NULL COMMENT \"\",\n" +
                    " `c_1_10` varchar(31) NOT NULL COMMENT \"\",\n" +
                    " `c_1_11` decimal128(22, 18) NULL COMMENT \"\",\n" +
                    " `c_1_12` boolean NULL COMMENT \"\"\n" +
                    ") ENGINE=OLAP \n" +
                    "DUPLICATE KEY(`c_1_0`, `c_1_1`, `c_1_2`, `c_1_3`)\n" +
                    "COMMENT \"OLAP\"\n" +
                    "PARTITION BY RANGE(`c_1_3`)\n" +
                    "(PARTITION p20000101 VALUES [('2000-01-01'), ('2010-12-31')),\n" +
                    "PARTITION p20101231 VALUES [('2010-12-31'), ('2021-12-30')),\n" +
                    "PARTITION p20211230 VALUES [('2021-12-30'), ('2032-12-29')))\n" +
                    "DISTRIBUTED BY HASH(`c_1_3`, `c_1_2`, `c_1_0`) BUCKETS 10 \n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\",\n" +
                    "\"in_memory\" = \"false\"\n" +
                    ");")
            // External MySQL table (connection details are never actually used).
            .withTable("CREATE EXTERNAL TABLE mysql_external_table\n" +
                    "(\n" +
                    " k1 DATE,\n" +
                    " k2 INT,\n" +
                    " k3 SMALLINT,\n" +
                    " k4 VARCHAR(2048),\n" +
                    " k5 DATETIME\n" +
                    ")\n" +
                    "ENGINE=mysql\n" +
                    "PROPERTIES\n" +
                    "(\n" +
                    " \"host\" = \"127.0.0.1\",\n" +
                    " \"port\" = \"3306\",\n" +
                    " \"user\" = \"mysql_user\",\n" +
                    " \"password\" = \"mysql_passwd\",\n" +
                    " \"database\" = \"mysql_db_test\",\n" +
                    " \"table\" = \"mysql_table_test\"\n" +
                    ");")
            // Second database for cross-database MV tests.
            .withDatabase("test2").useDatabase("test2")
            .withTable("CREATE TABLE test2.tbl3\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('2021-02-01'),\n" +
                    " PARTITION p2 values less than('2021-03-01')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            .withTable("CREATE TABLE test.tbl5\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " k3 int,\n" +
                    " v1 int,\n" +
                    " v2 int\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('2021-02-01'),\n" +
                    " PARTITION p2 values less than('2021-03-01')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Aggregate table with many partitions (START/END/EVERY syntax).
            .withTable("CREATE TABLE test.tbl_for_count\n" +
                    "(\n" +
                    " c_0_0 BIGINT NULL ,\n" +
                    " c_0_1 DATE NOT NULL ,\n" +
                    " c_0_2 DECIMAL(37, 5) NOT NULL,\n" +
                    " c_0_3 INT MAX NOT NULL ,\n" +
                    " c_0_4 DATE REPLACE_IF_NOT_NULL NOT NULL ,\n" +
                    " c_0_5 PERCENTILE PERCENTILE_UNION NOT NULL\n" +
                    ")\n" +
                    "AGGREGATE KEY (c_0_0,c_0_1,c_0_2)\n" +
                    "PARTITION BY RANGE(c_0_1)\n" +
                    "(\n" +
                    " START (\"2010-01-01\") END (\"2021-12-31\") EVERY (INTERVAL 219 day)\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH (c_0_2,c_0_1) BUCKETS 3\n" +
                    "properties('replication_num'='1');")
            .withTable("CREATE TABLE test.mocked_cloud_table\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values [('2020-01-01'),('2020-02-01')),\n" +
                    " PARTITION p2 values [('2020-02-01'),('2020-03-01'))\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            .useDatabase("test");
    starRocksAssert.withView("create view test.view_to_tbl1 as select * from test.tbl1;");
    // Cache global handles used throughout the tests.
    currentState = GlobalStateMgr.getCurrentState();
    testDb = currentState.getDb("test");
}
// Drops the materialized view with the given name through the SQL front end.
private void dropMv(String mvName) throws Exception {
    StatementBase parsed =
            UtFrameUtils.parseStmtWithNewParser("drop materialized view " + mvName, connectContext);
    new StmtExecutor(connectContext, parsed).execute();
}
// Force-drops the given table (bypasses the recycle bin) through the SQL front end.
private void dropTableForce(String tableName) throws Exception {
    StatementBase parsed =
            UtFrameUtils.parseStmtWithNewParser("drop table " + tableName + " force", connectContext);
    new StmtExecutor(connectContext, parsed).execute();
}
// Drops the given table through the SQL front end.
private void dropTable(String tableName) throws Exception {
    StatementBase parsed =
            UtFrameUtils.parseStmtWithNewParser("drop table " + tableName, connectContext);
    new StmtExecutor(connectContext, parsed).execute();
}
// Polls the first task run until it reaches a terminal state (SUCCESS or FAILED),
// sleeping 2s between checks, for at most 5 attempts. Returns the run-status list.
private List<TaskRunStatus> waitingTaskFinish() {
    TaskManager taskManager = GlobalStateMgr.getCurrentState().getTaskManager();
    List<TaskRunStatus> taskRuns = taskManager.showTaskRunStatus(null);
    for (int attempt = 0; attempt < 5; attempt++) {
        ThreadUtil.sleepAtLeastIgnoreInterrupts(2000L);
        Constants.TaskRunState state = taskRuns.get(0).getState();
        if (state == Constants.TaskRunState.FAILED || state == Constants.TaskRunState.SUCCESS) {
            break;
        }
        LOG.info("waiting for TaskRunState retryCount:" + (attempt + 1));
    }
    return taskRuns;
}
@Test
public void testFullCreate() throws Exception {
    // Stub DML execution so the MV refresh task completes without running a real plan.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 SECOND)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    try {
        MaterializedView materializedView = getMaterializedViewChecked(sql);
        // Partition info: a single expression-range partition on date_trunc('month', k1).
        PartitionInfo partitionInfo = materializedView.getPartitionInfo();
        Assert.assertEquals(1, partitionInfo.getPartitionColumns().size());
        Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
        ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
        Expr partitionExpr = expressionRangePartitionInfo.getPartitionExprs().get(0);
        Assert.assertTrue(partitionExpr instanceof FunctionCallExpr);
        FunctionCallExpr partitionFunctionCallExpr = (FunctionCallExpr) partitionExpr;
        Assert.assertEquals("date_trunc", partitionFunctionCallExpr.getFnName().getFunction());
        List<SlotRef> slotRefs = Lists.newArrayList();
        partitionFunctionCallExpr.collect(SlotRef.class, slotRefs);
        SlotRef partitionSlotRef = slotRefs.get(0);
        Assert.assertEquals("k1", partitionSlotRef.getColumnName());
        // Base-table bookkeeping: the MV tracks tbl1, and tbl1 tracks the MV back.
        List<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();
        Assert.assertEquals(1, baseTableInfos.size());
        Expr partitionRefTableExpr = materializedView.getPartitionRefTableExprs().get(0);
        List<SlotRef> tableSlotRefs = Lists.newArrayList();
        partitionRefTableExpr.collect(SlotRef.class, tableSlotRefs);
        SlotRef slotRef = tableSlotRefs.get(0);
        TableName baseTableName = slotRef.getTblNameWithoutAnalyzed();
        Assert.assertEquals(baseTableName.getDb(), testDb.getFullName());
        Table baseTable = testDb.getTable(baseTableName.getTbl());
        Assert.assertNotNull(baseTable);
        Assert.assertEquals(baseTableInfos.get(0).getTableId(), baseTable.getId());
        Assert.assertEquals(1, baseTable.getRelatedMaterializedViews().size());
        Column baseColumn = baseTable.getColumn(slotRef.getColumnName());
        Assert.assertNotNull(baseColumn);
        Assert.assertEquals("k1", baseColumn.getName());
        // Stored view definition is normalized (fully qualified names, explicit alias).
        Assert.assertEquals("SELECT `test`.`tb1`.`k1`, `test`.`tb1`.`k2` AS `s2`\n" +
                        "FROM `test`.`tbl1` AS `tb1`",
                materializedView.getViewDefineSql());
        TableProperty tableProperty = materializedView.getTableProperty();
        Assert.assertEquals(1, tableProperty.getReplicationNum().shortValue());
        Assert.assertEquals(OlapTable.OlapTableState.NORMAL, materializedView.getState());
        Assert.assertEquals(KeysType.DUP_KEYS, materializedView.getKeysType());
        Assert.assertEquals(Table.TableType.MATERIALIZED_VIEW,
                materializedView.getType());
        Assert.assertEquals(0, materializedView.getRelatedMaterializedViews().size(), 0);
        Assert.assertEquals(2, materializedView.getBaseSchema().size());
        Assert.assertTrue(materializedView.isActive());
        // Exercise partition add/drop synchronization between base table and MV.
        testFullCreateSync(materializedView, baseTable);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
// Verifies that adding/dropping a base-table partition is propagated to the MV by
// the refresh task. Invoked from testFullCreate; not a standalone @Test.
public void testFullCreateSync(MaterializedView materializedView, Table baseTable) throws Exception {
    TaskManager taskManager = GlobalStateMgr.getCurrentState().getTaskManager();
    String mvTaskName = TaskBuilder.getMvTaskName(materializedView.getId());
    List<TaskRunStatus> taskRuns = waitingTaskFinish();
    Assert.assertEquals(Constants.TaskRunState.SUCCESS, taskRuns.get(0).getState());
    // Both collections are live views; later size checks observe DDL effects.
    Collection<Partition> baseTablePartitions = baseTable.getPartitions();
    Collection<Partition> mvPartitions = materializedView.getPartitions();
    Assert.assertEquals(2, mvPartitions.size());
    Assert.assertEquals(baseTablePartitions.size(), mvPartitions.size());
    // Adding a base partition and refreshing must create the matching MV partition.
    String addPartitionSql = "ALTER TABLE test.tbl1 ADD PARTITION p3 values less than('2020-04-01');";
    new StmtExecutor(connectContext, addPartitionSql).execute();
    taskManager.executeTask(mvTaskName);
    waitingTaskFinish();
    Assert.assertEquals(3, baseTablePartitions.size());
    Assert.assertEquals(baseTablePartitions.size(), mvPartitions.size());
    // Dropping it again must remove the MV partition on the next refresh.
    String dropPartitionSql = "ALTER TABLE test.tbl1 DROP PARTITION p3\n";
    new StmtExecutor(connectContext, dropPartitionSql).execute();
    taskManager.executeTask(mvTaskName);
    waitingTaskFinish();
    Assert.assertEquals(2, mvPartitions.size());
    Assert.assertEquals(baseTablePartitions.size(), mvPartitions.size());
}
// MONTH is not a supported granularity for async refresh intervals, so analysis
// of this statement must throw.
@Test
public void testCreateAsync() {
    String start = LocalDateTime.now().plusSeconds(3).format(DateUtils.DATE_TIME_FORMATTER);
    final String createSql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + start + "') EVERY(INTERVAL 3 MONTH)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    Assert.assertThrows(AnalysisException.class,
            () -> UtFrameUtils.parseStmtWithNewParser(createSql, connectContext));
}
// Each clause (PARTITION BY, DISTRIBUTED BY, REFRESH) may appear at most once;
// repeating any of them must fail analysis.
@Test
public void testCreateAsyncMVWithDuplicatedProperty() {
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    // Duplicate PARTITION BY clause.
    String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    Assert.assertThrows(AnalysisException.class,
            () -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
    // Duplicate DISTRIBUTED BY clause.
    String sql2 = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    Assert.assertThrows(AnalysisException.class,
            () -> UtFrameUtils.parseStmtWithNewParser(sql2, connectContext));
    // Duplicate REFRESH clause.
    String sql3 = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    Assert.assertThrows(AnalysisException.class,
            () -> UtFrameUtils.parseStmtWithNewParser(sql3, connectContext));
}
// The PARTITION BY / DISTRIBUTED BY / REFRESH clauses may appear in any order;
// all three permutations below must parse successfully.
@Test
public void testCreateAsyncNormal() throws Exception {
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    // Order: partition, distributed, refresh.
    String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    // Order: distributed, partition, refresh.
    sql = "create materialized view mv1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "partition by date_trunc('month',k1)\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    // Order: refresh, distributed, partition.
    sql = "create materialized view mv1\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "partition by date_trunc('month',k1)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
// Interval unit keywords are case-insensitive ("day" must parse like "DAY").
@Test
public void testCreateAsyncLowercase() throws Exception {
    String start = LocalDateTime.now().plusSeconds(3).format(DateUtils.DATE_TIME_FORMATTER);
    UtFrameUtils.parseStmtWithNewParser(
            "create materialized view mv1\n" +
                    "partition by date_trunc('month',k1)\n" +
                    "distributed by hash(s2) buckets 10\n" +
                    "refresh async START('" + start + "') EVERY(INTERVAL 3 day)\n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ")\n" +
                    "as select tb1.k1, k2 s2 from tbl1 tb1;",
            connectContext);
}
// Without an explicit refresh clause, a partitioned MV defaults to MANUAL refresh.
@Test
public void testCreateAsyncWithSingleTable() throws Exception {
    String createSql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2)\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    CreateMaterializedViewStatement stmt =
            (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    RefreshSchemeClause refreshScheme = stmt.getRefreshSchemeDesc();
    Assert.assertEquals(MaterializedView.RefreshType.MANUAL, refreshScheme.getType());
}
// A bare CREATE MATERIALIZED VIEW (no partition/distribution/refresh clauses)
// parses as the legacy synchronous-MV statement type.
@Test
public void testCreateSyncWithSingleTable() throws Exception {
    StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(
            "create materialized view mv1\n" +
                    "as select tb1.k1, k2 s2 from tbl1 tb1;",
            connectContext);
    Assert.assertTrue(parsed instanceof CreateMaterializedViewStmt);
}
// End-to-end creation of an MV defined as a join of two base tables, partitioned
// by an alias (s1) of date_trunc on tbl1.k1.
@Test
public void testFullCreateMultiTables() throws Exception {
    // Stub DML execution so the refresh task completes without running a real plan.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    String sql = "create materialized view mv1\n" +
            "partition by s1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('9999-12-31') EVERY(INTERVAL 3 SECOND)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select date_trunc('month',tb1.k1) s1, tb2.k2 s2 from tbl1 tb1 join tbl2 tb2 on tb1.k2 = tb2.k2;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        Table mv1 = testDb.getTable("mv1");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        MaterializedView materializedView = (MaterializedView) mv1;
        // Partitioning by the alias resolves to a plain slot, not a function call.
        PartitionInfo partitionInfo = materializedView.getPartitionInfo();
        Assert.assertEquals(1, partitionInfo.getPartitionColumns().size());
        Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
        ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
        Expr partitionExpr = expressionRangePartitionInfo.getPartitionExprs().get(0);
        Assert.assertTrue(partitionExpr instanceof SlotRef);
        SlotRef partitionSlotRef = (SlotRef) partitionExpr;
        Assert.assertEquals("s1", partitionSlotRef.getColumnName());
        // Both join inputs are tracked as base tables.
        List<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();
        Assert.assertEquals(2, baseTableInfos.size());
        // The partition ref expression points back at tbl1.k1.
        Expr partitionRefTableExpr = materializedView.getPartitionRefTableExprs().get(0);
        List<SlotRef> slotRefs = Lists.newArrayList();
        partitionRefTableExpr.collect(SlotRef.class, slotRefs);
        SlotRef slotRef = slotRefs.get(0);
        TableName baseTableName = slotRef.getTblNameWithoutAnalyzed();
        Assert.assertEquals(baseTableName.getDb(), testDb.getFullName());
        Table baseTable = testDb.getTable(baseTableName.getTbl());
        Assert.assertNotNull(baseTable);
        Assert.assertTrue(baseTableInfos.stream().anyMatch(baseTableInfo ->
                baseTableInfo.getTableId() == baseTable.getId()));
        Assert.assertTrue(1 <= baseTable.getRelatedMaterializedViews().size());
        Column baseColumn = baseTable.getColumn(slotRef.getColumnName());
        Assert.assertNotNull(baseColumn);
        Assert.assertEquals("k1", baseColumn.getName());
        // Stored view definition is normalized (fully qualified, aliased).
        Assert.assertEquals(
                "SELECT date_trunc('month', `test`.`tb1`.`k1`) AS `s1`, `test`.`tb2`.`k2` AS `s2`\n" +
                        "FROM `test`.`tbl1` AS `tb1` INNER JOIN `test`.`tbl2` AS `tb2` ON `test`.`tb1`.`k2` = `test`.`tb2`.`k2`",
                materializedView.getViewDefineSql());
        TableProperty tableProperty = materializedView.getTableProperty();
        Assert.assertEquals(1, tableProperty.getReplicationNum().shortValue(), 1);
        Assert.assertEquals(OlapTable.OlapTableState.NORMAL, materializedView.getState());
        Assert.assertEquals(KeysType.DUP_KEYS, materializedView.getKeysType());
        Assert.assertEquals(Table.TableType.MATERIALIZED_VIEW,
                materializedView.getType());
        Assert.assertEquals(0, materializedView.getRelatedMaterializedViews().size());
        Assert.assertEquals(2, materializedView.getBaseSchema().size());
        Assert.assertTrue(materializedView.isActive());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
// An MV created without PARTITION BY gets a single partition named after the MV,
// and keeps its async refresh schedule (START + EVERY 3 SECOND).
@Test
public void testFullCreateNoPartition() throws Exception {
    // Stub DML execution so the refresh task completes without running a real plan.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    String sql = "create materialized view mv1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('9999-12-31') EVERY(INTERVAL 3 SECOND) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, tbl1.k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        Table mv1 = testDb.getTable("mv1");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        MaterializedView materializedView = (MaterializedView) mv1;
        // Unpartitioned MVs use SinglePartitionInfo with one partition named "mv1".
        PartitionInfo partitionInfo = materializedView.getPartitionInfo();
        Assert.assertTrue(partitionInfo instanceof SinglePartitionInfo);
        Assert.assertEquals(1, materializedView.getPartitions().size());
        Partition partition = materializedView.getPartitions().iterator().next();
        Assert.assertNotNull(partition);
        Assert.assertEquals("mv1", partition.getName());
        List<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();
        Assert.assertEquals(1, baseTableInfos.size());
        Table baseTable = testDb.getTable(baseTableInfos.iterator().next().getTableId());
        Assert.assertTrue(1 <= baseTable.getRelatedMaterializedViews().size());
        Assert.assertEquals("SELECT `test`.`tbl1`.`k1`, `test`.`tbl1`.`k2`\nFROM `test`.`tbl1`",
                materializedView.getViewDefineSql());
        TableProperty tableProperty = materializedView.getTableProperty();
        Assert.assertEquals(1, tableProperty.getReplicationNum().shortValue());
        Assert.assertEquals(OlapTable.OlapTableState.NORMAL, materializedView.getState());
        Assert.assertEquals(KeysType.DUP_KEYS, materializedView.getKeysType());
        Assert.assertEquals(Table.TableType.MATERIALIZED_VIEW,
                materializedView.getType());
        Assert.assertEquals(0, materializedView.getRelatedMaterializedViews().size());
        Assert.assertEquals(2, materializedView.getBaseSchema().size());
        // The async refresh context mirrors the START/EVERY clause.
        MaterializedView.AsyncRefreshContext asyncRefreshContext =
                materializedView.getRefreshScheme().getAsyncRefreshContext();
        Assert.assertTrue(asyncRefreshContext.getStartTime() > 0);
        Assert.assertEquals("SECOND", asyncRefreshContext.getTimeUnit());
        Assert.assertEquals(3, asyncRefreshContext.getStep());
        Assert.assertTrue(materializedView.isActive());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
// BUCKETS may be omitted from DISTRIBUTED BY; creation must still succeed.
@Test
public void testCreateWithoutBuckets() throws Exception {
    // Stub DML execution so the refresh task does no real work.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    String createSql = "create materialized view mv1 " +
            "distributed by hash(k2)" +
            "refresh async START('9999-12-31') EVERY(INTERVAL 3 SECOND) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, tbl1.k2 from tbl1;";
    try {
        StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) parsed);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
// Partitioning by a column selected through a table alias must analyze cleanly.
@Test
public void testPartitionByTableAlias() throws Exception {
    String createSql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1 tb1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// Creating an MV with an unqualified name while no database is selected must fail
// with "No database selected".
@Test
public void testPartitionNoDataBase() {
    starRocksAssert.withoutUseDatabase();
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from test.tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown;
        // fail() (an AssertionError) is not caught by the Exception handler below.
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("No database selected"));
    } finally {
        // Restore the session database for subsequent tests.
        starRocksAssert.useDatabase("test");
    }
}
// With a fully qualified MV name (test.mv1), creation must analyze even though
// no database is currently selected on the session.
@Test
public void testPartitionHasDataBase() {
    starRocksAssert.withoutUseDatabase();
    String createSql = "create materialized view test.mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from test.tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        // Restore the session database for subsequent tests.
        starRocksAssert.useDatabase("test");
    }
}
// PARTITION BY on an arbitrary arithmetic expression (a+b) is unsupported and
// must be rejected with a specific error message.
@Test
public void testPartitionNoNeed() {
    String sql = "create materialized view mv1 " +
            "partition by (a+b) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('month',k1) ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown;
        // fail() (an AssertionError) is not caught by the Exception handler below.
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage(),
                e.getMessage().contains("Unsupported expr 'a + b' in PARTITION BY clause"));
    } finally {
        starRocksAssert.useDatabase("test");
    }
}
// EXPLAIN is not allowed as the defining query of a materialized view.
@Test
public void testCreateMVWithExplainQuery() {
    String createSql = "create materialized view mv1 " +
            "as explain select k1, v2 from aggregate_table_with_null;";
    try {
        Exception e = Assert.assertThrows(Exception.class,
                () -> UtFrameUtils.parseStmtWithNewParser(createSql, connectContext));
        Assert.assertEquals("Creating materialized view does not support explain query", e.getMessage());
    } finally {
        starRocksAssert.useDatabase("test");
    }
}
// When the partition column is an alias of a function result in the select list,
// the PARTITION BY reference resolves to the alias slot, not to a function expr.
@Test
public void testPartitionWithFunctionIn() {
    String createSql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('month',tbl1.k1) ss, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        ExpressionPartitionDesc desc = stmt.getPartitionExpDesc();
        Assert.assertFalse(desc.isFunction());
        Assert.assertTrue(desc.getExpr() instanceof SlotRef);
        Assert.assertEquals("ss", desc.getSlotRef().getColumnName());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// str2date(...) aliased in the select list can back a PARTITION BY on the alias;
// the partition ref then resolves to a plain slot rather than a function.
@Test
public void testPartitionWithFunctionInUseStr2Date() {
    String createSql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(a) buckets 10 " +
            "REFRESH DEFERRED MANUAL " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        ExpressionPartitionDesc desc = stmt.getPartitionExpDesc();
        Assert.assertFalse(desc.isFunction());
        Assert.assertTrue(desc.getExpr() instanceof SlotRef);
        Assert.assertEquals("ss", desc.getSlotRef().getColumnName());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// str2date over an incompatible base column (tbl0) must fail the MV partition
// function check.
@Test
public void testPartitionWithFunctionInUseStr2DateForError() {
    String sql = "create materialized view mv_error " +
            "partition by ss " +
            "distributed by hash(a) buckets 10 " +
            "REFRESH DEFERRED MANUAL " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl0;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown;
        // fail() (an AssertionError) is not caught by the Exception handler below.
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("Materialized view partition function str2date check failed"));
    }
}
// PARTITION BY date_trunc('month', ss) over an aliased column keeps the function
// form; the slot child of date_trunc must point at the alias.
@Test
public void testPartitionWithFunction() {
    String createSql = "create materialized view mv1 " +
            "partition by date_trunc('month',ss) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        ExpressionPartitionDesc desc = stmt.getPartitionExpDesc();
        Assert.assertTrue(desc.isFunction());
        Assert.assertTrue(desc.getExpr() instanceof FunctionCallExpr);
        // date_trunc's second argument is the partitioning slot.
        Assert.assertEquals(desc.getExpr().getChild(1), desc.getSlotRef());
        Assert.assertEquals("ss", desc.getSlotRef().getColumnName());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// Covers three str2date partitioning forms against an external JDBC table:
// direct PARTITION BY str2date(...), PARTITION BY on a str2date alias, and
// date_trunc over a str2date alias.
@Test
public void testPartitionWithFunctionUseStr2Date() throws Exception {
    {
        // Direct function form: the partition expr stays a FunctionCallExpr whose
        // first argument is the partitioning slot.
        String sql = "create materialized view mv1 " +
                "partition by str2date(d,'%Y%m%d') " +
                "distributed by hash(a) buckets 10 " +
                "REFRESH DEFERRED MANUAL " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select a, b, c, d from jdbc0.partitioned_db0.tbl1;";
        CreateMaterializedViewStatement createMaterializedViewStatement =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        ExpressionPartitionDesc partitionExpDesc = createMaterializedViewStatement.getPartitionExpDesc();
        Assert.assertTrue(partitionExpDesc.isFunction());
        Assert.assertTrue(partitionExpDesc.getExpr() instanceof FunctionCallExpr);
        Assert.assertEquals(partitionExpDesc.getExpr().getChild(0), partitionExpDesc.getSlotRef());
        Assert.assertEquals("d", partitionExpDesc.getSlotRef().getColumnName());
    }
    {
        // Partitioning by an alias of str2date must create successfully.
        String sql = "create materialized view mv_str2date " +
                "partition by p " +
                "distributed by hash(a) buckets 10 " +
                "REFRESH DEFERRED MANUAL " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select str2date(d,'%Y%m%d') as p, a, b, c, d from jdbc0.partitioned_db0.tbl1;";
        starRocksAssert.withMaterializedView(sql);
    }
    {
        // date_trunc over a str2date alias must also create successfully.
        String sql = "create materialized view mv_date_trunc_str2date " +
                "partition by date_trunc('month', p) " +
                "distributed by hash(a) buckets 10 " +
                "REFRESH DEFERRED MANUAL " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select str2date(d,'%Y%m%d') as p, a, b, c, d from jdbc0.partitioned_db0.tbl1;";
        starRocksAssert.withMaterializedView(sql);
    }
}
// PARTITION BY date_trunc('month', k1) directly on a base column (no alias)
// keeps the function form with k1 as the slot child.
@Test
public void testPartitionWithFunctionNoAlias() {
    String createSql = "create materialized view mv1 " +
            "partition by date_trunc('month',k1) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        ExpressionPartitionDesc desc = stmt.getPartitionExpDesc();
        Assert.assertTrue(desc.isFunction());
        Assert.assertTrue(desc.getExpr() instanceof FunctionCallExpr);
        // date_trunc's second argument is the partitioning slot.
        Assert.assertEquals(desc.getExpr().getChild(1), desc.getSlotRef());
        Assert.assertEquals("k1", desc.getSlotRef().getColumnName());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// Plain PARTITION BY k1: the partition expression is the slot itself.
@Test
public void testPartitionWithoutFunction() {
    String createSql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        ExpressionPartitionDesc desc = stmt.getPartitionExpDesc();
        Assert.assertFalse(desc.isFunction());
        Assert.assertTrue(desc.getExpr() instanceof SlotRef);
        Assert.assertEquals(desc.getExpr(), desc.getSlotRef());
        Assert.assertEquals("k1", desc.getSlotRef().getColumnName());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionWithFunctionIncludeFunction() {
    // A partition function wrapping another function alias (date_trunc over an
    // already-truncated select item) must be rejected by analysis.
    String sql = "create materialized view mv1 " +
            "partition by date_trunc('month',date_trunc('month',ss)) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('month',k1) ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for nested partition function");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage(), e.getMessage().contains("Unsupported expr 'date_trunc('month', " +
                "date_trunc('month', ss))' in PARTITION BY clause"));
    }
}
@Test
public void testPartitionWithFunctionIncludeFunctionInSelect() {
    // PARTITION BY date_trunc over an alias that is itself a function result:
    // the partition function must relate to a real column, so analysis must fail.
    String sql = "create materialized view mv1 " +
            "partition by date_trunc('month',ss) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('month',k1) ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for partition function over function alias");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 1, column 42 to line 1, column 63. " +
                "Detail message: Materialized view partition function date_trunc must related with column.",
                e.getMessage());
    }
}
@Test
public void testPartitionColumnNoBaseTablePartitionColumn() {
    // The MV partition column (s2 -> k2) is not the base table's partition column;
    // analysis must reject the statement.
    String sql = "create materialized view mv1 " +
            "partition by s2 " +
            "distributed by hash(s2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1 s1, k2 s2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for non-partition base column");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition column " +
                "in partition exp must be base table partition column.", e.getMessage());
    }
}
@Test
public void testPartitionColumnBaseTableHasMultiPartitionColumn() {
    // Base table tbl4 has multiple partition columns; MV partitioning only
    // supports a single base partition column, so analysis must fail.
    String sql = "create materialized view mv1 " +
            "partition by s2 " +
            "distributed by hash(s2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1 s1, k2 s2 from tbl4;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for multi-column base partitioning");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error. Detail message: Materialized view related base table " +
                "partition columns only supports single column.", e.getMessage());
    }
}
@Test
public void testBaseTableNoPartitionColumn() {
    // Base table tbl3 is unpartitioned; a partitioned MV over it must be rejected.
    String sql = "create materialized view mv1 " +
            "partition by s1 " +
            "distributed by hash(s2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1 s1, k2 s2 from tbl3;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for unpartitioned base table");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition column" +
                " in partition exp must be base table partition column.", e.getMessage());
    }
}
@Test
public void testPartitionByColumn() {
    // PARTITION BY an alias of a date_trunc select item; the statement should
    // analyze cleanly and record exactly one base table.
    String sql = "create materialized view mv1 " +
            "partition by s1 " +
            "distributed by hash(s2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Assert.assertEquals(1, stmt.getBaseTableInfos().size());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionByColumnNoAlias() {
    // PARTITION BY a plain select column (no alias) should analyze successfully.
    String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n\"replication_num\" = \"1\"\n) " +
            "as select k1, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionByColumnMixAlias1() {
    // Partition column is referenced bare in PARTITION BY while the select list
    // uses table-qualified names; resolution should still succeed (no exception).
    String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1, tbl1.k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionByColumnMixAlias2() {
    // Select list mixes a bare column (k1) with a qualified one (tbl1.k2);
    // PARTITION BY k1 should still resolve (no exception expected).
    String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, tbl1.k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionByColumnNotInSelect() {
    // Partition column s8 does not appear in the select list; analysis must fail.
    String sql = "create materialized view mv1 " +
            "partition by s8 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k2,sqrt(tbl1.k1) s1 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for unknown partition column");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition exp " +
                "column:s8 is not found in query statement.", e.getMessage());
    }
}
@Test
public void testPartitionByFunctionNotInSelect() {
    // The partition function's argument s8 is not in the select list; must fail.
    String sql = "create materialized view mv1 " +
            "partition by date_trunc('month',s8) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for unknown partition function argument");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition exp " +
                "column:s8 is not found in query statement.", e.getMessage());
    }
}
@Test
public void testPartitionByFunctionColumnNoExists() {
    // Partition expression references tb2.k1 which is not a select item of the
    // join query; the expression cannot be related to a column, so analysis fails.
    String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',tb2.k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, tb2.k2 s2 from tbl1 tb1 join tbl2 tb2 on tb1.k2 = tb2.k2;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for unresolved partition expression");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 2, column 13 to line 2, column 38. " +
                "Detail message: Materialized view partition exp: `tb2`.`k1` must related to column.",
                e.getMessage());
    }
}
@Test
public void testPartitionByAllowedFunctionNoNeedParams() {
    // date_trunc called with only one argument has no matching signature; the
    // select item itself must fail analysis.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc(tbl1.k1) ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for bad date_trunc signature");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 29. " +
                "Detail message: No matching function with signature: date_trunc(date).", e.getMessage());
    }
}
@Test
public void testPartitionByAllowedFunctionNoCorrParams() {
    // date_trunc with a format other than year|quarter|month|week|day must fail.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('%y%m',k1) ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for invalid date_trunc format");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error at line 3, column 29. " +
                "Detail message: date_trunc function can't support argument other than year|quarter|month|week|day.",
                e.getMessage());
    }
}
@Test
public void testPartitionByAllowedFunctionNoCorrParams1() {
    // date_trunc over a column that fails the partition-function check (k2 of tbl2)
    // must be rejected.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('month',k2) ss, k2 from tbl2;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for failed date_trunc check");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 32. " +
                "Detail message: Materialized view partition function date_trunc check failed.", e.getMessage());
    }
}
@Test
public void testPartitionByAllowedFunctionUseWeek() {
    // 'week' granularity is not supported for MV partitioning; must fail.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('week',k2) ss, k2 from tbl2;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for week granularity");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 31. " +
                "Detail message: The function date_trunc used by the materialized view for partition " +
                "does not support week formatting.", e.getMessage());
    }
}
@Test
public void testPartitionByNoAllowedFunction() {
    // sqrt is not an allowed partition function; analysis must fail.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k2, sqrt(tbl1.k1) ss from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for unsupported partition function");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 16 to line 3, column 28. " +
                "Detail message: Materialized view partition function sqrt is not support.", e.getMessage());
    }
}
@Test
public void testPartitionByNoAlias() {
    // A function partition expression must match an aliased select item; the
    // bare columns k1, k2 here do not provide one, so analysis must fail.
    String sql = "create materialized view mv1 " +
            "partition by date_trunc('month',k1) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select k1, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for missing select alias");
    } catch (Exception e) {
        Assert.assertEquals("Partition exp date_trunc('month', k1) must be alias of select item", e.getMessage());
    }
}
@Test
public void testDistributeKeyIsNotKey() {
    // Distribution column s2 is a plain value column alias; this should analyze
    // without error.
    String sql = "create materialized view mv1 " +
            "partition by s1 " +
            "distributed by hash(s2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n\"replication_num\" = \"1\"\n) " +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testDistributeByIsNull1() throws Exception {
    // DISTRIBUTED BY is omitted entirely; the statement should still parse and
    // analyze (any exception propagates and fails the test).
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss from tbl1;";
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
@Test
public void testDistributeByIsNull2() {
    // Same as testDistributeByIsNull1 but with allow_default_partition enabled;
    // the session flag is restored in finally so other tests are unaffected.
    connectContext.getSessionVariable().setAllowDefaultPartition(true);
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        connectContext.getSessionVariable().setAllowDefaultPartition(false);
    }
}
@Test
public void testRefreshAsyncOnlyEvery() throws Exception {
    // REFRESH ASYNC with only an EVERY clause (no START): the refresh scheme
    // should still be ASYNC and carry the 2-minute interval. The non-null
    // start-time assertion implies a default start is filled in during
    // analysis — NOTE(review): confirm against the analyzer's behavior.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async EVERY(INTERVAL 2 MINUTE)" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        CreateMaterializedViewStatement createMaterializedViewStatement =
                (CreateMaterializedViewStatement) statementBase;
        RefreshSchemeClause refreshSchemeDesc = createMaterializedViewStatement.getRefreshSchemeDesc();
        AsyncRefreshSchemeDesc asyncRefreshSchemeDesc = (AsyncRefreshSchemeDesc) refreshSchemeDesc;
        Assert.assertEquals(MaterializedView.RefreshType.ASYNC, refreshSchemeDesc.getType());
        Assert.assertNotNull(asyncRefreshSchemeDesc.getStartTime());
        Assert.assertEquals(2, ((IntLiteral) asyncRefreshSchemeDesc.getIntervalLiteral().getValue()).getValue());
        Assert.assertEquals("MINUTE",
                asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier().getDescription());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        // Clean up so later tests can reuse the mv1 name.
        dropMv("mv1");
    }
}
@Test
public void testRefreshAsyncStartBeforeCurr() {
    // A START time in the past is accepted; the refresh scheme is still ASYNC.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2016-12-31') EVERY(INTERVAL 1 HOUR)" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Assert.assertEquals(MaterializedView.RefreshType.ASYNC, stmt.getRefreshSchemeDesc().getType());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testRefreshManual() {
    // REFRESH MANUAL should produce a MANUAL refresh scheme.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh manual " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Assert.assertEquals(MaterializedView.RefreshType.MANUAL, stmt.getRefreshSchemeDesc().getType());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testNoRefresh() {
    // Without a refresh clause the statement parses as the legacy (synchronous)
    // CreateMaterializedViewStmt rather than CreateMaterializedViewStatement.
    String sql = "create materialized view mv1 " +
            "as select tbl1.k1 ss, k2 from tbl1 group by k1, k2;";
    try {
        StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Assert.assertTrue(parsed instanceof CreateMaterializedViewStmt);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testNoRefreshNoSelectStmt() {
    // A synchronous MV over a UNION query is not a plain select; must fail.
    String sql = "create materialized view mv1 " +
            "as select t1.k1 ss, t1.k2 from tbl1 t1 union select k1, k2 from tbl1 group by tbl1.k1, tbl1.k2;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for non-select query statement");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("Materialized view query statement only support select"));
    }
}
@Test
public void testSetOperation() throws Exception {
    // All set operators are accepted when both branches list explicit columns.
    for (String setOp : Arrays.asList("UNION", "UNION ALL", "INTERSECT", "EXCEPT")) {
        String sql = String.format("create materialized view mv1 " +
                "partition by ss " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ")" +
                "as select t1.k1 ss, t1.k2 from tbl1 t1 %s select k1, k2 from tbl2 t2;", setOp);
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    }
    // A `select *` branch in the set operation must be rejected.
    // (Messages below replace the former placeholder "hehe".)
    Assert.assertThrows("select * in a set operation branch should fail analysis",
            AnalysisException.class, () -> {
                String sql1 = "create materialized view mv1 " +
                        "partition by ss " +
                        "distributed by hash(k2) buckets 10 " +
                        "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                        "PROPERTIES (\n" +
                        "\"replication_num\" = \"1\"\n" +
                        ")" +
                        "as select t1.k1 ss, t1.k2 from tbl1 t1 union select * from tbl2 t2;";
                UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
            });
    Assert.assertThrows("select * in any branch of a multi-way union should fail analysis",
            AnalysisException.class, () -> {
                String sql1 = "create materialized view mv1 " +
                        "partition by ss " +
                        "distributed by hash(k2) buckets 10 " +
                        "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                        "PROPERTIES (\n" +
                        "\"replication_num\" = \"1\"\n" +
                        ")" +
                        "as select t1.k1 ss, t1.k2 from tbl1 t1 union select k1, k2 from tbl2 t2 union select * from tbl2 t3";
                UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
            });
}
@Test
public void testAsTableNotInOneDatabase() {
    // Base table lives in another database (test2); must be rejected.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select t1.k1 ss, t1.k2 from test2.tbl3 t1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected error for base table outside current database");
    } catch (Exception e) {
        Assert.assertEquals("Materialized view do not support table: tbl3 " +
                "do not exist in database: test", e.getMessage());
    }
}
@Test
public void testMySQLTable() throws Exception {
    // Unpartitioned MV over a MySQL external table is allowed...
    String sql1 = "create materialized view mv1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select tbl1.k1 ss, tbl1.k2 from mysql_external_table tbl1;";
    UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
    // ...but a *partitioned* MV over a MySQL base table must be rejected.
    String sql2 = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select tbl1.k1 ss, tbl1.k2 from mysql_external_table tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage()
                .contains("Materialized view with partition does not support base table type : MYSQL"));
    }
}
@Test
public void testCreateMvFromMv() {
    // Creating an MV on top of another (active) MV should succeed end to end:
    // first materialize base_mv, then build mv_from_base_mv over it.
    String sql1 = "create materialized view base_mv " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
    // Second MV uses base_mv as its base table.
    String sql2 = "create materialized view mv_from_base_mv " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from base_mv;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMvFromMv2() {
    // Same as testCreateMvFromMv but with a plain `refresh async` scheme
    // (no START/EVERY): MV-on-MV creation should still succeed.
    String sql1 = "create materialized view base_mv2 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1;";
    {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
    }
    String sql2 = "create materialized view mv_from_base_mv2 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from base_mv2;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMvFromInactiveMv() {
    // Building an MV on top of an *inactive* MV must be rejected.
    String sql1 = "create materialized view base_inactive_mv " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
    // Mark the freshly created MV inactive before using it as a base table.
    MaterializedView baseInactiveMv = ((MaterializedView) testDb.getTable("base_inactive_mv"));
    baseInactiveMv.setInactiveAndReason("");
    String sql2 = "create materialized view mv_from_base_inactive_mv " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from base_inactive_mv;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected error when base MV is inactive");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error at line 3, column 24. Detail message: " +
                "Create/Rebuild materialized view from inactive materialized view: base_inactive_mv.",
                e.getMessage());
    }
}
@Test
public void testAsHasStar() throws Exception {
    // `select k1 ss, *` expands the star: the MV schema should contain the alias
    // column first followed by every base-table column, one extra in total.
    String sql = "create materialized view testAsHasStar " +
            "partition by ss " +
            "distributed by hash(ss) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1 ss, * from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        MaterializedView mv = ((MaterializedView) testDb.getTable("testAsHasStar"));
        // Inactivate so dropping/cleanup doesn't trigger refresh machinery.
        mv.setInactiveAndReason("");
        List<Column> mvColumns = mv.getFullSchema();
        Table baseTable = testDb.getTable("tbl1");
        List<Column> baseColumns = baseTable.getFullSchema();
        Assert.assertEquals(mvColumns.size(), baseColumns.size() + 1);
        Assert.assertEquals("ss", mvColumns.get(0).getName());
        // Remaining MV columns mirror the base table's columns in order.
        for (int i = 1; i < mvColumns.size(); i++) {
            Assert.assertEquals(mvColumns.get(i).getName(),
                    baseColumns.get(i - 1).getName());
        }
    } catch (Exception e) {
        Assert.fail("Select * should be supported in materialized view");
    } finally {
        dropMv("testAsHasStar");
    }
}
@Test
public void testAsHasStarWithSameColumns() throws Exception {
    // Two star expansions over a self-join duplicate k1; creation must fail
    // with a duplicate-column error (Assert.fail() guards the success path).
    String sql = "create materialized view testAsHasStar " +
            "partition by ss " +
            "distributed by hash(ss) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select a.k1 ss, a.*, b.* from tbl1 as a join tbl1 as b on a.k1=b.k1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("Duplicate column name 'k1'"));
    } finally {
        dropMv("testAsHasStar");
    }
}
@Test
public void testMVWithSameColumns() throws Exception {
    // Explicitly selecting k2 from both sides of a self-join duplicates the
    // column name; creation must fail with a duplicate-column error.
    String sql = "create materialized view testAsHasStar " +
            "partition by ss " +
            "distributed by hash(ss) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select a.k1 ss, a.k2, b.k2 from tbl1 as a join tbl1 as b on a.k1=b.k1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("Duplicate column name 'k2'"));
    } finally {
        dropMv("testAsHasStar");
    }
}
@Test
public void testAsHasStarWithNondeterministicFunction() {
    // rand() inside the subquery's select list is nondeterministic; must fail.
    String sql = "create materialized view mv1 " +
            "distributed by hash(ss) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1 ss, * from (select *, rand(), current_date() from tbl1) as t;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for nondeterministic function");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 38 to line 3, column 43." +
                " Detail message: Materialized view query statement select item rand()" +
                " not supported nondeterministic function.", e.getMessage());
    }
}
@Test
public void testAsSelectItemAlias1() throws Exception {
    // An unaliased function select item is allowed; its generated column name is
    // the printed expression text "date_trunc('month', tbl1.k1)".
    String sql = "create materialized view testAsSelectItemAlias1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        MaterializedView mv = ((MaterializedView) testDb.getTable("testAsSelectItemAlias1"));
        // Inactivate so cleanup doesn't trigger refresh machinery.
        mv.setInactiveAndReason("");
        List<Column> mvColumns = mv.getFullSchema();
        Assert.assertEquals("date_trunc('month', tbl1.k1)", mvColumns.get(0).getName());
        Assert.assertEquals("k1", mvColumns.get(1).getName());
        Assert.assertEquals("k2", mvColumns.get(2).getName());
    } catch (Exception e) {
        Assert.fail("Materialized view query statement select item " +
                "date_trunc('month', `tbl1`.`k1`) should be supported");
    } finally {
        dropMv("testAsSelectItemAlias1");
    }
}
@Test
public void testAsSelectItemAlias2() throws Exception {
    // Same as testAsSelectItemAlias1 but through a UNION ALL: the unaliased
    // function item still yields the printed-expression column name.
    String sql = "create materialized view testAsSelectItemAlias2 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as " +
            "select date_trunc('month',tbl1.k1), k1, k2 from tbl1 union all " +
            "select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        MaterializedView mv = ((MaterializedView) testDb.getTable("testAsSelectItemAlias2"));
        // Inactivate so cleanup doesn't trigger refresh machinery.
        mv.setInactiveAndReason("");
        List<Column> mvColumns = mv.getFullSchema();
        Assert.assertEquals("date_trunc('month', tbl1.k1)", mvColumns.get(0).getName());
        Assert.assertEquals("k1", mvColumns.get(1).getName());
        Assert.assertEquals("k2", mvColumns.get(2).getName());
    } finally {
        dropMv("testAsSelectItemAlias2");
    }
}
@Test
public void testAsSelectItemAlias3() {
    // PARTITION BY references the raw expression text instead of an alias; the
    // partition expression cannot be related to a column, so creation must fail.
    String sql = "create materialized view testAsSelectItemAlias3 " +
            "partition by date_trunc('month',tbl1.k1) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected error for partition exp not related to column");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("Materialized view partition exp: " +
                "`tbl1`.`k1` must related to column"));
    }
}
@Test
public void testAsSelectItemAlias4() {
    // A function call inside DISTRIBUTED BY HASH(...) is a syntax error; the
    // parser must reject the statement.
    String sql = "create materialized view testAsSelectItemAlias4 " +
            "partition by k1 " +
            "distributed by hash(date_trunc('month',tbl1.k1)) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected parse error for function in DISTRIBUTED BY");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage()
                .contains("No viable statement for input 'distributed by hash(date_trunc('."));
    }
}
@Test
public void testAsSelectItemNoAliasWithNondeterministicFunction1() {
    // Unaliased rand() in the select list is nondeterministic; must fail.
    String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select rand(), date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for nondeterministic function");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 16. " +
                "Detail message: Materialized view query statement select item rand() not supported " +
                "nondeterministic function.",
                e.getMessage());
    }
}
@Test
public void testAsSelectItemHasNonDeterministicFunction1() {
    // Aliased rand() select item is still nondeterministic; must fail.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select rand() s1, date_trunc('month',tbl1.k1) ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for nondeterministic function");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 16. " +
                "Detail message: Materialized view query statement " +
                "select item rand() not supported nondeterministic function.", e.getMessage());
    }
}
@Test
public void testAsSelectItemHasNonDeterministicFunction2() {
    // rand() nested inside an arithmetic expression is still detected; must fail.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select k2, rand()+rand() s1, date_trunc('month',tbl1.k1) ss from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown.
        Assert.fail("expected analyzing error for nondeterministic function");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 15 to line 3, column 20. " +
                "Detail message: Materialized view query statement " +
                "select item rand() not supported nondeterministic function.", e.getMessage());
    }
}
@Test
public void testCreateColocateMvToExitGroup() throws Exception {
    // A colocate sync-MV created on a table already in a colocate group: the
    // table's MV index should become colocated, and the table should remain a
    // colocate table even after the MV is dropped.
    starRocksAssert.withTable("CREATE TABLE test.colocateTable\n" +
            "(\n" +
            "    k1 int,\n" +
            "    k2 int,\n" +
            "    k3 int\n" +
            ")\n" +
            "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\",\n" +
            "\"colocate_with\" = \"colocate_group1\"\n" +
            ");");
    // Mock isEnableColocateMVIndex: true only when the table is in a group and
    // every non-base index is a colocate MV index.
    new MockUp<OlapTable>() {
        @Mock
        public boolean isEnableColocateMVIndex() throws Exception {
            OlapTable table = (OlapTable) testDb.getTable("colocateTable");
            if (Strings.isNullOrEmpty(table.getColocateGroup())) {
                return false;
            }
            return table.getIndexIdToMeta().values().stream()
                    .filter(x -> x.getIndexId() != table.getBaseIndexId())
                    .allMatch(MaterializedIndexMeta::isColocateMVIndex);
        }
    };
    String sql = "create materialized view colocateMv\n" +
            "PROPERTIES (\n" +
            "\"colocate_mv\" = \"true\"\n" +
            ")\n" +
            "as select k1, k2 from colocateTable;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStmt) statementBase);
        // Sync MV creation runs as a rollup job; wait for it to complete.
        waitingRollupJobV2Finish();
        ColocateTableIndex colocateTableIndex = currentState.getColocateTableIndex();
        String fullGroupName = testDb.getId() + "_" + "colocate_group1";
        long tableId = colocateTableIndex.getTableIdByGroup(fullGroupName);
        Assert.assertNotEquals(-1, tableId);
        ColocateTableIndex.GroupId groupId = colocateTableIndex.getGroup(tableId);
        Assert.assertEquals(1, colocateTableIndex.getAllTableIds(groupId).size());
        OlapTable table = (OlapTable) testDb.getTable("colocateTable");
        Assert.assertTrue(table.isEnableColocateMVIndex());
        // Dropping the MV must not evict the table from its colocate group.
        dropMv("colocateMv");
        Assert.assertTrue(currentState.getColocateTableIndex().isColocateTable(tableId));
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        // Reset global colocate state so later tests start clean.
        currentState.getColocateTableIndex().clear();
    }
}
@Test
public void testCreateColocateMvWithoutGroup() throws Exception {
    // A base table without "colocate_with": creating a colocate sync MV on it
    // must be rejected at analysis time.
    starRocksAssert.withTable("CREATE TABLE test.colocateTable2\n" +
            "(\n" +
            "    k1 int,\n" +
            "    k2 int,\n" +
            "    k3 int\n" +
            ")\n" +
            "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ");");
    ColocateTableIndex groupIndex = currentState.getColocateTableIndex();
    // No group named "group2" may exist for this database yet.
    long registeredTableId = groupIndex.getTableIdByGroup(testDb.getId() + "_" + "group2");
    Assert.assertEquals(-1, registeredTableId);
    String createMvSql = "create materialized view colocateMv2\n" +
            "PROPERTIES (\n" +
            "\"colocate_mv\" = \"true\"\n" +
            ")\n" +
            "as select k1, k2 from colocateTable2;";
    Assert.assertThrows(AnalysisException.class, () -> {
        StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(createMvSql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStmt) parsed);
    });
    // Reset shared colocate state for subsequent tests.
    currentState.getColocateTableIndex().clear();
}
@Test
// Covers the interaction between colocate sync MVs and ALTER TABLE ... SET
// ("colocate_with"): moving to a new group keeps colocation; clearing the
// group disables the colocate MV index.
public void testColocateMvAlterGroup() throws Exception {
starRocksAssert.withTable("CREATE TABLE test.colocateTable3\n" +
"(\n" +
"    k1 int,\n" +
"    k2 int,\n" +
"    k3 int\n" +
")\n" +
"DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\",\n" +
"\"colocate_with\" = \"group3\"\n" +
");");
// Mock mirrors the real rule: enabled iff a group is set and all non-base
// indexes are colocate MV indexes.
new MockUp<OlapTable>() {
@Mock
public boolean isEnableColocateMVIndex() throws Exception {
OlapTable table = (OlapTable) testDb.getTable("colocateTable3");
if (Strings.isNullOrEmpty(table.getColocateGroup())) {
return false;
}
return table.getIndexIdToMeta().values().stream()
.filter(x -> x.getIndexId() != table.getBaseIndexId())
.allMatch(MaterializedIndexMeta::isColocateMVIndex);
}
};
String sql = "create materialized view colocateMv3\n" +
"PROPERTIES (\n" +
"\"colocate_mv\" = \"true\"\n" +
")\n" +
"as select k1, k2 from colocateTable3;";
String sql2 = "create materialized view colocateMv4\n" +
"PROPERTIES (\n" +
"\"colocate_mv\" = \"true\"\n" +
")\n" +
"as select k1, k2 from colocateTable3;";
try {
// Build both sync MVs; each is materialized by a rollup job.
StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
currentState.createMaterializedView((CreateMaterializedViewStmt) statementBase);
waitingRollupJobV2Finish();
statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
currentState.createMaterializedView((CreateMaterializedViewStmt) statementBase);
waitingRollupJobV2Finish();
ColocateTableIndex colocateTableIndex = currentState.getColocateTableIndex();
String fullGroupName = testDb.getId() + "_" + "group3";
System.out.println(fullGroupName);
long tableId = colocateTableIndex.getTableIdByGroup(fullGroupName);
Assert.assertNotEquals(-1, tableId);
OlapTable table = (OlapTable) testDb.getTable("colocateTable3");
Assert.assertTrue(table.isEnableColocateMVIndex());
ColocateTableIndex.GroupId groupId = colocateTableIndex.getGroup(tableId);
Assert.assertEquals(1, colocateTableIndex.getAllTableIds(groupId).size());
// Move the table to a different colocate group; colocation must survive.
sql = "alter table colocateTable3 set (\"colocate_with\" = \"groupNew\")";
statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
StmtExecutor stmtExecutor = new StmtExecutor(connectContext, statementBase);
stmtExecutor.execute();
Assert.assertEquals("groupNew", table.getColocateGroup());
Assert.assertTrue(table.isEnableColocateMVIndex());
Assert.assertTrue(colocateTableIndex.isColocateTable(tableId));
// Clear the group: the table leaves colocation and the MV index flag drops.
sql = "alter table colocateTable3 set (\"colocate_with\" = \"\")";
statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
stmtExecutor = new StmtExecutor(connectContext, statementBase);
stmtExecutor.execute();
Assert.assertFalse(colocateTableIndex.isColocateTable(tableId));
Assert.assertFalse(table.isEnableColocateMVIndex());
Assert.assertNotEquals("group1", table.getColocateGroup());
dropMv("colocateMv4");
dropMv("colocateMv3");
Assert.assertFalse(colocateTableIndex.isColocateTable(tableId));
} catch (Exception e) {
Assert.fail(e.getMessage());
} finally {
// Global state: always reset the colocate index.
currentState.getColocateTableIndex().clear();
}
}
@Test
public void testRandomColocate() {
    // Random distribution is incompatible with 'colocate_with'; creation must
    // raise a SemanticException.
    String createSql = "create materialized view mv1 " +
            "distributed by random " +
            "refresh async " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n," +
            "'colocate_with' = 'hehe' " +
            ")" +
            "as select k2, date_trunc('month',tbl1.k1) ss from tbl1;";
    Assert.assertThrows(SemanticException.class,
            () -> starRocksAssert.withMaterializedView(createSql));
}
@Test
public void testDisabled() {
    // With the feature flag off, creating an async MV must be rejected.
    Config.enable_materialized_view = false;
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test silently passed when no exception was thrown.
        // AssertionError from fail() is not caught by the Exception handler.
        Assert.fail();
    } catch (Exception e) {
        Assert.assertEquals("The experimental mv is disabled", e.getMessage());
    } finally {
        // Always restore the flag so later tests are unaffected.
        Config.enable_materialized_view = true;
    }
}
@Test
public void testExists() {
    // Creating an MV whose name collides with an existing table must fail.
    String sql = "create materialized view tbl1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        // The returned statement was previously stored in an unused local.
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test silently passed when no exception was thrown.
        Assert.fail();
    } catch (Exception e) {
        Assert.assertEquals("Table 'tbl1' already exists", e.getMessage());
    }
}
@Test
public void testIfNotExists() {
    // "if not exists" with a fresh name should parse without error.
    String createSql = "create materialized view if not exists mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testSupportedProperties() {
    // replication_num, storage_medium and storage_cooldown_time are all
    // accepted MV properties; parsing should succeed.
    String createSql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\",\n" +
            "\"storage_medium\" = \"SSD\",\n" +
            "\"storage_cooldown_time\" = \"2122-12-31 23:59:59\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
/**
 * Asserts that parsing {@code sql} fails and the error message contains
 * {@code msg}.
 */
private void assertParseFailWithException(String sql, String msg) {
    try {
        // The result was previously cast and stored in an unused local; the
        // cast could also throw ClassCastException and be mistaken for the
        // expected analysis failure. Parse-and-discard is both simpler and
        // stricter.
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Assert.fail();
    } catch (Exception e) {
        // Include the actual message in the failure output for diagnosis.
        Assert.assertTrue(e.getMessage(), e.getMessage().contains(msg));
    }
}
/**
 * Asserts that {@code sql} parses cleanly but creating the materialized view
 * fails with a message containing {@code msg}.
 */
private void assertCreateFailWithException(String sql, String msg) {
    CreateMaterializedViewStatement stmt = null;
    try {
        stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
    } catch (Exception e) {
        // Surface the unexpected parse error instead of a bare failure.
        Assert.fail(e.getMessage());
    }
    try {
        currentState.createMaterializedView(stmt);
        Assert.fail();
    } catch (Exception e) {
        // Include the actual message in the failure output for diagnosis.
        Assert.assertTrue(e.getMessage(), e.getMessage().contains(msg));
    }
}
@Test
public void testUnSupportedProperties() {
    // "short_key" is not a recognized materialized-view property.
    String createSql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"short_key\" = \"20\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    String expectedMessage = "Invalid parameter Analyze materialized properties failed because unknown " +
            "properties";
    assertCreateFailWithException(createSql, expectedMessage);
}
@Test
public void testCreateMVWithSessionProperties1() {
    // A "session."-prefixed property that maps to a real session variable
    // should be accepted and the MV created.
    String sql = "create materialized view mv_with_property1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"session.query_timeout\" = \"10000\"" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                        starRocksAssert.getCtx());
        currentState.createMaterializedView(stmt);
        Table mv1 = testDb.getTable("mv_with_property1");
        Assert.assertTrue(mv1 instanceof MaterializedView);
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMVWithSessionProperties2() {
    // A bare "query_timeout" (without the "session." prefix) is not a valid
    // MV property and must be rejected at creation time.
    String createSql = "create materialized view mv_with_property2 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"query_timeout\" = \"10000\"" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    String expectedMessage = "Invalid parameter Analyze materialized properties failed because unknown " +
            "properties";
    assertCreateFailWithException(createSql, expectedMessage);
}
@Test
public void testCreateMVWithSessionProperties3() {
    // A "session."-prefixed property naming a nonexistent session variable
    // must be rejected at creation time.
    String createSql = "create materialized view mv_with_property3 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"session.query_timeout1\" = \"10000\"" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    assertCreateFailWithException(createSql, "Unknown system variable 'query_timeout1'");
}
@Test
public void testNoDuplicateKey() {
    // An MV whose select list has no explicit duplicate key should still be
    // created successfully.
    String sql = "create materialized view testNoDuplicateKey " +
            "partition by s1 " +
            "distributed by hash(s2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
        currentState.createMaterializedView(stmt);
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
// ORDER BY on an async MV: the listed columns become the key/sort columns;
// referencing a column absent from the select list must fail analysis.
public void testCreateMvWithSortCols() throws Exception {
{
// Both output columns listed: both become keys, in the given order.
String sql = "create materialized view mv1\n" +
"distributed by hash(s2)\n" +
"order by (`k1`, `s2`)\n" +
"as select tb1.k1, k2 s2 from tbl1 tb1;";
CreateMaterializedViewStatement createMaterializedViewStatement = (CreateMaterializedViewStatement)
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
List<String> keyColumns = createMaterializedViewStatement.getMvColumnItems().stream()
.filter(Column::isKey).map(Column::getName)
.collect(Collectors.toList());
Assert.assertEquals(2, createMaterializedViewStatement.getSortKeys().size());
Assert.assertEquals(Arrays.asList("k1", "s2"), keyColumns);
}
{
// Only the aliased column: it alone becomes the key.
String sql = "create materialized view mv1\n" +
"distributed by hash(s2)\n" +
"order by (`s2`)\n" +
"as select tb1.k1, k2 s2 from tbl1 tb1;";
CreateMaterializedViewStatement createMaterializedViewStatement = (CreateMaterializedViewStatement)
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
List<String> keyColumns = createMaterializedViewStatement.getMvColumnItems().stream()
.filter(Column::isKey).map(Column::getName)
.collect(Collectors.toList());
Assert.assertEquals(Arrays.asList("s2"), keyColumns);
}
{
// Only the first column: it alone becomes the key.
String sql = "create materialized view mv1\n" +
"distributed by hash(s2)\n" +
"order by (`k1`)\n" +
"as select tb1.k1, k2 s2 from tbl1 tb1;";
CreateMaterializedViewStatement createMaterializedViewStatement = (CreateMaterializedViewStatement)
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
List<String> keyColumns = createMaterializedViewStatement.getMvColumnItems().stream()
.filter(Column::isKey).map(Column::getName)
.collect(Collectors.toList());
Assert.assertEquals(Arrays.asList("k1"), keyColumns);
}
{
// k3 is not in the select list: analysis must fail.
String sql = "create materialized view mv1\n" +
"distributed by hash(s2)\n" +
"order by (`k3`)\n" +
"as select tb1.k1, k2 s2 from tbl1 tb1;";
Assert.assertThrows(AnalysisException.class,
() -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
}
{
// NOTE(review): presumably c_1_7 cannot be used as a sort column for t1
// (e.g. unsupported type) — the expectation is an AnalysisException.
String sql = "create materialized view mv1\n" +
"distributed by hash(s2)\n" +
"order by (`c_1_7`)\n" +
"as select * from t1;";
Assert.assertThrows(AnalysisException.class,
() -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
}
}
@Test
public void testCreateMvWithInvalidSortCols() throws Exception {
    // "order by (s2, k1)" is reordered so the partition column k1 leads the
    // sort key.
    String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2)\n" +
            "order by (`s2`, `k1`)\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    List<Column> sortKeys = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected-vs-actual on failure, unlike the previous
    // assertTrue(x.equals(y)) which only said "false".
    Assert.assertEquals("k1", sortKeys.get(0).getName());
    Assert.assertEquals("s2", sortKeys.get(1).getName());
}
@Test
public void testCreateMvWithColocateGroup() throws Exception {
    // An async MV created with 'colocate_with' must be registered in the
    // db-scoped colocate group index.
    String groupName = name.getMethodName();
    String createSql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "'colocate_with' = '" + groupName + "'" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    CreateMaterializedViewStatement stmt =
            (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    currentState.createMaterializedView(stmt);
    // Group names are db-scoped: "<dbId>_<groupName>".
    long tableId = currentState.getColocateTableIndex()
            .getTableIdByGroup(testDb.getId() + "_" + groupName);
    Assert.assertTrue(tableId > 0);
}
@Test
public void testCreateMvWithHll() {
    // Sync MV with HLL_UNION / BITMAP_UNION / PERCENTILE_UNION aggregations
    // over nullable columns should parse cleanly.
    String createSql = "CREATE MATERIALIZED VIEW mv_function\n" +
            "AS SELECT k1,MAX(v2),MIN(v3),SUM(v4),HLL_UNION(v5),BITMAP_UNION(v6),PERCENTILE_UNION(v7)\n" +
            "FROM test.aggregate_table_with_null GROUP BY k1\n" +
            "ORDER BY k1 DESC";
    try {
        UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMvBaseOnView() {
    // Sync MVs cannot be built on top of a logical view.
    String createSql = "CREATE MATERIALIZED VIEW mv1\n" +
            "AS SELECT k1,v2 FROM test.v1";
    String expectedMessage =
            "Getting analyzing error. Detail message: Do not support alter non-OLAP table[v1].";
    try {
        UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertEquals(expectedMessage, e.getMessage());
    }
}
@Test
public void testAggregateTableWithCount() {
    // COUNT() in a sync MV over an aggregate-keys table is unsupported.
    String createSql = "CREATE MATERIALIZED VIEW v0 AS SELECT t0_57.c_0_1," +
            " COUNT(t0_57.c_0_0) , MAX(t0_57.c_0_2) , MAX(t0_57.c_0_3) , MIN(t0_57.c_0_4)" +
            " FROM tbl_for_count AS t0_57 GROUP BY t0_57.c_0_1 ORDER BY t0_57.c_0_1;";
    String expectedFragment = "Getting analyzing error. Detail message: Aggregate type table do not " +
            "support count function in materialized view.";
    try {
        UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains(expectedFragment));
    }
}
@Test
// Targeting a database that does not exist must fail during analysis.
public void testNoExistDb() {
String sql = "create materialized view unexisted_db1.mv1\n" +
"partition by s1\n" +
"distributed by hash(s2) buckets 10\n" +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
assertParseFailWithException(sql, "Can not find database:unexisted_db1.");
}
@Test
public void testMvNameInvalid() {
    // A very long MV name is still accepted — creation should succeed.
    String sql = "create materialized view mvklajksdjksjkjfksdlkfgkllksdjkgjsdjfjklsdjkfgjkldfkljgljkljklgja\n" +
            "partition by s1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
        currentState.createMaterializedView(stmt);
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testMvName1() {
    // An MV name starting with digits ("22mv") should be accepted.
    String sql = "create materialized view 22mv\n" +
            "partition by s1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
        currentState.createMaterializedView(stmt);
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionAndDistributionByColumnNameIgnoreCase() {
    // Column references in PARTITION BY / DISTRIBUTED BY are matched
    // case-insensitively (K1/K2 vs k1/k2).
    String createSql = "create materialized view mv1 " +
            "partition by K1 " +
            "distributed by hash(K2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, tbl1.k2 from tbl1;";
    try {
        CreateMaterializedViewStatement parsed =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
        currentState.createMaterializedView(parsed);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
// Selecting the same column twice (k1 and K1 — case-insensitive match) must
// be rejected as a duplicate output column.
public void testDuplicateColumn() {
String sql = "create materialized view mv1 " +
"partition by K1 " +
"distributed by hash(K2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, K1 from tbl1;";
assertParseFailWithException(sql, "Getting analyzing error. Detail message: Duplicate column name 'K1'.");
}
@Test
// A query with only constant select items references no base table, which an
// MV requires.
public void testNoBaseTable() {
String sql = "create materialized view mv1 " +
"partition by K1 " +
"distributed by hash(K2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select 1 as k1, 2 as k2";
assertParseFailWithException(sql, "Getting analyzing error. Detail message: Can not find base " +
"table in query statement.");
}
@Test
public void testUseCte() throws Exception {
    // A single CTE feeding the MV query should parse.
    String singleCteSql = "create materialized view mv1\n" +
            "DISTRIBUTED BY HASH(k1) BUCKETS 10\n" +
            "REFRESH ASYNC\n" +
            "AS with tbl as\n" +
            "(select * from tbl1)\n" +
            "SELECT k1,k2\n" +
            "FROM tbl;";
    UtFrameUtils.parseStmtWithNewParser(singleCteSql, connectContext);
    // Multiple CTEs joined in the final select should also parse.
    String multiCteSql = "create materialized view mv1\n" +
            "DISTRIBUTED BY HASH(k1) BUCKETS 10\n" +
            "REFRESH ASYNC AS " +
            "WITH cte1 AS (select k1, k2 from tbl1),\n" +
            "     cte2 AS (select count(*) cnt from tbl1)\n" +
            "SELECT cte1.k1, cte2.cnt\n" +
            "FROM cte1, cte2;";
    UtFrameUtils.parseStmtWithNewParser(multiCteSql, connectContext);
}
@Test
public void testUseSubQuery() throws Exception {
    // An async MV over a derived table (subquery) should parse.
    String sql = "create materialized view mv1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from (select * from tbl1) tbl";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testUseSubQueryWithPartition() throws Exception {
    // Partitioning by a column (or an expression over one) projected through a
    // subquery should be accepted.
    String sql1 = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from (select * from tbl1) tbl";
    String sql2 = "create materialized view mv2 " +
            "partition by kk " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('day', k1) as kk, k2 from (select * from tbl1) tbl";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
        UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testJoinWithPartition() throws Exception {
    // Partition expression over the join key of two derived tables should be
    // accepted.
    String sql = "create materialized view mv1 " +
            "partition by date_trunc('day', k1) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tb1.kk as k1, tb2.k2 as k2 from (select k1 as kk, k2 from tbl1) tb1 join (select * from tbl2) tb2 on tb1.kk = tb2.k1";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
// The partition column (k1) need not be the first select item; the resulting
// SlotRef must still bind to the correct slot (index 1 here).
public void testPartitionByNotFirstColumn() throws Exception {
starRocksAssert.withMaterializedView("create materialized view mv_with_partition_by_not_first_column" +
" partition by k1" +
" distributed by hash(k3) buckets 10" +
" as select k3, k1, sum(v1) as total from tbl5 group by k3, k1");
Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
Table table = db.getTable("mv_with_partition_by_not_first_column");
Assert.assertTrue(table instanceof MaterializedView);
MaterializedView mv = (MaterializedView) table;
PartitionInfo partitionInfo = mv.getPartitionInfo();
Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
List<Expr> partitionExpr = expressionRangePartitionInfo.getPartitionExprs();
Assert.assertEquals(1, partitionExpr.size());
Assert.assertTrue(partitionExpr.get(0) instanceof SlotRef);
SlotRef slotRef = (SlotRef) partitionExpr.get(0);
Assert.assertNotNull(slotRef.getSlotDescriptorWithoutCheck());
SlotDescriptor slotDescriptor = slotRef.getSlotDescriptorWithoutCheck();
// k1 is the second select item, so its slot id is 1.
Assert.assertEquals(1, slotDescriptor.getId().asInt());
}
@Test
public void testHiveMVWithoutPartition() throws Exception {
    // An MV over a Hive table without PARTITION BY ends up single-partitioned.
    starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW supplier_hive_mv " +
            "DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 10 REFRESH MANUAL AS select s_suppkey, s_nationkey," +
            "sum(s_acctbal) as total_s_acctbal, count(s_phone) as s_phone_count from hive0.tpch.supplier as supp " +
            "group by s_suppkey, s_nationkey order by s_suppkey;");
    Table created = starRocksAssert.getCtx().getGlobalStateMgr()
            .getDb("test").getTable("supplier_hive_mv");
    Assert.assertTrue(created instanceof MaterializedView);
    MaterializedView mv = (MaterializedView) created;
    Assert.assertTrue(mv.getPartitionInfo() instanceof SinglePartitionInfo);
    Assert.assertEquals(1, mv.getAllPartitions().size());
    starRocksAssert.dropMaterializedView("supplier_hive_mv");
}
@Test
// A join of two Hive tables without PARTITION BY also yields a
// single-partition MV.
public void testHiveMVJoinWithoutPartition() throws Exception {
starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW supplier_nation_hive_mv DISTRIBUTED BY " +
"HASH(`s_suppkey`) BUCKETS 10 REFRESH MANUAL AS select s_suppkey, n_name, sum(s_acctbal) " +
"as total_s_acctbal, count(s_phone) as s_phone_count from " +
"hive0.tpch.supplier as supp join hive0.tpch.nation group by s_suppkey, n_name order by s_suppkey;");
Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
Table table = db.getTable("supplier_nation_hive_mv");
Assert.assertTrue(table instanceof MaterializedView);
MaterializedView mv = (MaterializedView) table;
PartitionInfo partitionInfo = mv.getPartitionInfo();
Assert.assertTrue(partitionInfo instanceof SinglePartitionInfo);
Assert.assertEquals(1, mv.getAllPartitions().size());
// Clean up so the name can be reused by other tests.
starRocksAssert.dropMaterializedView("supplier_nation_hive_mv");
}
@Test
// Partitioning an MV by a Hive partition column (l_shipdate) should produce
// an expression-range partition on a DATE column.
public void testHiveMVWithPartition() throws Exception {
starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW lineitem_supplier_hive_mv \n" +
"partition by l_shipdate\n" +
"DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\n" +
"REFRESH MANUAL\n" +
"AS \n" +
"select l_shipdate, l_orderkey, l_quantity, l_linestatus, s_name from " +
"hive0.partitioned_db.lineitem_par join hive0.tpch.supplier where l_suppkey = s_suppkey\n");
Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
Table table = db.getTable("lineitem_supplier_hive_mv");
Assert.assertTrue(table instanceof MaterializedView);
MaterializedView mv = (MaterializedView) table;
PartitionInfo partitionInfo = mv.getPartitionInfo();
Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
Assert.assertEquals(1, expressionRangePartitionInfo.getPartitionColumns().size());
Column partColumn = expressionRangePartitionInfo.getPartitionColumns().get(0);
Assert.assertEquals("l_shipdate", partColumn.getName());
Assert.assertTrue(partColumn.getType().isDate());
// Clean up so the name can be reused by other tests.
starRocksAssert.dropMaterializedView("lineitem_supplier_hive_mv");
}
@Test
public void testHiveMVAsyncRefresh() throws Exception {
    // ASYNC refresh with an explicit interval is allowed on an external
    // (Hive) table; verify the refresh scheme is recorded.
    starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW supplier_hive_mv " +
            "DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 10 REFRESH ASYNC START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "AS select s_suppkey, s_nationkey, sum(s_acctbal) as total_s_acctbal, " +
            "count(s_phone) as s_phone_count from hive0.tpch.supplier as supp " +
            "group by s_suppkey, s_nationkey order by s_suppkey;");
    Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
    Table table = db.getTable("supplier_hive_mv");
    Assert.assertTrue(table instanceof MaterializedView);
    MaterializedView mv = (MaterializedView) table;
    PartitionInfo partitionInfo = mv.getPartitionInfo();
    Assert.assertTrue(partitionInfo instanceof SinglePartitionInfo);
    Assert.assertEquals(1, mv.getAllPartitions().size());
    MaterializedView.MvRefreshScheme mvRefreshScheme = mv.getRefreshScheme();
    // JUnit's contract is assertEquals(expected, actual); the original had
    // the arguments swapped, which garbles failure messages.
    Assert.assertEquals(MaterializedView.RefreshType.ASYNC, mvRefreshScheme.getType());
    MaterializedView.AsyncRefreshContext asyncRefreshContext = mvRefreshScheme.getAsyncRefreshContext();
    Assert.assertEquals("HOUR", asyncRefreshContext.getTimeUnit());
    starRocksAssert.dropMaterializedView("supplier_hive_mv");
}
@Test
public void testHiveMVAsyncRefreshWithException() throws Exception {
    // ASYNC refresh on an external table requires an explicit interval.
    expectedException.expect(DdlException.class);
    expectedException.expectMessage("Materialized view which type is ASYNC need to specify refresh interval " +
            "for external table");
    String createSql = "CREATE MATERIALIZED VIEW supplier_hive_mv " +
            "DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 10 REFRESH ASYNC AS select s_suppkey, s_nationkey," +
            "sum(s_acctbal) as total_s_acctbal, count(s_phone) as s_phone_count from hive0.tpch.supplier as supp " +
            "group by s_suppkey, s_nationkey order by s_suppkey;";
    starRocksAssert.withMaterializedView(createSql);
}
@Test
public void testJdbcTable() throws Exception {
starRocksAssert.withResource("create external resource jdbc0\n" +
"properties (\n" +
" \"type\"=\"jdbc\",\n" +
" \"user\"=\"postgres\",\n" +
" \"password\"=\"changeme\",\n" +
" \"jdbc_uri\"=\"jdbc:postgresql:
" \"driver_url\"=\"https:
" \"driver_class\"=\"org.postgresql.Driver\"\n" +
"); ");
starRocksAssert.withTable("create external table jdbc_tbl (\n" +
" `id` bigint NULL,\n" +
" `data` varchar(200) NULL\n" +
" ) ENGINE=jdbc\n" +
" properties (\n" +
" \"resource\"=\"jdbc0\",\n" +
" \"table\"=\"dest_tbl\"\n" +
" );");
starRocksAssert.withMaterializedView("create materialized view mv_jdbc " +
"distributed by hash(id) refresh deferred manual " +
"as select * from jdbc_tbl;");
}
@Test
public void testCreateRealtimeMV() throws Exception {
    // "refresh incremental" (realtime MV) over a Hive join should parse.
    String createSql = "create materialized view rtmv \n" +
            "refresh incremental " +
            "distributed by hash(l_shipdate) " +
            " as select l_shipdate, l_orderkey, l_quantity, l_linestatus, s_name from " +
            "hive0.partitioned_db.lineitem_par join hive0.tpch.supplier where l_suppkey = s_suppkey\n";
    UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
}
@Test
public void testCreateSyncMvFromSubquery() {
    // Sync MVs only support direct queries over a table; a subquery source
    // must be rejected.
    String sql = "create materialized view sync_mv_1 as" +
            " select k1, sum(k2) from (select k1, k2 from tbl1 group by k1, k2) a group by k1";
    try {
        starRocksAssert.withMaterializedView(sql);
        // Previously the test silently passed when creation succeeded.
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage(),
                e.getMessage().contains("Materialized view query statement only support direct query from table"));
    }
}
@Test
public void testCreateAsyncMv() {
    Config.enable_experimental_mv = true;
    // First column becomes the key, the second stays a value column.
    String sql = "create materialized view async_mv_1 distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    try {
        starRocksAssert.withMaterializedView(sql);
        MaterializedView mv = (MaterializedView) testDb.getTable("async_mv_1");
        Assert.assertTrue(mv.getFullSchema().get(0).isKey());
        Assert.assertFalse(mv.getFullSchema().get(1).isKey());
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
    // A schema where no column qualifies as a key must be rejected.
    String sql2 = "create materialized view async_mv_1 distributed by hash(c_1_4) as" +
            " select c_1_4 from t1";
    try {
        starRocksAssert.withMaterializedView(sql2);
        // Previously the test silently passed when creation succeeded.
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("All columns of materialized view cannot be used for keys."));
    }
}
@Test
public void testCollectAllTableAndView() {
    // The query references tbl1 and, via the NOT IN subquery, tbl2: expect
    // exactly two collected entries.
    String sql = "select k2,v1 from test.tbl1 where k2 > 0 and v1 not in (select v1 from test.tbl2 where k2 > 0);";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Map<TableName, Table> result = AnalyzerUtils.collectAllTableAndView(statementBase);
        // JUnit's contract is assertEquals(expected, actual); args were swapped.
        Assert.assertEquals(2, result.size());
    } catch (Exception e) {
        LOG.error("Test CollectAllTableAndView failed", e);
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMVWithDifferentDB() {
    // A sync MV may target db "test" while the session's current database is
    // a different one; the rollup lands on the base table in "test".
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        String sql = "create materialized view test.test_mv_use_different_tbl " +
                "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
        CreateMaterializedViewStmt stmt =
                (CreateMaterializedViewStmt) UtFrameUtils.parseStmtWithNewParser(sql, newStarRocksAssert.getCtx());
        // assertEquals(expected, actual): the original had the args swapped.
        Assert.assertEquals("test", stmt.getDBName());
        Assert.assertEquals("test_mv_use_different_tbl", stmt.getMVName());
        currentState.createMaterializedView(stmt);
        waitingRollupJobV2Finish();
        Table table = testDb.getTable("tbl5");
        Assert.assertNotNull(table);
        OlapTable olapTable = (OlapTable) table;
        // The base index plus at least the new rollup.
        Assert.assertTrue(olapTable.getIndexIdToMeta().size() >= 2);
        Assert.assertTrue(olapTable.getIndexIdToMeta().entrySet().stream()
                .anyMatch(x -> x.getValue().getKeysType().isAggregationFamily()));
        newStarRocksAssert.dropDatabase("test_mv_different_db");
        starRocksAssert.dropMaterializedView("test_mv_use_different_tbl");
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMVWithDifferentDB2() {
    // A sync MV may NOT live in a database different from its base table's.
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        Assert.assertThrows(AnalysisException.class, () -> {
            String sql = "create materialized view test_mv_different_db.test_mv_use_different_tbl " +
                    "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
            // Parsing itself is expected to throw; the result (previously
            // stored in an unused local) is intentionally discarded.
            UtFrameUtils.parseStmtWithNewParser(sql, newStarRocksAssert.getCtx());
        });
        newStarRocksAssert.dropDatabase("test_mv_different_db");
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateAsyncMVWithDifferentDB() {
    // Unlike sync MVs, an async MV can target db "test" while the session is
    // in a different database.
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        String sql = "create materialized view test.test_mv_use_different_tbl " +
                "distributed by hash(k1) " +
                "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                        newStarRocksAssert.getCtx());
        // assertEquals(expected, actual): the original had the args swapped.
        Assert.assertEquals("test", stmt.getTableName().getDb());
        Assert.assertEquals("test_mv_use_different_tbl", stmt.getTableName().getTbl());
        currentState.createMaterializedView(stmt);
        newStarRocksAssert.dropDatabase("test_mv_different_db");
        Table mv1 = testDb.getTable("test_mv_use_different_tbl");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        starRocksAssert.dropMaterializedView("test_mv_use_different_tbl");
    } catch (Exception e) {
        // Report the cause; a bare Assert.fail() hid the actual error.
        Assert.fail(e.getMessage());
    }
}
// An async MV created inside a new database while its base table lives in db
// "test": this cross-db layout is allowed for async MVs.
@Test
public void testCreateAsyncMVWithDifferentDB2() {
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        String sql = "create materialized view test_mv_different_db.test_mv_use_different_tbl " +
                "distributed by hash(k1) " +
                "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                        newStarRocksAssert.getCtx());
        Assert.assertEquals(stmt.getTableName().getDb(), "test_mv_different_db");
        Assert.assertEquals(stmt.getTableName().getTbl(), "test_mv_use_different_tbl");
        currentState.createMaterializedView(stmt);
        Database differentDb = currentState.getDb("test_mv_different_db");
        Table mv1 = differentDb.getTable("test_mv_use_different_tbl");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        newStarRocksAssert.dropDatabase("test_mv_different_db");
    } catch (Exception e) {
        // Surface the failure cause; a bare Assert.fail() hides it.
        Assert.fail(e.getMessage());
    }
}
// A sync MV whose select list contains a CASE WHEN expression: the expression
// materializes as a generated column ("mv_city") of VARCHAR type.
@Test
public void testCreateSyncMVWithCaseWhenComplexExpression1() {
    try {
        String t1 = "CREATE TABLE case_when_t1 (\n" +
                " k1 INT,\n" +
                " k2 char(20))\n" +
                "DUPLICATE KEY(k1)\n" +
                "DISTRIBUTED BY HASH(k1)\n" +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ")\n";
        starRocksAssert.withTable(t1);
        String mv1 = "create materialized view case_when_mv1 AS SELECT k1, " +
                "(CASE k2 WHEN 'beijing' THEN 'bigcity' ELSE 'smallcity' END) as city FROM case_when_t1;\n";
        starRocksAssert.withMaterializedView(mv1);
        waitingRollupJobV2Finish();
        Table table = testDb.getTable("case_when_t1");
        Assert.assertNotNull(table);
        OlapTable olapTable = (OlapTable) table;
        Assert.assertTrue(olapTable.getIndexIdToMeta().size() >= 2);
        // A non-aggregating MV keeps the duplicate-keys model on every index.
        Assert.assertTrue(olapTable.getIndexIdToMeta().entrySet().stream()
                .noneMatch(x -> x.getValue().getKeysType().isAggregationFamily()));
        List<Column> fullSchemas = table.getFullSchema();
        // assertEquals reports expected-vs-actual on failure, unlike assertTrue.
        Assert.assertEquals(3, fullSchemas.size());
        Column mvColumn = fullSchemas.get(2);
        Assert.assertEquals("mv_city", mvColumn.getName());
        Assert.assertTrue(mvColumn.getType().isVarchar());
        Assert.assertTrue(mvColumn.getType().getColumnSize() == 1048576);
        starRocksAssert.dropTable("case_when_t1");
    } catch (Exception e) {
        // Surface the failure cause; a bare Assert.fail() hides it.
        Assert.fail(e.getMessage());
    }
}
// A deferred refresh must not trigger any task execution at creation time.
@Test
public void testCreateAsync_Deferred(@Mocked TaskManager taskManager) throws Exception {
    new Expectations() {{
        taskManager.executeTask((String) any);
        times = 0;
    }};
    final String deferredAsync = "create materialized view deferred_async " +
            "refresh deferred async distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    final String deferredManual = "create materialized view deferred_manual " +
            "refresh deferred manual distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    final String deferredScheduled = "create materialized view deferred_scheduled " +
            "refresh deferred async every(interval 1 day) distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    starRocksAssert.withMaterializedView(deferredAsync);
    starRocksAssert.withMaterializedView(deferredManual);
    starRocksAssert.withMaterializedView(deferredScheduled);
}
// An immediate refresh must kick off exactly one task per created MV (3 total).
@Test
public void testCreateAsync_Immediate(@Mocked TaskManager taskManager) throws Exception {
    new Expectations() {{
        taskManager.executeTask((String) any);
        times = 3;
    }};
    final String asyncImmediate = "create materialized view async_immediate " +
            "refresh immediate async distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    final String manualImmediate = "create materialized view manual_immediate " +
            "refresh immediate manual distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    final String scheduleImmediate = "create materialized view schedule_immediate " +
            "refresh immediate async every(interval 1 day) distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    starRocksAssert.withMaterializedView(asyncImmediate);
    starRocksAssert.withMaterializedView(manualImmediate);
    starRocksAssert.withMaterializedView(scheduleImmediate);
}
// Omitting the refresh moment defaults to "immediate": one task per MV (3 total).
@Test
public void testCreateAsync_Immediate_Implicit(@Mocked TaskManager taskManager) throws Exception {
    new Expectations() {{
        taskManager.executeTask((String) any);
        times = 3;
    }};
    final String asyncImplicit = "create materialized view async_immediate_implicit " +
            "refresh async distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    final String manualImplicit = "create materialized view manual_immediate_implicit " +
            "refresh manual distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    final String scheduleImplicit = "create materialized view schedule_immediate_implicit " +
            "refresh async every(interval 1 day) distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    starRocksAssert.withMaterializedView(asyncImplicit);
    starRocksAssert.withMaterializedView(manualImplicit);
    starRocksAssert.withMaterializedView(scheduleImplicit);
}
// Creates a throwaway MV selecting the given expression and asserts that the
// expression text itself is kept as the MV column name; always drops the MV.
private void testMVColumnAlias(String expr) throws Exception {
    final String mvName = "mv_alias";
    try {
        String stmt = String.format("create materialized view %s " +
                "refresh deferred manual distributed by hash(c_1_9) as" +
                " select c_1_9, %s from t1", mvName, expr);
        starRocksAssert.withMaterializedView(stmt);
        Table mvTable = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test").getTable(mvName);
        List<String> names = mvTable.getBaseSchema().stream()
                .map(Column::getName)
                .collect(Collectors.toList());
        Assert.assertTrue(names.toString(), names.contains(expr));
    } finally {
        starRocksAssert.dropMaterializedView(mvName);
    }
}
// Each expression below must survive as a literal MV column alias.
@Test
public void testExprAlias() throws Exception {
    String[] exprs = {
            "c_1_9 + 1",
            "char_length(c_1_9)",
            "(char_length(c_1_9)) + 1",
            "(char_length(c_1_9)) + '$'",
            "c_1_9 + c_1_10",
    };
    for (String expr : exprs) {
        testMVColumnAlias(expr);
    }
}
// Looks up a table by database and name, asserting that it exists.
private Table getTable(String dbName, String mvName) {
    Table result = GlobalStateMgr.getCurrentState().getDb(dbName).getTable(mvName);
    Assert.assertNotNull(result);
    return result;
}
// Looks up a table and asserts it is a MaterializedView before casting.
private MaterializedView getMv(String dbName, String mvName) {
    Table candidate = getTable(dbName, mvName);
    Assert.assertTrue(candidate instanceof MaterializedView);
    return (MaterializedView) candidate;
}
// Verifies that MV column nullability is derived from the join type: columns
// coming from the optional side of an outer join become nullable in the MV,
// even though they are declared NOT NULL on the base tables.
@Test
public void testMvNullable() throws Exception {
starRocksAssert.withTable("create table emps (\n" +
" empid int not null,\n" +
" deptno int not null,\n" +
" name varchar(25) not null,\n" +
" salary double\n" +
")\n" +
"distributed by hash(`empid`) buckets 10\n" +
"properties (\n" +
"\"replication_num\" = \"1\"\n" +
");")
.withTable("create table depts (\n" +
" deptno int not null,\n" +
" name varchar(25) not null\n" +
")\n" +
"distributed by hash(`deptno`) buckets 10\n" +
"properties (\n" +
"\"replication_num\" = \"1\"\n" +
");");
{
// Left outer join: right-side (depts) columns become nullable.
starRocksAssert.withMaterializedView("create materialized view mv_nullable" +
" distributed by hash(`empid`) as" +
" select empid, d.deptno, d.name" +
" from emps e left outer join depts d on e.deptno = d.deptno");
MaterializedView mv = getMv("test", "mv_nullable");
Assert.assertFalse(mv.getColumn("empid").isAllowNull());
Assert.assertTrue(mv.getColumn("deptno").isAllowNull());
starRocksAssert.dropMaterializedView("mv_nullable");
}
{
// Right outer join: left-side (emps) columns become nullable; right side stays NOT NULL.
starRocksAssert.withMaterializedView("create materialized view mv_nullable" +
" distributed by hash(`empid`) as" +
" select empid, d.deptno, d.name" +
" from emps e right outer join depts d on e.deptno = d.deptno");
MaterializedView mv = getMv("test", "mv_nullable");
Assert.assertTrue(mv.getColumn("empid").isAllowNull());
Assert.assertFalse(mv.getColumn("deptno").isAllowNull());
Assert.assertFalse(mv.getColumn("name").isAllowNull());
starRocksAssert.dropMaterializedView("mv_nullable");
}
{
// Full outer join: columns from both sides become nullable.
starRocksAssert.withMaterializedView("create materialized view mv_nullable" +
" distributed by hash(`empid`) as" +
" select empid, d.deptno, d.name" +
" from emps e full outer join depts d on e.deptno = d.deptno");
MaterializedView mv = getMv("test", "mv_nullable");
Assert.assertTrue(mv.getColumn("empid").isAllowNull());
Assert.assertTrue(mv.getColumn("deptno").isAllowNull());
starRocksAssert.dropMaterializedView("mv_nullable");
}
starRocksAssert.dropTable("emps");
starRocksAssert.dropTable("depts");
}
// A sync MV can be queried directly via the [_SYNC_MV_] hint; the resulting
// plan must scan the MV rollup instead of the base table.
@Test
public void testSelectFromSyncMV() throws Exception {
String sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
// Sync MV creation runs as a background rollup job; wait for it to finish.
waitingRollupJobV2Finish();
sql = "select * from sync_mv1 [_SYNC_MV_];";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
// The explain output must show the sync_mv1 rollup being scanned.
Assert.assertTrue(explainString.contains("partitions=2/2\n" +
" rollup: sync_mv1\n" +
" tabletRatio=6/6"));
starRocksAssert.dropMaterializedView("sync_mv1");
}
// "aggregate_table_with_null" already exists as a table in db `test`, so
// creating a sync MV with the same name must be rejected.
@Test
public void testCreateSyncMV1() throws Exception {
    String sql = "create materialized view aggregate_table_with_null as select k1, sum(v1) from tbl1 group by k1;";
    CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
            parseStmtWithNewParser(sql, connectContext);
    // assertThrows replaces the original try/fail/catch(Throwable), whose broad
    // catch swallowed the AssertionError thrown by Assert.fail() itself.
    Throwable e = Assert.assertThrows(Throwable.class, () ->
            GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt));
    Assert.assertTrue(e.getMessage(),
            e.getMessage().contains("Table [aggregate_table_with_null] already exists in the db test"));
}
// Re-creating a sync MV with the same name on the same base table must fail.
@Test
public void testCreateSyncMV2() throws Exception {
    String sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
    CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
            parseStmtWithNewParser(sql, connectContext);
    GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
    waitingRollupJobV2Finish();
    OlapTable tbl1 = (OlapTable) (getTable("test", "tbl1"));
    Assert.assertNotNull(tbl1);
    Assert.assertTrue(tbl1.hasMaterializedIndex("sync_mv1"));
    // assertThrows replaces try/fail/catch(Throwable), whose broad catch
    // swallowed the AssertionError thrown by Assert.fail() itself.
    String dupSql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
    CreateMaterializedViewStmt dupStmt = (CreateMaterializedViewStmt) UtFrameUtils.
            parseStmtWithNewParser(dupSql, connectContext);
    Throwable e = Assert.assertThrows(Throwable.class, () ->
            GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(dupStmt));
    Assert.assertTrue(e.getMessage(), e.getMessage().contains("Materialized view[sync_mv1] already exists in " +
            "the table tbl1"));
    starRocksAssert.dropMaterializedView("sync_mv1");
}
// A sync MV name is unique across base tables: creating "sync_mv1" on tbl3
// must fail because it already exists on tbl1.
@Test
public void testCreateSyncMV3() throws Exception {
    String sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
    CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
            parseStmtWithNewParser(sql, connectContext);
    GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
    waitingRollupJobV2Finish();
    OlapTable tbl1 = (OlapTable) (getTable("test", "tbl1"));
    Assert.assertNotNull(tbl1);
    Assert.assertTrue(tbl1.hasMaterializedIndex("sync_mv1"));
    // assertThrows replaces try/fail/catch(Throwable), whose broad catch
    // swallowed the AssertionError thrown by Assert.fail() itself.
    String dupSql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl3 group by k1;";
    CreateMaterializedViewStmt dupStmt = (CreateMaterializedViewStmt) UtFrameUtils.
            parseStmtWithNewParser(dupSql, connectContext);
    Throwable e = Assert.assertThrows(Throwable.class, () ->
            GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(dupStmt));
    Assert.assertTrue(e.getMessage(), e.getMessage().contains("Materialized view[sync_mv1] already exists " +
            "in table tbl1"));
    starRocksAssert.dropMaterializedView("sync_mv1");
}
// Column-name case handling: the MV is defined with upper-case identifiers;
// both direct [_SYNC_MV_] queries and rewritten aggregations must resolve them.
@Test
public void testCreateSyncMV_WithUpperColumn() throws Exception {
String sql = "create materialized view UPPER_MV1 as select K1, sum(V1) from TBL1 group by K1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
// Sync MV creation runs as a background rollup job; wait for completion.
waitingRollupJobV2Finish();
{
// Direct query against the MV keeps the original (upper-case) column names.
sql = "select * from UPPER_MV1 [_SYNC_MV_];";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("PLAN FRAGMENT 0\n" +
" OUTPUT EXPRS:1: K1 | 2: mv_sum_V1\n" +
" PARTITION: UNPARTITIONED"));
}
{
// The equivalent aggregation on the base table is rewritten to scan the MV rollup.
sql = "select K1, sum(V1) from TBL1 group by K1";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("1:AGGREGATE (update serialize)\n" +
" | STREAMING\n" +
" | output: sum(4: mv_sum_V1)\n" +
" | group by: 1: K1\n" +
" | \n" +
" 0:OlapScanNode\n" +
" TABLE: TBL1\n" +
" PREAGGREGATION: ON\n" +
" partitions=2/2\n" +
" rollup: UPPER_MV1"));
}
starRocksAssert.dropMaterializedView("UPPER_MV1");
}
// Lower-case counterpart of testCreateSyncMV_WithUpperColumn: lower-case MV
// columns must resolve in both direct queries and rewritten aggregations,
// even when the query references them with a different case (K1 vs k1).
@Test
public void testCreateSyncMV_WithLowerColumn() throws Exception {
String sql = "create materialized view lower_mv1 as select k1, sum(v1) from tbl1 group by K1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
// Sync MV creation runs as a background rollup job; wait for completion.
waitingRollupJobV2Finish();
{
// Direct query against the MV keeps the original (lower-case) column names.
sql = "select * from lower_mv1 [_SYNC_MV_];";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("PLAN FRAGMENT 0\n" +
" OUTPUT EXPRS:1: k1 | 2: mv_sum_v1\n" +
" PARTITION: UNPARTITIONED"));
}
{
// The equivalent aggregation on the base table is rewritten to scan the MV rollup.
sql = "select K1, sum(v1) from tbl1 group by K1";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("1:AGGREGATE (update serialize)\n" +
" | STREAMING\n" +
" | output: sum(4: mv_sum_v1)\n" +
" | group by: 1: k1\n" +
" | \n" +
" 0:OlapScanNode\n" +
" TABLE: tbl1\n" +
" PREAGGREGATION: ON\n" +
" partitions=2/2\n" +
" rollup: lower_mv1"));
}
starRocksAssert.dropMaterializedView("lower_mv1");
}
// Checks which time_slice() granularities in the select list are compatible
// with a date_trunc('month', ...) MV partition expression: only the 5-minute
// case passes analysis; the year/month (and month + 'ceil') variants are
// rejected with AnalysisException.
@Test
public void testCreateAsyncDateTruncAndTimeSLice() throws Exception {
LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
{
// time_slice at 5-minute granularity: accepted.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month', k11)\n" +
"distributed by hash(s2) buckets 10\n" +
"refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
"') EVERY(INTERVAL 3 DAY)\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select time_slice(tb1.k1, interval 5 minute) as k11, k2 s2 from tbl1 tb1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// time_slice at 5-year granularity: rejected.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month', k11)\n" +
"distributed by hash(s2) buckets 10\n" +
"refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
"') EVERY(INTERVAL 3 DAY)\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select time_slice(tb1.k1, interval 5 year) as k11, k2 s2 from tbl1 tb1;";
Assert.assertThrows(AnalysisException.class,
() -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
}
{
// time_slice at 5-month granularity: rejected.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month', k11)\n" +
"distributed by hash(s2) buckets 10\n" +
"refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
"') EVERY(INTERVAL 3 DAY)\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select time_slice(tb1.k1, interval 5 month) as k11, k2 s2 from tbl1 tb1;";
Assert.assertThrows(AnalysisException.class,
() -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
}
{
// Same 5-month granularity with the 'ceil' boundary mode: still rejected.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month', k11)\n" +
"distributed by hash(s2) buckets 10\n" +
"refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
"') EVERY(INTERVAL 3 DAY)\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select time_slice(tb1.k1, interval 5 month, 'ceil') as k11, k2 s2 from tbl1 tb1;";
Assert.assertThrows(AnalysisException.class,
() -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
}
}
// The mv_rewrite_staleness_second property must be propagated into the MV
// metadata (getMaxMVRewriteStaleness).
@Test
public void testMVWithMaxRewriteStaleness() throws Exception {
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    String sql = "create materialized view mv_with_rewrite_staleness \n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 SECOND)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"," +
            "\"mv_rewrite_staleness_second\" = \"60\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    try {
        // getMaterializedViewChecked already returns MaterializedView; the
        // original Table local plus cast was redundant.
        MaterializedView materializedView = getMaterializedViewChecked(sql);
        // JUnit convention: expected value first, actual second (was swapped).
        Assert.assertEquals(60, materializedView.getMaxMVRewriteStaleness());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv_with_rewrite_staleness");
    }
}
// Async MVs may be defined over logical views, including nested views and
// views exposing a date_trunc-derived column; each variant must pass analysis.
@Test
public void testCreateMvWithView() throws Exception {
starRocksAssert.withView("create view view_1 as select tb1.k1, k2 s2 from tbl1 tb1;");
starRocksAssert.withView("create view view_2 as select v1.k1, v1.s2 from view_1 v1;");
starRocksAssert.withView("create view view_3 as select date_trunc('month',k1) d1, v1.s2 from view_1 v1;");
{
// MV directly on a view, partition expression applied in the MV.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select k1, s2 from view_1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// Same as above but referencing the view through an alias.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select v1.k1, v1.s2 from view_1 v1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// date_trunc computed in the MV select list and used as the partition column.
String sql = "create materialized view mv1\n" +
"partition by d1\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select date_trunc('month',k1) d1, v1.s2 from view_1 v1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// Partitioning on a column that is already a date_trunc expression inside the view.
String sql = "create materialized view mv1\n" +
"partition by d1\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select v3.d1, v3.s2 from view_3 v3;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// MV over a view that itself selects from another view (nested views).
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select v2.k1, v2.s2 from view_2 v2;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
}
// MV over a 3-way UNION ALL: the "total" column mixes an int literal (0) and a
// decimal column, so the MV column type must widen to decimal; "segment" is
// NOT NULL in every branch, so it must stay non-nullable in the MV.
@Test
public void testMvOnUnion() throws Exception {
starRocksAssert.withTable("CREATE TABLE `customer_nullable_1` (\n" +
" `c_custkey` int(11) NULL COMMENT \"\",\n" +
" `c_name` varchar(26) NULL COMMENT \"\",\n" +
" `c_address` varchar(41) NULL COMMENT \"\",\n" +
" `c_city` varchar(11) NULL COMMENT \"\",\n" +
" `c_nation` varchar(16) NULL COMMENT \"\",\n" +
" `c_region` varchar(13) NULL COMMENT \"\",\n" +
" `c_phone` varchar(16) NOT NULL COMMENT \"\",\n" +
" `c_mktsegment` varchar(11) NOT NULL COMMENT \"\"\n" +
") ENGINE=OLAP\n" +
"DUPLICATE KEY(`c_custkey`)\n" +
"COMMENT \"OLAP\"\n" +
"DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
");");
starRocksAssert.withTable("CREATE TABLE `customer_nullable_2` (\n" +
" `c_custkey` int(11) NULL COMMENT \"\",\n" +
" `c_name` varchar(26) NULL COMMENT \"\",\n" +
" `c_address` varchar(41) NULL COMMENT \"\",\n" +
" `c_city` varchar(11) NULL COMMENT \"\",\n" +
" `c_nation` varchar(16) NULL COMMENT \"\",\n" +
" `c_region` varchar(13) NULL COMMENT \"\",\n" +
" `c_phone` varchar(16) NOT NULL COMMENT \"\",\n" +
" `c_mktsegment` varchar(11) NOT NULL COMMENT \"\"\n" +
") ENGINE=OLAP\n" +
"DUPLICATE KEY(`c_custkey`)\n" +
"COMMENT \"OLAP\"\n" +
"DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
");");
// Third table adds a nullable decimal `c_total`, which drives the type widening.
starRocksAssert.withTable("\n" +
"CREATE TABLE `customer_nullable_3` (\n" +
" `c_custkey` int(11) NULL COMMENT \"\",\n" +
" `c_name` varchar(26) NULL COMMENT \"\",\n" +
" `c_address` varchar(41) NULL COMMENT \"\",\n" +
" `c_city` varchar(11) NULL COMMENT \"\",\n" +
" `c_nation` varchar(16) NULL COMMENT \"\",\n" +
" `c_region` varchar(13) NULL COMMENT \"\",\n" +
" `c_phone` varchar(16) NOT NULL COMMENT \"\",\n" +
" `c_mktsegment` varchar(11) NOT NULL COMMENT \"\",\n" +
" `c_total` decimal(19,6) null default \"0.0\"\n" +
") ENGINE=OLAP\n" +
"DUPLICATE KEY(`c_custkey`)\n" +
"COMMENT \"OLAP\"\n" +
"DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
");");
starRocksAssert.withMaterializedView("\n" +
"create materialized view customer_mv\n" +
"distributed by hash(`custkey`)\n" +
"as\n" +
"\n" +
"select\n" +
"\tc_custkey custkey,\n" +
"\tc_name name,\n" +
"\tc_phone phone,\n" +
"\t0 total,\n" +
"\t c_mktsegment segment\n" +
"from customer_nullable_1\n" +
"\n" +
"union all\n" +
"\n" +
"select\n" +
"\tc_custkey custkey,\n" +
"\tnull name,\n" +
"\tnull phone,\n" +
"\t0 total,\n" +
"\t c_mktsegment segment\n" +
"from customer_nullable_2\n" +
"\n" +
"union all\n" +
"\n" +
"select\n" +
"\tc_custkey custkey,\n" +
"\tnull name,\n" +
"\tnull phone,\n" +
"\tc_total total,\n" +
"\t c_mktsegment segment\n" +
"from customer_nullable_3;");
Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
MaterializedView mv = (MaterializedView) db.getTable("customer_mv");
// int literal unioned with decimal(19,6) must produce a decimal MV column.
Assert.assertTrue(mv.getColumn("total").getType().isDecimalOfAnyVersion());
// NOT NULL in all three branches, so the MV column stays non-nullable.
Assert.assertFalse(mv.getColumn("segment").isAllowNull());
}
// The async refresh start time is randomized within a window controlled by the
// mv_randomize_start property: default (unset) -> up to 60s (one interval);
// -1 -> disabled; explicit N -> window of N seconds. The original body repeated
// the same create/check/drop sequence three times; it is factored into a helper.
@Test
public void testRandomizeStart() throws Exception {
    // Scheduling slack (seconds) tolerated on top of the randomization window.
    final long FIXED_DELTA = 5;
    // Default: randomized within the 1-minute refresh interval (inclusive).
    checkRandomizedStartDelta("'replication_num' = '1'", 60);
    // mv_randomize_start = -1 disables randomization: only scheduling slack remains.
    checkRandomizedStartDelta("'replication_num' = '1', " +
            "'mv_randomize_start' = '-1'", FIXED_DELTA - 1);
    // Explicit 2-second randomization window plus slack.
    checkRandomizedStartDelta("'replication_num' = '1', " +
            "'mv_randomize_start' = '2'", 2 + FIXED_DELTA - 1);
}

// Creates mv_test_randomize with the given PROPERTIES clause, asserts that its
// refresh start time lies within [0, maxDeltaInclusive] seconds of "now",
// then drops the MV.
private void checkRandomizedStartDelta(String properties, long maxDeltaInclusive) throws Exception {
    String sql = "create materialized view mv_test_randomize \n" +
            "distributed by hash(k1) buckets 10\n" +
            "refresh async every(interval 1 minute) " +
            "PROPERTIES (\n" +
            properties +
            ")\n" +
            "as " +
            "select tb1.k1, k2, " +
            "array<int>[1,2,3] as type_array, " +
            "map<int, int>{1:2} as type_map, " +
            "parse_json('{\"a\": 1}') as type_json, " +
            "row('c') as type_struct, " +
            "array<json>[parse_json('{}')] as type_array_json " +
            "from tbl1 tb1;";
    long currentSecond = Utils.getLongFromDateTime(LocalDateTime.now());
    starRocksAssert.withMaterializedView(sql);
    MaterializedView mv = getMv(testDb.getFullName(), "mv_test_randomize");
    long delta = mv.getRefreshScheme().getAsyncRefreshContext().getStartTime() - currentSecond;
    Assert.assertTrue("delta is " + delta, delta >= 0 && delta <= maxDeltaInclusive);
    starRocksAssert.dropMaterializedView("mv_test_randomize");
}
// Complex column types (array, map, json, struct, array<json>) must be
// accepted in an MV select list; successful creation is the assertion.
@Test
public void testCreateMvWithTypes() throws Exception {
    String createSql = "create materialized view mv_test_types \n" +
            "distributed by hash(k1) buckets 10\n" +
            "PROPERTIES (\n" +
            "'replication_num' = '1'" +
            ")\n" +
            "as " +
            "select tb1.k1, k2, " +
            "array<int>[1,2,3] as type_array, " +
            "map<int, int>{1:2} as type_map, " +
            "parse_json('{\"a\": 1}') as type_json, " +
            "row('c') as type_struct, " +
            "array<json>[parse_json('{}')] as type_array_json " +
            "from tbl1 tb1;";
    starRocksAssert.withMaterializedView(createSql);
}
// Partitioning an MV by a column of a LIST-partitioned base table is not
// supported; analysis must fail with an explicit error.
@Test
public void testCreateMaterializedViewOnListPartitionTables1() throws Exception {
    String createSQL = "CREATE TABLE test.list_partition_tbl1 (\n" +
            " id BIGINT,\n" +
            " age SMALLINT,\n" +
            " dt VARCHAR(10),\n" +
            " province VARCHAR(64) not null\n" +
            ")\n" +
            "ENGINE=olap\n" +
            "DUPLICATE KEY(id)\n" +
            "PARTITION BY LIST (province) (\n" +
            " PARTITION p1 VALUES IN (\"beijing\",\"chongqing\") ,\n" +
            " PARTITION p2 VALUES IN (\"guangdong\") \n" +
            ")\n" +
            "DISTRIBUTED BY HASH(id) BUCKETS 10\n" +
            "PROPERTIES (\n" +
            " \"replication_num\" = \"1\"\n" +
            ")";
    starRocksAssert.withTable(createSQL);
    String sql = "create materialized view list_partition_mv1 " +
            "partition by province " +
            "distributed by hash(dt, province) buckets 10 " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select dt, province, avg(age) from list_partition_tbl1 group by dt, province;";
    // assertThrows replaces the original try/fail()/catch pattern and reports
    // the actual message when the content check fails.
    Exception e = Assert.assertThrows(Exception.class,
            () -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
    Assert.assertTrue(e.getMessage(),
            e.getMessage().contains("Materialized view related base table partition type: LIST not supports."));
    starRocksAssert.dropTable("list_partition_tbl1");
}
// An unpartitioned MV over a LIST-partitioned base table is allowed.
@Test
public void testCreateMaterializedViewOnListPartitionTables2() throws Exception {
    String createSQL = "CREATE TABLE test.list_partition_tbl1 (\n" +
            " id BIGINT,\n" +
            " age SMALLINT,\n" +
            " dt VARCHAR(10),\n" +
            " province VARCHAR(64) not null\n" +
            ")\n" +
            "ENGINE=olap\n" +
            "DUPLICATE KEY(id)\n" +
            "PARTITION BY LIST (province) (\n" +
            " PARTITION p1 VALUES IN (\"beijing\",\"chongqing\") ,\n" +
            " PARTITION p2 VALUES IN (\"guangdong\") \n" +
            ")\n" +
            "DISTRIBUTED BY HASH(id) BUCKETS 10\n" +
            "PROPERTIES (\n" +
            " \"replication_num\" = \"1\"\n" +
            ")";
    starRocksAssert.withTable(createSQL);
    String sql = "create materialized view list_partition_mv1 " +
            "distributed by hash(dt, province) buckets 10 " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select dt, province, avg(age) from list_partition_tbl1 group by dt, province;";
    // No try/catch: an unexpected exception propagates and fails the test with
    // a full stack trace instead of just the message from Assert.fail().
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    starRocksAssert.dropTable("list_partition_tbl1");
}
// MV over an aliased derived table (subquery with aggregation): analysis must
// succeed. An unexpected exception propagates and fails the test with its
// stack trace (clearer than the original catch + Assert.fail(message)).
@Test
public void testCreateMaterializedViewWithTableAlias1() throws Exception {
    String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select t0.k1, t0.k2, t0.sum as sum0 " +
            "from (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t0 where t0.k2 > 10";
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
// MV over multiple aliased derived tables joined together: analysis must
// succeed. An unexpected exception propagates and fails the test with its
// stack trace (replaces catch + printStackTrace + Assert.fail(message)).
@Test
public void testCreateMaterializedViewWithTableAlias2() throws Exception {
    String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select t0.k1, t0.k2, t0.sum as sum0, t1.sum as sum1, t2.sum as sum2 " +
            "from (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t0 " +
            "left join (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t1 on t0.k1=t1.k2 " +
            "left join (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t2 on t0.k1=t2.k1;";
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
// Async MVs over views whose definitions contain subqueries (derived tables):
// each variant must pass analysis.
// NOTE(review): the fourth sub-case below is byte-identical to the second
// (same view_1 join view_2 SQL) — likely a copy-paste leftover; consider
// removing or differentiating it.
@Test
public void testCreateMvWithViewAndSubQuery() throws Exception {
starRocksAssert.withView("create view view_1 as " +
"select k1, s2 from (select tb1.k1, k2 s2 from tbl1 tb1) t where t.k1 > 10;");
starRocksAssert.withView("create view view_2 as " +
"select k1, s2 from (select v1.k1, v1.s2 from view_1 v1) t where t.k1 > 10;");
starRocksAssert.withView("create view view_3 as " +
"select d1, s2 from (select date_trunc('month',k1) d1, v1.s2 from view_1 v1)t where d1 is not null;");
{
// MV on a view that wraps a subquery.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select k1, s2 from view_1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// MV joining two such views.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select view_1.k1, view_2.s2 from view_1 join view_2 on view_1.k1=view_2.k1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// MV partitioned on a date_trunc column computed inside the view's subquery.
String sql = "create materialized view mv1\n" +
"partition by d1\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select v3.d1, v3.s2 from view_3 v3;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// Duplicate of the second sub-case (see NOTE above).
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select view_1.k1, view_2.s2 from view_1 join view_2 on view_1.k1=view_2.k1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
starRocksAssert.dropView("view_1");
starRocksAssert.dropView("view_2");
starRocksAssert.dropView("view_3");
}
// Synchronous MV (rollup) creation must be rejected on cloud-native (lake)
// tables in shared-data clusters; the table type is forced via reflection.
// NOTE(review): the expected-message literal below looks truncated — the URL
// after "https:" and the fragment after "VIEW" are cut off, leaving apparently
// unterminated string literals. This appears to be mangled by an automated
// transformation of the file; restore the full literal from upstream before
// relying on this assertion.
@Test
public void testCreateSynchronousMVOnLakeTable() throws Exception {
String sql = "create materialized view sync_mv1 as select k1, sum(v1) from mocked_cloud_table group by k1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
Table table = getTable("test", "mocked_cloud_table");
// Flip the mocked table's type to CLOUD_NATIVE to simulate a lake table.
Deencapsulation.setField(table, "type", Table.TableType.CLOUD_NATIVE);
DdlException e = Assert.assertThrows(DdlException.class, () -> {
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
});
Assert.assertTrue(e.getMessage().contains("Creating synchronous materialized view(rollup) is not supported in " +
"shared data clusters.\nPlease use asynchronous materialized view instead.\n" +
"Refer to https:
"/data-definition/CREATE%20MATERIALIZED%20VIEW
}
@Test
public void testCreateSynchronousMVOnAnotherMV() throws Exception {
    // Building a synchronous (rollup) MV on top of another materialized view must fail.
    final String createSql =
            "create materialized view sync_mv1 as select k1, sum(v1) from mocked_cloud_table group by k1;";
    CreateMaterializedViewStmt stmt =
            (CreateMaterializedViewStmt) UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
    Table base = getTable("test", "mocked_cloud_table");
    // Pretend the base table is itself a materialized view.
    Deencapsulation.setField(base, "type", Table.TableType.MATERIALIZED_VIEW);
    DdlException thrown = Assert.assertThrows(DdlException.class,
            () -> GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(stmt));
    Assert.assertTrue(thrown.getMessage()
            .contains("Do not support create synchronous materialized view(rollup) on"));
}
// Creates the MV described by `sql` and returns it; fails the test on any error.
MaterializedView getMaterializedViewChecked(String sql) {
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView(stmt);
        // Give the asynchronous creation task time to materialize the table.
        ThreadUtil.sleepAtLeastIgnoreInterrupts(4000L);
        Table created = testDb.getTable(stmt.getTableName().getTbl());
        Assert.assertNotNull(created);
        Assert.assertTrue(created instanceof MaterializedView);
        return (MaterializedView) created;
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
        return null; // unreachable: Assert.fail() always throws
    }
}
// Creates the MV described by `sql`, returns its key columns, and always drops the MV
// afterwards. Fails the test (with the underlying message) on any error.
List<Column> getMaterializedViewKeysChecked(String sql) {
    String mvName = null;
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        CreateMaterializedViewStatement createMaterializedViewStatement =
                (CreateMaterializedViewStatement) statementBase;
        currentState.createMaterializedView(createMaterializedViewStatement);
        // Give the asynchronous creation task time to materialize the table.
        ThreadUtil.sleepAtLeastIgnoreInterrupts(4000L);
        mvName = createMaterializedViewStatement.getTableName().getTbl();
        Table table = testDb.getTable(mvName);
        Assert.assertNotNull(table);
        Assert.assertTrue(table instanceof MaterializedView);
        return ((MaterializedView) table).getKeyColumns();
    } catch (Exception e) {
        e.printStackTrace();
        // Include the cause in the failure report instead of a bare fail().
        Assert.fail(e.getMessage());
    } finally {
        // Plain null check instead of !Objects.isNull(...).
        if (mvName != null) {
            try {
                starRocksAssert.dropMaterializedView(mvName);
            } catch (Exception e) {
                Assert.fail(e.getMessage());
            }
        }
    }
    return Lists.newArrayList();
}
// NOTE: the duplicate @Test annotation was removed — @Test is not @Repeatable,
// so annotating the method twice does not compile.
@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_2() {
    // Key columns are derived from the select list; expect c_1_0 then c_1_3.
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_3, c_1_0 , c_1_4, c_1_5 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_3() {
    // Same keys are chosen even when key columns appear last in the select list.
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_4() {
    // An explicit ORDER BY pins the key columns.
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "order by (c_1_0, c_1_3) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_5() {
    // With a leading varchar column present, expect it (c_1_10) to be the first key.
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_10, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_10", keyColumns.get(0).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_6() {
    // Expect keys c_1_10 then c_1_3 for this column ordering.
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_3, c_1_10, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_10", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_1() {
    // Unpartitioned MV with hash distribution: keys follow base-table column order.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by hash(c_1_3, c_1_0) buckets 10 " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_3, c_1_0, c_1_4, c_1_5 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_2() {
    // Unpartitioned MV with random distribution.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_3, c_1_0 , c_1_4, c_1_5 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_3() {
    // Key derivation is independent of select-list position.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_4() {
    // An explicit ORDER BY pins the key columns for unpartitioned MVs too.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "order by (c_1_0, c_1_3) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_5() {
    // With a leading varchar column present, expect it (c_1_10) to be the first key.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_10, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_10", keyColumns.get(0).getName());
}
@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_6() {
    // Expect keys c_1_10 then c_1_3 for this column ordering.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_3, c_1_10, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(equals()).
    Assert.assertEquals("c_1_10", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}
} | class CreateMaterializedViewTest {
private static final Logger LOG = LogManager.getLogger(CreateMaterializedViewTest.class);
// Legacy JUnit4 expected-exception rule (most tests use Assert.assertThrows instead).
@Rule
public ExpectedException expectedException = ExpectedException.none();
// Exposes the currently running test's name to the test body.
@Rule
public TestName name = new TestName();
// Per-class scratch directory, cleaned up automatically when the class finishes.
@ClassRule
public static TemporaryFolder temp = new TemporaryFolder();
// Shared fixtures, initialized once in beforeClass().
private static ConnectContext connectContext;
private static StarRocksAssert starRocksAssert;
private static Database testDb;
private static GlobalStateMgr currentState;
@BeforeClass
public static void beforeClass() throws Exception {
    // Bring up a minimal single-node cluster and shorten scheduler intervals so that
    // background partition/refresh work completes within the tests' sleep windows.
    ConnectorPlanTestBase.doInit(temp.newFolder().toURI().toString());
    Config.alter_scheduler_interval_millisecond = 100;
    Config.dynamic_partition_enable = true;
    Config.dynamic_partition_check_interval_seconds = 1;
    Config.enable_experimental_mv = true;
    UtFrameUtils.createMinStarRocksCluster();
    connectContext = UtFrameUtils.createDefaultCtx();
    starRocksAssert = new StarRocksAssert(connectContext);
    // Statistics tables are required by the planner; create them once if absent.
    if (!starRocksAssert.databaseExist("_statistics_")) {
        StatisticsMetaManager m = new StatisticsMetaManager();
        m.createStatisticsTablesForTest();
    }
    // Fixture tables: a mix of range-partitioned, unpartitioned, aggregate and external
    // tables that the individual tests build materialized views on.
    starRocksAssert.withDatabase("test").useDatabase("test")
            .withTable("CREATE TABLE test.tbl1\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values [('2020-01-01'),('2020-02-01')),\n" +
                    " PARTITION p2 values [('2020-02-01'),('2020-03-01'))\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Same schema as tbl1 but with upper-case identifiers (case-sensitivity coverage).
            .withTable("CREATE TABLE test.TBL1 \n" +
                    "(\n" +
                    " K1 date,\n" +
                    " K2 int,\n" +
                    " V1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(K1)\n" +
                    "(\n" +
                    " PARTITION p1 values [('2020-01-01'),('2020-02-01')),\n" +
                    " PARTITION p2 values [('2020-02-01'),('2020-03-01'))\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(K2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Aggregate-key table exercising HLL / BITMAP / PERCENTILE aggregate columns.
            .withTable("CREATE TABLE `aggregate_table_with_null` (\n" +
                    "`k1` date,\n" +
                    "`v2` datetime MAX,\n" +
                    "`v3` char(20) MIN,\n" +
                    "`v4` bigint SUM,\n" +
                    "`v8` bigint SUM,\n" +
                    "`v5` HLL HLL_UNION,\n" +
                    "`v6` BITMAP BITMAP_UNION,\n" +
                    "`v7` PERCENTILE PERCENTILE_UNION\n" +
                    ") ENGINE=OLAP\n" +
                    "AGGREGATE KEY(`k1`)\n" +
                    "COMMENT \"OLAP\"\n" +
                    "DISTRIBUTED BY HASH(`k1`) BUCKETS 3\n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ");")
            .withView("CREATE VIEW v1 AS SELECT * FROM aggregate_table_with_null;")
            .withTable("CREATE TABLE test.tbl2\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k2)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('10'),\n" +
                    " PARTITION p2 values less than('20')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            .withTable("CREATE TABLE test.tbl3\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Multi-column range partitioning.
            .withTable("CREATE TABLE test.tbl4\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " k3 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k2,k3)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('20','30'),\n" +
                    " PARTITION p2 values less than('40','50')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // Wide-typed table used by the sort-key derivation tests (c_1_* columns).
            .withTable("CREATE TABLE `t1` (\n" +
                    " `c_1_0` decimal128(30, 4) NOT NULL COMMENT \"\",\n" +
                    " `c_1_1` boolean NOT NULL COMMENT \"\",\n" +
                    " `c_1_2` date NULL COMMENT \"\",\n" +
                    " `c_1_3` date NOT NULL COMMENT \"\",\n" +
                    " `c_1_4` double NULL COMMENT \"\",\n" +
                    " `c_1_5` double NULL COMMENT \"\",\n" +
                    " `c_1_6` datetime NULL COMMENT \"\",\n" +
                    " `c_1_7` ARRAY<int(11)> NULL COMMENT \"\",\n" +
                    " `c_1_8` smallint(6) NULL COMMENT \"\",\n" +
                    " `c_1_9` bigint(20) NOT NULL COMMENT \"\",\n" +
                    " `c_1_10` varchar(31) NOT NULL COMMENT \"\",\n" +
                    " `c_1_11` decimal128(22, 18) NULL COMMENT \"\",\n" +
                    " `c_1_12` boolean NULL COMMENT \"\"\n" +
                    ") ENGINE=OLAP \n" +
                    "DUPLICATE KEY(`c_1_0`, `c_1_1`, `c_1_2`, `c_1_3`)\n" +
                    "COMMENT \"OLAP\"\n" +
                    "PARTITION BY RANGE(`c_1_3`)\n" +
                    "(PARTITION p20000101 VALUES [('2000-01-01'), ('2010-12-31')),\n" +
                    "PARTITION p20101231 VALUES [('2010-12-31'), ('2021-12-30')),\n" +
                    "PARTITION p20211230 VALUES [('2021-12-30'), ('2032-12-29')))\n" +
                    "DISTRIBUTED BY HASH(`c_1_3`, `c_1_2`, `c_1_0`) BUCKETS 10 \n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\",\n" +
                    "\"in_memory\" = \"false\"\n" +
                    ");")
            // External MySQL table (connection values are dummies; never actually contacted).
            .withTable("CREATE EXTERNAL TABLE mysql_external_table\n" +
                    "(\n" +
                    " k1 DATE,\n" +
                    " k2 INT,\n" +
                    " k3 SMALLINT,\n" +
                    " k4 VARCHAR(2048),\n" +
                    " k5 DATETIME\n" +
                    ")\n" +
                    "ENGINE=mysql\n" +
                    "PROPERTIES\n" +
                    "(\n" +
                    " \"host\" = \"127.0.0.1\",\n" +
                    " \"port\" = \"3306\",\n" +
                    " \"user\" = \"mysql_user\",\n" +
                    " \"password\" = \"mysql_passwd\",\n" +
                    " \"database\" = \"mysql_db_test\",\n" +
                    " \"table\" = \"mysql_table_test\"\n" +
                    ");")
            // Second database for cross-database MV tests.
            .withDatabase("test2").useDatabase("test2")
            .withTable("CREATE TABLE test2.tbl3\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('2021-02-01'),\n" +
                    " PARTITION p2 values less than('2021-03-01')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            // The following tables are fully qualified, so they land in `test` even though
            // the current database is still test2 at this point in the chain.
            .withTable("CREATE TABLE test.tbl5\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " k3 int,\n" +
                    " v1 int,\n" +
                    " v2 int\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values less than('2021-02-01'),\n" +
                    " PARTITION p2 values less than('2021-03-01')\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            .withTable("CREATE TABLE test.tbl_for_count\n" +
                    "(\n" +
                    " c_0_0 BIGINT NULL ,\n" +
                    " c_0_1 DATE NOT NULL ,\n" +
                    " c_0_2 DECIMAL(37, 5) NOT NULL,\n" +
                    " c_0_3 INT MAX NOT NULL ,\n" +
                    " c_0_4 DATE REPLACE_IF_NOT_NULL NOT NULL ,\n" +
                    " c_0_5 PERCENTILE PERCENTILE_UNION NOT NULL\n" +
                    ")\n" +
                    "AGGREGATE KEY (c_0_0,c_0_1,c_0_2)\n" +
                    "PARTITION BY RANGE(c_0_1)\n" +
                    "(\n" +
                    " START (\"2010-01-01\") END (\"2021-12-31\") EVERY (INTERVAL 219 day)\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH (c_0_2,c_0_1) BUCKETS 3\n" +
                    "properties('replication_num'='1');")
            // Regular OLAP table whose type is flipped to CLOUD_NATIVE / MATERIALIZED_VIEW
            // via reflection in the shared-data tests.
            .withTable("CREATE TABLE test.mocked_cloud_table\n" +
                    "(\n" +
                    " k1 date,\n" +
                    " k2 int,\n" +
                    " v1 int sum\n" +
                    ")\n" +
                    "PARTITION BY RANGE(k1)\n" +
                    "(\n" +
                    " PARTITION p1 values [('2020-01-01'),('2020-02-01')),\n" +
                    " PARTITION p2 values [('2020-02-01'),('2020-03-01'))\n" +
                    ")\n" +
                    "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
                    "PROPERTIES('replication_num' = '1');")
            .useDatabase("test");
    starRocksAssert.withView("create view test.view_to_tbl1 as select * from test.tbl1;");
    currentState = GlobalStateMgr.getCurrentState();
    testDb = currentState.getDb("test");
}
// Drops the named materialized view through the normal statement-execution path.
private void dropMv(String mvName) throws Exception {
    StatementBase dropStmt =
            UtFrameUtils.parseStmtWithNewParser("drop materialized view " + mvName, connectContext);
    new StmtExecutor(connectContext, dropStmt).execute();
}
// Force-drops the named table (skips the recycle bin) through the statement executor.
private void dropTableForce(String tableName) throws Exception {
    StatementBase dropStmt =
            UtFrameUtils.parseStmtWithNewParser("drop table " + tableName + " force", connectContext);
    new StmtExecutor(connectContext, dropStmt).execute();
}
// Drops the named table through the statement executor.
private void dropTable(String tableName) throws Exception {
    StatementBase dropStmt =
            UtFrameUtils.parseStmtWithNewParser("drop table " + tableName, connectContext);
    new StmtExecutor(connectContext, dropStmt).execute();
}
// Polls the task manager until the first task run reaches a terminal state
// (SUCCESS or FAILED) or the retry budget is exhausted; returns the latest status list.
private List<TaskRunStatus> waitingTaskFinish() {
    TaskManager taskManager = GlobalStateMgr.getCurrentState().getTaskManager();
    List<TaskRunStatus> taskRuns = taskManager.showTaskRunStatus(null);
    int retryCount = 0;
    final int maxRetry = 5;
    while (retryCount < maxRetry) {
        ThreadUtil.sleepAtLeastIgnoreInterrupts(2000L);
        // Re-fetch each iteration: the original fetched once before the loop, which only
        // observed progress if the returned objects were live — TODO confirm, but
        // re-querying is correct either way.
        taskRuns = taskManager.showTaskRunStatus(null);
        // Guard against an empty list (the original threw IndexOutOfBoundsException here).
        if (!taskRuns.isEmpty()) {
            Constants.TaskRunState state = taskRuns.get(0).getState();
            if (state == Constants.TaskRunState.FAILED || state == Constants.TaskRunState.SUCCESS) {
                break;
            }
        }
        retryCount++;
        LOG.info("waiting for TaskRunState retryCount:" + retryCount);
    }
    return taskRuns;
}
@Test
public void testFullCreate() throws Exception {
    // Stub out DML execution so the refresh task does not actually insert data.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 SECOND)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    try {
        MaterializedView materializedView = getMaterializedViewChecked(sql);
        // The MV should be expression-range-partitioned on date_trunc('month', k1).
        PartitionInfo partitionInfo = materializedView.getPartitionInfo();
        Assert.assertEquals(1, partitionInfo.getPartitionColumns().size());
        Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
        ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
        Expr partitionExpr = expressionRangePartitionInfo.getPartitionExprs().get(0);
        Assert.assertTrue(partitionExpr instanceof FunctionCallExpr);
        FunctionCallExpr partitionFunctionCallExpr = (FunctionCallExpr) partitionExpr;
        Assert.assertEquals("date_trunc", partitionFunctionCallExpr.getFnName().getFunction());
        List<SlotRef> slotRefs = Lists.newArrayList();
        partitionFunctionCallExpr.collect(SlotRef.class, slotRefs);
        SlotRef partitionSlotRef = slotRefs.get(0);
        Assert.assertEquals("k1", partitionSlotRef.getColumnName());
        // Exactly one base table (tbl1), and the MV's partition expression must
        // reference that base table's k1 column.
        List<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();
        Assert.assertEquals(1, baseTableInfos.size());
        Expr partitionRefTableExpr = materializedView.getPartitionRefTableExprs().get(0);
        List<SlotRef> tableSlotRefs = Lists.newArrayList();
        partitionRefTableExpr.collect(SlotRef.class, tableSlotRefs);
        SlotRef slotRef = tableSlotRefs.get(0);
        TableName baseTableName = slotRef.getTblNameWithoutAnalyzed();
        Assert.assertEquals(baseTableName.getDb(), testDb.getFullName());
        Table baseTable = testDb.getTable(baseTableName.getTbl());
        Assert.assertNotNull(baseTable);
        Assert.assertEquals(baseTableInfos.get(0).getTableId(), baseTable.getId());
        // Creating the MV registers it on the base table.
        Assert.assertEquals(1, baseTable.getRelatedMaterializedViews().size());
        Column baseColumn = baseTable.getColumn(slotRef.getColumnName());
        Assert.assertNotNull(baseColumn);
        Assert.assertEquals("k1", baseColumn.getName());
        // The stored view definition is the fully-qualified, normalized query.
        Assert.assertEquals("SELECT `test`.`tb1`.`k1`, `test`.`tb1`.`k2` AS `s2`\n" +
                        "FROM `test`.`tbl1` AS `tb1`",
                materializedView.getViewDefineSql());
        TableProperty tableProperty = materializedView.getTableProperty();
        Assert.assertEquals(1, tableProperty.getReplicationNum().shortValue());
        Assert.assertEquals(OlapTable.OlapTableState.NORMAL, materializedView.getState());
        Assert.assertEquals(KeysType.DUP_KEYS, materializedView.getKeysType());
        Assert.assertEquals(Table.TableType.MATERIALIZED_VIEW,
                materializedView.getType());
        Assert.assertEquals(0, materializedView.getRelatedMaterializedViews().size(), 0);
        Assert.assertEquals(2, materializedView.getBaseSchema().size());
        Assert.assertTrue(materializedView.isActive());
        // Exercise partition add/drop synchronization between base table and MV.
        testFullCreateSync(materializedView, baseTable);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
// Verifies that the MV's partitions track the base table's partitions after the
// MV refresh task runs: adding/dropping a base-table partition is mirrored in the MV.
// Called from testFullCreate with its freshly created mv1 and base table tbl1.
public void testFullCreateSync(MaterializedView materializedView, Table baseTable) throws Exception {
    TaskManager taskManager = GlobalStateMgr.getCurrentState().getTaskManager();
    String mvTaskName = TaskBuilder.getMvTaskName(materializedView.getId());
    List<TaskRunStatus> taskRuns = waitingTaskFinish();
    Assert.assertEquals(Constants.TaskRunState.SUCCESS, taskRuns.get(0).getState());
    // NOTE(review): these collections appear to be live views — the later size checks
    // observe the effects of the ALTER statements without re-fetching; confirm.
    Collection<Partition> baseTablePartitions = baseTable.getPartitions();
    Collection<Partition> mvPartitions = materializedView.getPartitions();
    Assert.assertEquals(2, mvPartitions.size());
    Assert.assertEquals(baseTablePartitions.size(), mvPartitions.size());
    // Add a base-table partition; the next MV task run should create the matching MV partition.
    String addPartitionSql = "ALTER TABLE test.tbl1 ADD PARTITION p3 values less than('2020-04-01');";
    new StmtExecutor(connectContext, addPartitionSql).execute();
    taskManager.executeTask(mvTaskName);
    waitingTaskFinish();
    Assert.assertEquals(3, baseTablePartitions.size());
    Assert.assertEquals(baseTablePartitions.size(), mvPartitions.size());
    // Drop it again; the MV should shrink back in lockstep.
    String dropPartitionSql = "ALTER TABLE test.tbl1 DROP PARTITION p3\n";
    new StmtExecutor(connectContext, dropPartitionSql).execute();
    taskManager.executeTask(mvTaskName);
    waitingTaskFinish();
    Assert.assertEquals(2, mvPartitions.size());
    Assert.assertEquals(baseTablePartitions.size(), mvPartitions.size());
}
@Test
public void testCreateAsync() {
    // An async refresh interval in MONTH units is not supported; analysis must fail.
    LocalDateTime start = LocalDateTime.now().plusSeconds(3);
    final String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + start.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 MONTH)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    Assert.assertThrows(AnalysisException.class,
            () -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
}
@Test
public void testCreateAsyncMVWithDuplicatedProperty() {
    // Each clause (PARTITION BY, DISTRIBUTED BY, REFRESH) may appear at most once;
    // every duplicated-clause variant below must fail analysis.
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    String duplicatedPartitionBy = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    String duplicatedDistributedBy = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    String duplicatedRefresh = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    for (String invalidSql : new String[] {duplicatedPartitionBy, duplicatedDistributedBy, duplicatedRefresh}) {
        Assert.assertThrows(AnalysisException.class,
                () -> UtFrameUtils.parseStmtWithNewParser(invalidSql, connectContext));
    }
}
@Test
public void testCreateAsyncNormal() throws Exception {
    // PARTITION BY / DISTRIBUTED BY / REFRESH may appear in any order; all variants parse.
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    String[] clauseOrderVariants = new String[] {
            // partition / distributed / refresh
            "create materialized view mv1\n" +
                    "partition by date_trunc('month',k1)\n" +
                    "distributed by hash(s2) buckets 10\n" +
                    "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
                    "') EVERY(INTERVAL 3 DAY)\n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ")\n" +
                    "as select tb1.k1, k2 s2 from tbl1 tb1;",
            // distributed / partition / refresh
            "create materialized view mv1\n" +
                    "distributed by hash(s2) buckets 10\n" +
                    "partition by date_trunc('month',k1)\n" +
                    "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
                    "') EVERY(INTERVAL 3 DAY)\n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ")\n" +
                    "as select tb1.k1, k2 s2 from tbl1 tb1;",
            // refresh / distributed / partition
            "create materialized view mv1\n" +
                    "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
                    "') EVERY(INTERVAL 3 DAY)\n" +
                    "distributed by hash(s2) buckets 10\n" +
                    "partition by date_trunc('month',k1)\n" +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ")\n" +
                    "as select tb1.k1, k2 s2 from tbl1 tb1;"
    };
    for (String sql : clauseOrderVariants) {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    }
}
@Test
public void testCreateAsyncLowercase() throws Exception {
    // The refresh-interval time unit is case-insensitive ("day" in lowercase parses).
    LocalDateTime start = LocalDateTime.now().plusSeconds(3);
    final String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + start.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 day)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
@Test
public void testCreateAsyncWithSingleTable() throws Exception {
    // A partitioned MV without an explicit refresh clause defaults to MANUAL refresh.
    final String sql = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2)\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    CreateMaterializedViewStatement createStmt = (CreateMaterializedViewStatement) parsed;
    RefreshSchemeClause refreshScheme = createStmt.getRefreshSchemeDesc();
    Assert.assertEquals(MaterializedView.RefreshType.MANUAL, refreshScheme.getType());
}
@Test
public void testCreateSyncWithSingleTable() throws Exception {
    // Without partition/distribution/refresh clauses, the parser produces the legacy
    // synchronous-MV statement type rather than the async CreateMaterializedViewStatement.
    StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(
            "create materialized view mv1\n" +
                    "as select tb1.k1, k2 s2 from tbl1 tb1;", connectContext);
    Assert.assertTrue(parsed instanceof CreateMaterializedViewStmt);
}
@Test
public void testFullCreateMultiTables() throws Exception {
    // Stub out DML execution so the refresh task does not actually insert data.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    // MV over a two-table join, partitioned on a select-list alias (s1).
    String sql = "create materialized view mv1\n" +
            "partition by s1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('9999-12-31') EVERY(INTERVAL 3 SECOND)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select date_trunc('month',tb1.k1) s1, tb2.k2 s2 from tbl1 tb1 join tbl2 tb2 on tb1.k2 = tb2.k2;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        Table mv1 = testDb.getTable("mv1");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        MaterializedView materializedView = (MaterializedView) mv1;
        // Partitioning by an alias resolves to a plain SlotRef, not a FunctionCallExpr.
        PartitionInfo partitionInfo = materializedView.getPartitionInfo();
        Assert.assertEquals(1, partitionInfo.getPartitionColumns().size());
        Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
        ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
        Expr partitionExpr = expressionRangePartitionInfo.getPartitionExprs().get(0);
        Assert.assertTrue(partitionExpr instanceof SlotRef);
        SlotRef partitionSlotRef = (SlotRef) partitionExpr;
        Assert.assertEquals("s1", partitionSlotRef.getColumnName());
        // Both join sides are registered as base tables.
        List<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();
        Assert.assertEquals(2, baseTableInfos.size());
        // The partition-reference expression must resolve to tbl1.k1.
        Expr partitionRefTableExpr = materializedView.getPartitionRefTableExprs().get(0);
        List<SlotRef> slotRefs = Lists.newArrayList();
        partitionRefTableExpr.collect(SlotRef.class, slotRefs);
        SlotRef slotRef = slotRefs.get(0);
        TableName baseTableName = slotRef.getTblNameWithoutAnalyzed();
        Assert.assertEquals(baseTableName.getDb(), testDb.getFullName());
        Table baseTable = testDb.getTable(baseTableName.getTbl());
        Assert.assertNotNull(baseTable);
        Assert.assertTrue(baseTableInfos.stream().anyMatch(baseTableInfo ->
                baseTableInfo.getTableId() == baseTable.getId()));
        // >= 1 because other tests may also register MVs on the shared fixture table.
        Assert.assertTrue(1 <= baseTable.getRelatedMaterializedViews().size());
        Column baseColumn = baseTable.getColumn(slotRef.getColumnName());
        Assert.assertNotNull(baseColumn);
        Assert.assertEquals("k1", baseColumn.getName());
        Assert.assertEquals(
                "SELECT date_trunc('month', `test`.`tb1`.`k1`) AS `s1`, `test`.`tb2`.`k2` AS `s2`\n" +
                        "FROM `test`.`tbl1` AS `tb1` INNER JOIN `test`.`tbl2` AS `tb2` ON `test`.`tb1`.`k2` = `test`.`tb2`.`k2`",
                materializedView.getViewDefineSql());
        TableProperty tableProperty = materializedView.getTableProperty();
        Assert.assertEquals(1, tableProperty.getReplicationNum().shortValue(), 1);
        Assert.assertEquals(OlapTable.OlapTableState.NORMAL, materializedView.getState());
        Assert.assertEquals(KeysType.DUP_KEYS, materializedView.getKeysType());
        Assert.assertEquals(Table.TableType.MATERIALIZED_VIEW,
                materializedView.getType());
        Assert.assertEquals(0, materializedView.getRelatedMaterializedViews().size());
        Assert.assertEquals(2, materializedView.getBaseSchema().size());
        Assert.assertTrue(materializedView.isActive());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
@Test
public void testFullCreateNoPartition() throws Exception {
    // Stub out DML execution so the refresh task does not actually insert data.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    // No PARTITION BY clause: the MV should be single-partitioned.
    String sql = "create materialized view mv1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('9999-12-31') EVERY(INTERVAL 3 SECOND) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, tbl1.k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        Table mv1 = testDb.getTable("mv1");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        MaterializedView materializedView = (MaterializedView) mv1;
        // Single-partition MV: exactly one partition, named after the MV itself.
        PartitionInfo partitionInfo = materializedView.getPartitionInfo();
        Assert.assertTrue(partitionInfo instanceof SinglePartitionInfo);
        Assert.assertEquals(1, materializedView.getPartitions().size());
        Partition partition = materializedView.getPartitions().iterator().next();
        Assert.assertNotNull(partition);
        Assert.assertEquals("mv1", partition.getName());
        List<BaseTableInfo> baseTableInfos = materializedView.getBaseTableInfos();
        Assert.assertEquals(1, baseTableInfos.size());
        Table baseTable = testDb.getTable(baseTableInfos.iterator().next().getTableId());
        // >= 1 because other tests may also register MVs on the shared fixture table.
        Assert.assertTrue(1 <= baseTable.getRelatedMaterializedViews().size());
        Assert.assertEquals("SELECT `test`.`tbl1`.`k1`, `test`.`tbl1`.`k2`\nFROM `test`.`tbl1`",
                materializedView.getViewDefineSql());
        TableProperty tableProperty = materializedView.getTableProperty();
        Assert.assertEquals(1, tableProperty.getReplicationNum().shortValue());
        Assert.assertEquals(OlapTable.OlapTableState.NORMAL, materializedView.getState());
        Assert.assertEquals(KeysType.DUP_KEYS, materializedView.getKeysType());
        Assert.assertEquals(Table.TableType.MATERIALIZED_VIEW,
                materializedView.getType());
        Assert.assertEquals(0, materializedView.getRelatedMaterializedViews().size());
        Assert.assertEquals(2, materializedView.getBaseSchema().size());
        // The async refresh context should carry the parsed schedule (EVERY 3 SECOND).
        MaterializedView.AsyncRefreshContext asyncRefreshContext =
                materializedView.getRefreshScheme().getAsyncRefreshContext();
        Assert.assertTrue(asyncRefreshContext.getStartTime() > 0);
        Assert.assertEquals("SECOND", asyncRefreshContext.getTimeUnit());
        Assert.assertEquals(3, asyncRefreshContext.getStep());
        Assert.assertTrue(materializedView.isActive());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
@Test
public void testCreateWithoutBuckets() throws Exception {
    // DISTRIBUTED BY HASH without an explicit BUCKETS count must still succeed.
    new MockUp<StmtExecutor>() {
        @Mock
        public void handleDMLStmt(ExecPlan execPlan, DmlStmt stmt) throws Exception {
        }
    };
    final String sql = "create materialized view mv1 " +
            "distributed by hash(k2)" +
            "refresh async START('9999-12-31') EVERY(INTERVAL 3 SECOND) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, tbl1.k2 from tbl1;";
    try {
        CreateMaterializedViewStatement createStmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView(createStmt);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv1");
    }
}
@Test
public void testPartitionByTableAlias() throws Exception {
    // Partitioning on a column selected through a table alias must parse cleanly.
    final String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from tbl1 tb1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionNoDataBase() {
    // With no current database, the unqualified MV name must be rejected.
    starRocksAssert.withoutUseDatabase();
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from test.tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Previously the test passed silently when no exception was thrown; the fail()
        // (an AssertionError, not caught by the Exception handler below) closes that gap.
        Assert.fail("expected analysis to fail with 'No database selected'");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("No database selected"));
    } finally {
        // Restore the current database for subsequent tests.
        starRocksAssert.useDatabase("test");
    }
}
@Test
public void testPartitionHasDataBase() {
starRocksAssert.withoutUseDatabase();
String sql = "create materialized view test.mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select tbl1.k1 ss, k2 from test.tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
} finally {
starRocksAssert.useDatabase("test");
}
}
@Test
public void testPartitionNoNeed() {
String sql = "create materialized view mv1 " +
"partition by (a+b) " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select date_trunc('month',k1) ss, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertTrue(e.getMessage(),
e.getMessage().contains("Unsupported expr 'a + b' in PARTITION BY clause"));
} finally {
starRocksAssert.useDatabase("test");
}
}
@Test
public void testCreateMVWithExplainQuery() {
String sql = "create materialized view mv1 " +
"as explain select k1, v2 from aggregate_table_with_null;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
Assert.fail();
} catch (Exception e) {
Assert.assertEquals("Creating materialized view does not support explain query", e.getMessage());
} finally {
starRocksAssert.useDatabase("test");
}
}
@Test
public void testPartitionWithFunctionIn() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select date_trunc('month',tbl1.k1) ss, k2 from tbl1;";
try {
CreateMaterializedViewStatement createMaterializedViewStatement =
(CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
ExpressionPartitionDesc partitionExpDesc = createMaterializedViewStatement.getPartitionExpDesc();
Assert.assertFalse(partitionExpDesc.isFunction());
Assert.assertTrue(partitionExpDesc.getExpr() instanceof SlotRef);
Assert.assertEquals("ss", partitionExpDesc.getSlotRef().getColumnName());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionWithFunctionInUseStr2Date() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(a) buckets 10 " +
"REFRESH DEFERRED MANUAL " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl1;";
try {
CreateMaterializedViewStatement createMaterializedViewStatement =
(CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
ExpressionPartitionDesc partitionExpDesc = createMaterializedViewStatement.getPartitionExpDesc();
Assert.assertFalse(partitionExpDesc.isFunction());
Assert.assertTrue(partitionExpDesc.getExpr() instanceof SlotRef);
Assert.assertEquals("ss", partitionExpDesc.getSlotRef().getColumnName());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionWithFunctionInUseStr2DateForError() {
String sql = "create materialized view mv_error " +
"partition by ss " +
"distributed by hash(a) buckets 10 " +
"REFRESH DEFERRED MANUAL " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select str2date(d,'%Y%m%d') ss, a, b, c from jdbc0.partitioned_db0.tbl0;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertTrue(e.getMessage().contains("Materialized view partition function str2date check failed"));
}
}
@Test
public void testPartitionWithFunction() {
String sql = "create materialized view mv1 " +
"partition by date_trunc('month',ss) " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select tbl1.k1 ss, k2 from tbl1;";
try {
CreateMaterializedViewStatement createMaterializedViewStatement =
(CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
ExpressionPartitionDesc partitionExpDesc = createMaterializedViewStatement.getPartitionExpDesc();
Assert.assertTrue(partitionExpDesc.isFunction());
Assert.assertTrue(partitionExpDesc.getExpr() instanceof FunctionCallExpr);
Assert.assertEquals(partitionExpDesc.getExpr().getChild(1), partitionExpDesc.getSlotRef());
Assert.assertEquals("ss", partitionExpDesc.getSlotRef().getColumnName());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
    @Test
    public void testPartitionWithFunctionUseStr2Date() throws Exception {
        // Case 1: PARTITION BY str2date(d, ...) directly — the partition desc is a
        // function expression whose first child is the slot ref on column d.
        {
            String sql = "create materialized view mv1 " +
                    "partition by str2date(d,'%Y%m%d') " +
                    "distributed by hash(a) buckets 10 " +
                    "REFRESH DEFERRED MANUAL " +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ") " +
                    "as select a, b, c, d from jdbc0.partitioned_db0.tbl1;";
            CreateMaterializedViewStatement createMaterializedViewStatement =
                    (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
            ExpressionPartitionDesc partitionExpDesc = createMaterializedViewStatement.getPartitionExpDesc();
            Assert.assertTrue(partitionExpDesc.isFunction());
            Assert.assertTrue(partitionExpDesc.getExpr() instanceof FunctionCallExpr);
            Assert.assertEquals(partitionExpDesc.getExpr().getChild(0), partitionExpDesc.getSlotRef());
            Assert.assertEquals("d", partitionExpDesc.getSlotRef().getColumnName());
        }
        // Case 2: PARTITION BY an alias of str2date() — creation should succeed end-to-end.
        {
            String sql = "create materialized view mv_str2date " +
                    "partition by p " +
                    "distributed by hash(a) buckets 10 " +
                    "REFRESH DEFERRED MANUAL " +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ") " +
                    "as select str2date(d,'%Y%m%d') as p, a, b, c, d from jdbc0.partitioned_db0.tbl1;";
            starRocksAssert.withMaterializedView(sql);
        }
        // Case 3: date_trunc() over the str2date() alias — also a valid combination.
        {
            String sql = "create materialized view mv_date_trunc_str2date " +
                    "partition by date_trunc('month', p) " +
                    "distributed by hash(a) buckets 10 " +
                    "REFRESH DEFERRED MANUAL " +
                    "PROPERTIES (\n" +
                    "\"replication_num\" = \"1\"\n" +
                    ") " +
                    "as select str2date(d,'%Y%m%d') as p, a, b, c, d from jdbc0.partitioned_db0.tbl1;";
            starRocksAssert.withMaterializedView(sql);
        }
    }
@Test
public void testPartitionWithFunctionNoAlias() {
String sql = "create materialized view mv1 " +
"partition by date_trunc('month',k1) " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, k2 from tbl1;";
try {
CreateMaterializedViewStatement createMaterializedViewStatement =
(CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
ExpressionPartitionDesc partitionExpDesc = createMaterializedViewStatement.getPartitionExpDesc();
Assert.assertTrue(partitionExpDesc.isFunction());
Assert.assertTrue(partitionExpDesc.getExpr() instanceof FunctionCallExpr);
Assert.assertEquals(partitionExpDesc.getExpr().getChild(1), partitionExpDesc.getSlotRef());
Assert.assertEquals("k1", partitionExpDesc.getSlotRef().getColumnName());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionWithoutFunction() {
String sql = "create materialized view mv1 " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, k2 from tbl1;";
try {
CreateMaterializedViewStatement createMaterializedViewStatement =
(CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
ExpressionPartitionDesc partitionExpDesc = createMaterializedViewStatement.getPartitionExpDesc();
Assert.assertFalse(partitionExpDesc.isFunction());
Assert.assertTrue(partitionExpDesc.getExpr() instanceof SlotRef);
Assert.assertEquals(partitionExpDesc.getExpr(), partitionExpDesc.getSlotRef());
Assert.assertEquals("k1", partitionExpDesc.getSlotRef().getColumnName());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionWithFunctionIncludeFunction() {
String sql = "create materialized view mv1 " +
"partition by date_trunc('month',date_trunc('month',ss)) " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select date_trunc('month',k1) ss, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertTrue(e.getMessage(), e.getMessage().contains("Unsupported expr 'date_trunc('month', " +
"date_trunc('month', ss))' in PARTITION BY clause"));
}
}
@Test
public void testPartitionWithFunctionIncludeFunctionInSelect() {
String sql = "create materialized view mv1 " +
"partition by date_trunc('month',ss) " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select date_trunc('month',k1) ss, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error from line 1, column 42 to line 1, column 63. " +
"Detail message: Materialized view partition function date_trunc must related with column.",
e.getMessage());
}
}
@Test
public void testPartitionColumnNoBaseTablePartitionColumn() {
String sql = "create materialized view mv1 " +
"partition by s2 " +
"distributed by hash(s2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1 s1, k2 s2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition column " +
"in partition exp must be base table partition column.", e.getMessage());
}
}
@Test
public void testPartitionColumnBaseTableHasMultiPartitionColumn() {
String sql = "create materialized view mv1 " +
"partition by s2 " +
"distributed by hash(s2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1 s1, k2 s2 from tbl4;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error. Detail message: Materialized view related base table " +
"partition columns only supports single column.", e.getMessage());
}
}
@Test
public void testBaseTableNoPartitionColumn() {
String sql = "create materialized view mv1 " +
"partition by s1 " +
"distributed by hash(s2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1 s1, k2 s2 from tbl3;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition column" +
" in partition exp must be base table partition column.", e.getMessage());
}
}
@Test
public void testPartitionByColumn() {
String sql = "create materialized view mv1 " +
"partition by s1 " +
"distributed by hash(s2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
try {
CreateMaterializedViewStatement statementBase =
(CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
List<BaseTableInfo> baseTableInfos = statementBase.getBaseTableInfos();
Assert.assertEquals(1, baseTableInfos.size());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionByColumnNoAlias() {
String sql = "create materialized view mv1 " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionByColumnMixAlias1() {
String sql = "create materialized view mv1 " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select tbl1.k1, tbl1.k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionByColumnMixAlias2() {
String sql = "create materialized view mv1 " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, tbl1.k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPartitionByColumnNotInSelect() {
String sql = "create materialized view mv1 " +
"partition by s8 " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k2,sqrt(tbl1.k1) s1 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition exp " +
"column:s8 is not found in query statement.", e.getMessage());
}
}
@Test
public void testPartitionByFunctionNotInSelect() {
String sql = "create materialized view mv1 " +
"partition by date_trunc('month',s8) " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error. Detail message: Materialized view partition exp " +
"column:s8 is not found in query statement.", e.getMessage());
}
}
@Test
public void testPartitionByFunctionColumnNoExists() {
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',tb2.k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select tb1.k1, tb2.k2 s2 from tbl1 tb1 join tbl2 tb2 on tb1.k2 = tb2.k2;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error from line 2, column 13 to line 2, column 38. " +
"Detail message: Materialized view partition exp: `tb2`.`k1` must related to column.",
e.getMessage());
}
}
@Test
public void testPartitionByAllowedFunctionNoNeedParams() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select date_trunc(tbl1.k1) ss, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 29. " +
"Detail message: No matching function with signature: date_trunc(date).", e.getMessage());
}
}
@Test
public void testPartitionByAllowedFunctionNoCorrParams() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select date_trunc('%y%m',k1) ss, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error at line 3, column 29. " +
"Detail message: date_trunc function can't support argument other than year|quarter|month|week|day.",
e.getMessage());
}
}
@Test
public void testPartitionByAllowedFunctionNoCorrParams1() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select date_trunc('month',k2) ss, k2 from tbl2;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 32. " +
"Detail message: Materialized view partition function date_trunc check failed.", e.getMessage());
}
}
@Test
public void testPartitionByAllowedFunctionUseWeek() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select date_trunc('week',k2) ss, k2 from tbl2;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 31. " +
"Detail message: The function date_trunc used by the materialized view for partition " +
"does not support week formatting.", e.getMessage());
}
}
@Test
public void testPartitionByNoAllowedFunction() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k2, sqrt(tbl1.k1) ss from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error from line 3, column 16 to line 3, column 28. " +
"Detail message: Materialized view partition function sqrt is not support.", e.getMessage());
}
}
@Test
public void testPartitionByNoAlias() {
String sql = "create materialized view mv1 " +
"partition by date_trunc('month',k1) " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select k1, k2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Partition exp date_trunc('month', k1) must be alias of select item", e.getMessage());
}
}
@Test
public void testDistributeKeyIsNotKey() {
String sql = "create materialized view mv1 " +
"partition by s1 " +
"distributed by hash(s2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testDistributeByIsNull1() throws Exception {
String sql = "create materialized view mv1 " +
"partition by ss " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select tbl1.k1 ss from tbl1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
@Test
public void testDistributeByIsNull2() {
connectContext.getSessionVariable().setAllowDefaultPartition(true);
String sql = "create materialized view mv1 " +
"partition by ss " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select tbl1.k1 ss from tbl1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
} finally {
connectContext.getSessionVariable().setAllowDefaultPartition(false);
}
}
    @Test
    public void testRefreshAsyncOnlyEvery() throws Exception {
        // REFRESH ASYNC EVERY(...) without an explicit START: the analyzer should
        // fill in a start time and preserve the interval value and unit.
        String sql = "create materialized view mv1 " +
                "partition by ss " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async EVERY(INTERVAL 2 MINUTE)" +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select tbl1.k1 ss, k2 from tbl1;";
        try {
            StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
            CreateMaterializedViewStatement createMaterializedViewStatement =
                    (CreateMaterializedViewStatement) statementBase;
            RefreshSchemeClause refreshSchemeDesc = createMaterializedViewStatement.getRefreshSchemeDesc();
            AsyncRefreshSchemeDesc asyncRefreshSchemeDesc = (AsyncRefreshSchemeDesc) refreshSchemeDesc;
            Assert.assertEquals(MaterializedView.RefreshType.ASYNC, refreshSchemeDesc.getType());
            // A default start time must be assigned even though START was omitted.
            Assert.assertNotNull(asyncRefreshSchemeDesc.getStartTime());
            Assert.assertEquals(2, ((IntLiteral) asyncRefreshSchemeDesc.getIntervalLiteral().getValue()).getValue());
            Assert.assertEquals("MINUTE",
                    asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier().getDescription());
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        } finally {
            dropMv("mv1");
        }
    }
@Test
public void testRefreshAsyncStartBeforeCurr() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2016-12-31') EVERY(INTERVAL 1 HOUR)" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select tbl1.k1 ss, k2 from tbl1;";
try {
StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
CreateMaterializedViewStatement createMaterializedViewStatement =
(CreateMaterializedViewStatement) statementBase;
RefreshSchemeClause refreshSchemeDesc = createMaterializedViewStatement.getRefreshSchemeDesc();
Assert.assertEquals(MaterializedView.RefreshType.ASYNC, refreshSchemeDesc.getType());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testRefreshManual() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh manual " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select tbl1.k1 ss, k2 from tbl1;";
try {
StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
CreateMaterializedViewStatement createMaterializedViewStatement =
(CreateMaterializedViewStatement) statementBase;
RefreshSchemeClause refreshSchemeDesc = createMaterializedViewStatement.getRefreshSchemeDesc();
Assert.assertEquals(MaterializedView.RefreshType.MANUAL, refreshSchemeDesc.getType());
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testNoRefresh() {
String sql = "create materialized view mv1 " +
"as select tbl1.k1 ss, k2 from tbl1 group by k1, k2;";
try {
StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
Assert.assertTrue(statementBase instanceof CreateMaterializedViewStmt);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testNoRefreshNoSelectStmt() {
String sql = "create materialized view mv1 " +
"as select t1.k1 ss, t1.k2 from tbl1 t1 union select k1, k2 from tbl1 group by tbl1.k1, tbl1.k2;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertTrue(e.getMessage().contains("Materialized view query statement only support select"));
}
}
@Test
public void testSetOperation() throws Exception {
for (String setOp : Arrays.asList("UNION", "UNION ALL", "INTERSECT", "EXCEPT")) {
String sql = String.format("create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select t1.k1 ss, t1.k2 from tbl1 t1 %s select k1, k2 from tbl2 t2;", setOp);
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
Assert.assertThrows("hehe", AnalysisException.class, () -> {
String sql1 = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select t1.k1 ss, t1.k2 from tbl1 t1 union select * from tbl2 t2;";
UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
});
Assert.assertThrows("hehe", AnalysisException.class, () -> {
String sql1 = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select t1.k1 ss, t1.k2 from tbl1 t1 union select k1, k2 from tbl2 t2 union select * from tbl2 t3";
UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
});
}
@Test
public void testAsTableNotInOneDatabase() {
String sql = "create materialized view mv1 " +
"partition by ss " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")" +
"as select t1.k1 ss, t1.k2 from test2.tbl3 t1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.assertEquals("Materialized view do not support table: tbl3 " +
"do not exist in database: test", e.getMessage());
}
}
    @Test
    public void testMySQLTable() throws Exception {
        // An unpartitioned MV over a MySQL external table is allowed...
        String sql1 = "create materialized view mv1 " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ")" +
                "as select tbl1.k1 ss, tbl1.k2 from mysql_external_table tbl1;";
        UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
        // ...but a PARTITIONED MV over a MySQL external table must be rejected.
        String sql2 = "create materialized view mv1 " +
                "partition by ss " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ")" +
                "as select tbl1.k1 ss, tbl1.k2 from mysql_external_table tbl1;";
        try {
            UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
            Assert.fail();
        } catch (Exception e) {
            Assert.assertTrue(e.getMessage()
                    .contains("Materialized view with partition does not support base table type : MYSQL"));
        }
    }
    @Test
    public void testCreateMvFromMv() {
        // Step 1: create a base MV; this must succeed before the nested MV is attempted.
        String sql1 = "create materialized view base_mv " +
                "partition by k1 " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select k1, k2 from tbl1;";
        try {
            StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
            currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        // Step 2: creating an MV on top of the MV created above should also succeed.
        String sql2 = "create materialized view mv_from_base_mv " +
                "partition by k1 " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select k1, k2 from base_mv;";
        try {
            StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
            currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }
    @Test
    public void testCreateMvFromMv2() throws Exception {
        // Same nested-MV scenario as testCreateMvFromMv, but with plain "refresh async"
        // (no START/EVERY clause).
        String sql1 = "create materialized view base_mv2 " +
                "partition by k1 " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select k1, k2 from tbl1;";
        {
            StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
            currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        }
        // The MV-on-MV creation must succeed as well.
        String sql2 = "create materialized view mv_from_base_mv2 " +
                "partition by k1 " +
                "distributed by hash(k2) buckets 10 " +
                "refresh async " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select k1, k2 from base_mv2;";
        try {
            StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
            currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }
@Test
public void testCreateMvFromInactiveMv() {
String sql1 = "create materialized view base_inactive_mv " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, k2 from tbl1;";
try {
StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
MaterializedView baseInactiveMv = ((MaterializedView) testDb.getTable("base_inactive_mv"));
baseInactiveMv.setInactiveAndReason("");
String sql2 = "create materialized view mv_from_base_inactive_mv " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
") " +
"as select k1, k2 from base_inactive_mv;";
try {
StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
} catch (Exception e) {
Assert.assertEquals("Getting analyzing error at line 3, column 24. Detail message: " +
"Create/Rebuild materialized view from inactive materialized view: base_inactive_mv.",
e.getMessage());
}
}
    @Test
    public void testAsHasStar() throws Exception {
        // "select k1 ss, *" must be supported: the MV schema is the aliased column
        // followed by every base-table column, in base-table order.
        String sql = "create materialized view testAsHasStar " +
                "partition by ss " +
                "distributed by hash(ss) buckets 10 " +
                "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select k1 ss, * from tbl1;";
        try {
            StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
            currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
            MaterializedView mv = ((MaterializedView) testDb.getTable("testAsHasStar"));
            // NOTE(review): setInactiveAndReason("") is presumably to stop background
            // refresh while the schema is inspected — confirm intent.
            mv.setInactiveAndReason("");
            List<Column> mvColumns = mv.getFullSchema();
            Table baseTable = testDb.getTable("tbl1");
            List<Column> baseColumns = baseTable.getFullSchema();
            // One extra column: the "ss" alias prepended to the expanded star.
            Assert.assertEquals(mvColumns.size(), baseColumns.size() + 1);
            Assert.assertEquals("ss", mvColumns.get(0).getName());
            for (int i = 1; i < mvColumns.size(); i++) {
                Assert.assertEquals(mvColumns.get(i).getName(),
                        baseColumns.get(i - 1).getName());
            }
        } catch (Exception e) {
            Assert.fail("Select * should be supported in materialized view");
        } finally {
            dropMv("testAsHasStar");
        }
    }
    @Test
    public void testAsHasStarWithSameColumns() throws Exception {
        // Expanding a.* and b.* of a self-join yields duplicate column names,
        // which must be rejected at creation time.
        String sql = "create materialized view testAsHasStar " +
                "partition by ss " +
                "distributed by hash(ss) buckets 10 " +
                "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ") " +
                "as select a.k1 ss, a.*, b.* from tbl1 as a join tbl1 as b on a.k1=b.k1;";
        try {
            StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
            currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
            Assert.fail();
        } catch (Exception e) {
            Assert.assertTrue(e.getMessage().contains("Duplicate column name 'k1'"));
        } finally {
            dropMv("testAsHasStar");
        }
    }
@Test
public void testMVWithSameColumns() throws Exception {
    // Selecting k2 from both sides of a self-join yields two columns with
    // the same name; creation must fail with a duplicate-column error.
    String ddl = "create materialized view testAsHasStar " +
            "partition by ss " +
            "distributed by hash(ss) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select a.k1 ss, a.k2, b.k2 from tbl1 as a join tbl1 as b on a.k1=b.k1;";
    try {
        StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) parsed);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("Duplicate column name 'k2'"));
    } finally {
        dropMv("testAsHasStar");
    }
}
@Test
public void testAsHasStarWithNondeterministicFunction() {
    // rand()/current_date() in the defining query must be rejected during analysis.
    String sql = "create materialized view mv1 " +
            "distributed by hash(ss) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1 ss, * from (select *, rand(), current_date() from tbl1) as t;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // BUG FIX: without this fail() the test passed vacuously when no
        // exception was thrown, never exercising the assertion below.
        Assert.fail("nondeterministic function should be rejected");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 38 to line 3, column 43." +
                " Detail message: Materialized view query statement select item rand()" +
                " not supported nondeterministic function.", e.getMessage());
    }
}
@Test
public void testAsSelectItemAlias1() throws Exception {
    // An unaliased function call becomes a column named after its SQL text.
    String ddl = "create materialized view testAsSelectItemAlias1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) parsed);
        MaterializedView mv = (MaterializedView) testDb.getTable("testAsSelectItemAlias1");
        mv.setInactiveAndReason("");
        List<Column> columns = mv.getFullSchema();
        Assert.assertEquals("date_trunc('month', tbl1.k1)", columns.get(0).getName());
        Assert.assertEquals("k1", columns.get(1).getName());
        Assert.assertEquals("k2", columns.get(2).getName());
    } catch (Exception e) {
        Assert.fail("Materialized view query statement select item " +
                "date_trunc('month', `tbl1`.`k1`) should be supported");
    } finally {
        dropMv("testAsSelectItemAlias1");
    }
}
@Test
public void testAsSelectItemAlias2() throws Exception {
    // Same as testAsSelectItemAlias1 but with a UNION ALL defining query;
    // the synthesized column names must be identical.
    String ddl = "create materialized view testAsSelectItemAlias2 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as " +
            "select date_trunc('month',tbl1.k1), k1, k2 from tbl1 union all " +
            "select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) parsed);
        MaterializedView mv = (MaterializedView) testDb.getTable("testAsSelectItemAlias2");
        mv.setInactiveAndReason("");
        List<Column> columns = mv.getFullSchema();
        Assert.assertEquals("date_trunc('month', tbl1.k1)", columns.get(0).getName());
        Assert.assertEquals("k1", columns.get(1).getName());
        Assert.assertEquals("k2", columns.get(2).getName());
    } finally {
        dropMv("testAsSelectItemAlias2");
    }
}
@Test
public void testAsSelectItemAlias3() {
    // Partitioning by an expression that is not a plain output column of the
    // defining query must be rejected.
    String sql = "create materialized view testAsSelectItemAlias3 " +
            "partition by date_trunc('month',tbl1.k1) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        // BUG FIX: previously the test passed vacuously when no exception
        // was thrown; the catch-block assertion was never guaranteed to run.
        Assert.fail("partition expression not related to a column should be rejected");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("Materialized view partition exp: " +
                "`tbl1`.`k1` must related to column"));
    }
}
@Test
public void testAsSelectItemAlias4() {
    // A function call inside DISTRIBUTED BY HASH(...) is not valid grammar
    // and must fail at parse time.
    String sql = "create materialized view testAsSelectItemAlias4 " +
            "partition by k1 " +
            "distributed by hash(date_trunc('month',tbl1.k1)) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStatement) statementBase);
        // BUG FIX: previously the test passed vacuously when no exception was thrown.
        Assert.fail("function call in hash distribution should be a parse error");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage()
                .contains("No viable statement for input 'distributed by hash(date_trunc('."));
    }
}
@Test
public void testAsSelectItemNoAliasWithNondeterministicFunction1() {
    // rand() as an unaliased select item must be rejected during analysis.
    String sql = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select rand(), date_trunc('month',tbl1.k1), k1, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // BUG FIX: previously the test passed vacuously when no exception was thrown.
        Assert.fail("nondeterministic function should be rejected");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 16. " +
                "Detail message: Materialized view query statement select item rand() not supported " +
                "nondeterministic function.",
                e.getMessage());
    }
}
@Test
public void testAsSelectItemHasNonDeterministicFunction1() {
    // rand() must be rejected even when it carries an alias.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select rand() s1, date_trunc('month',tbl1.k1) ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // BUG FIX: previously the test passed vacuously when no exception was thrown.
        Assert.fail("nondeterministic function should be rejected");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 11 to line 3, column 16. " +
                "Detail message: Materialized view query statement " +
                "select item rand() not supported nondeterministic function.", e.getMessage());
    }
}
@Test
public void testAsSelectItemHasNonDeterministicFunction2() {
    // rand() nested inside an arithmetic expression must also be rejected.
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")" +
            "as select k2, rand()+rand() s1, date_trunc('month',tbl1.k1) ss from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // BUG FIX: previously the test passed vacuously when no exception was thrown.
        Assert.fail("nondeterministic function should be rejected");
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error from line 3, column 15 to line 3, column 20. " +
                "Detail message: Materialized view query statement " +
                "select item rand() not supported nondeterministic function.", e.getMessage());
    }
}
@Test
public void testCreateColocateMvToExitGroup() throws Exception {
    // Creates a sync MV with "colocate_mv"="true" on a base table that
    // already belongs to colocate_group1, then verifies the table reports
    // a colocated MV index and remains in the group after the MV is dropped.
    starRocksAssert.withTable("CREATE TABLE test.colocateTable\n" +
            "(\n" +
            " k1 int,\n" +
            " k2 int,\n" +
            " k3 int\n" +
            ")\n" +
            "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\",\n" +
            "\"colocate_with\" = \"colocate_group1\"\n" +
            ");");
    // Mock: the table counts as colocate-MV-enabled only when it has a
    // colocate group AND every non-base index is marked colocate-MV.
    new MockUp<OlapTable>() {
        @Mock
        public boolean isEnableColocateMVIndex() throws Exception {
            OlapTable table = (OlapTable) testDb.getTable("colocateTable");
            if (Strings.isNullOrEmpty(table.getColocateGroup())) {
                return false;
            }
            return table.getIndexIdToMeta().values().stream()
                    .filter(x -> x.getIndexId() != table.getBaseIndexId())
                    .allMatch(MaterializedIndexMeta::isColocateMVIndex);
        }
    };
    String sql = "create materialized view colocateMv\n" +
            "PROPERTIES (\n" +
            "\"colocate_mv\" = \"true\"\n" +
            ")\n" +
            "as select k1, k2 from colocateTable;";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // Sync (rollup-based) MV, hence the CreateMaterializedViewStmt cast.
        currentState.createMaterializedView((CreateMaterializedViewStmt) statementBase);
        waitingRollupJobV2Finish();
        // The colocate group key is "<dbId>_<groupName>".
        ColocateTableIndex colocateTableIndex = currentState.getColocateTableIndex();
        String fullGroupName = testDb.getId() + "_" + "colocate_group1";
        long tableId = colocateTableIndex.getTableIdByGroup(fullGroupName);
        Assert.assertNotEquals(-1, tableId);
        ColocateTableIndex.GroupId groupId = colocateTableIndex.getGroup(tableId);
        Assert.assertEquals(1, colocateTableIndex.getAllTableIds(groupId).size());
        OlapTable table = (OlapTable) testDb.getTable("colocateTable");
        Assert.assertTrue(table.isEnableColocateMVIndex());
        // Dropping the MV must not evict the base table from its group.
        dropMv("colocateMv");
        Assert.assertTrue(currentState.getColocateTableIndex().isColocateTable(tableId));
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        // Reset global colocate state so later tests start clean.
        currentState.getColocateTableIndex().clear();
    }
}
@Test
public void testCreateColocateMvWithoutGroup() throws Exception {
    // A colocate MV on a table with no colocate group must fail analysis.
    starRocksAssert.withTable("CREATE TABLE test.colocateTable2\n" +
            "(\n" +
            " k1 int,\n" +
            " k2 int,\n" +
            " k3 int\n" +
            ")\n" +
            "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ");");
    // Sanity check: the group does not exist beforehand.
    ColocateTableIndex index = currentState.getColocateTableIndex();
    String fullGroupName = testDb.getId() + "_" + "group2";
    Assert.assertEquals(-1, index.getTableIdByGroup(fullGroupName));
    String ddl = "create materialized view colocateMv2\n" +
            "PROPERTIES (\n" +
            "\"colocate_mv\" = \"true\"\n" +
            ")\n" +
            "as select k1, k2 from colocateTable2;";
    Assert.assertThrows(AnalysisException.class, () -> {
        StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStmt) parsed);
    });
    currentState.getColocateTableIndex().clear();
}
@Test
public void testColocateMvAlterGroup() throws Exception {
    // End-to-end check that colocate MV indexes track ALTER TABLE changes
    // of the base table's colocate group: move group, clear group, drop MVs.
    starRocksAssert.withTable("CREATE TABLE test.colocateTable3\n" +
            "(\n" +
            " k1 int,\n" +
            " k2 int,\n" +
            " k3 int\n" +
            ")\n" +
            "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\",\n" +
            "\"colocate_with\" = \"group3\"\n" +
            ");");
    // Mock: colocate-MV-enabled iff the table has a group and every
    // non-base index is marked as a colocate MV index.
    new MockUp<OlapTable>() {
        @Mock
        public boolean isEnableColocateMVIndex() throws Exception {
            OlapTable table = (OlapTable) testDb.getTable("colocateTable3");
            if (Strings.isNullOrEmpty(table.getColocateGroup())) {
                return false;
            }
            return table.getIndexIdToMeta().values().stream()
                    .filter(x -> x.getIndexId() != table.getBaseIndexId())
                    .allMatch(MaterializedIndexMeta::isColocateMVIndex);
        }
    };
    String sql = "create materialized view colocateMv3\n" +
            "PROPERTIES (\n" +
            "\"colocate_mv\" = \"true\"\n" +
            ")\n" +
            "as select k1, k2 from colocateTable3;";
    String sql2 = "create materialized view colocateMv4\n" +
            "PROPERTIES (\n" +
            "\"colocate_mv\" = \"true\"\n" +
            ")\n" +
            "as select k1, k2 from colocateTable3;";
    try {
        // Build two sync MVs on the same base table; each rollup must finish
        // before the colocate state is inspected.
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStmt) statementBase);
        waitingRollupJobV2Finish();
        statementBase = UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
        currentState.createMaterializedView((CreateMaterializedViewStmt) statementBase);
        waitingRollupJobV2Finish();
        ColocateTableIndex colocateTableIndex = currentState.getColocateTableIndex();
        // Group key format is "<dbId>_<groupName>".
        String fullGroupName = testDb.getId() + "_" + "group3";
        System.out.println(fullGroupName);
        long tableId = colocateTableIndex.getTableIdByGroup(fullGroupName);
        Assert.assertNotEquals(-1, tableId);
        OlapTable table = (OlapTable) testDb.getTable("colocateTable3");
        Assert.assertTrue(table.isEnableColocateMVIndex());
        ColocateTableIndex.GroupId groupId = colocateTableIndex.getGroup(tableId);
        Assert.assertEquals(1, colocateTableIndex.getAllTableIds(groupId).size());
        // Move the base table to a new group: it must stay colocated and
        // keep its colocate MV indexes.
        sql = "alter table colocateTable3 set (\"colocate_with\" = \"groupNew\")";
        statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        StmtExecutor stmtExecutor = new StmtExecutor(connectContext, statementBase);
        stmtExecutor.execute();
        Assert.assertEquals("groupNew", table.getColocateGroup());
        Assert.assertTrue(table.isEnableColocateMVIndex());
        Assert.assertTrue(colocateTableIndex.isColocateTable(tableId));
        // Clear the group: colocation and the MV-index flag must both drop.
        sql = "alter table colocateTable3 set (\"colocate_with\" = \"\")";
        statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        stmtExecutor = new StmtExecutor(connectContext, statementBase);
        stmtExecutor.execute();
        Assert.assertFalse(colocateTableIndex.isColocateTable(tableId));
        Assert.assertFalse(table.isEnableColocateMVIndex());
        Assert.assertNotEquals("group1", table.getColocateGroup());
        dropMv("colocateMv4");
        dropMv("colocateMv3");
        Assert.assertFalse(colocateTableIndex.isColocateTable(tableId));
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        // Reset global colocate state so later tests start clean.
        currentState.getColocateTableIndex().clear();
    }
}
@Test
public void testRandomColocate() {
    // colocate_with is incompatible with random distribution and must fail.
    String ddl = "create materialized view mv1 " +
            "distributed by random " +
            "refresh async " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n," +
            "'colocate_with' = 'hehe' " +
            ")" +
            "as select k2, date_trunc('month',tbl1.k1) ss from tbl1;";
    Assert.assertThrows(SemanticException.class, () -> starRocksAssert.withMaterializedView(ddl));
}
@Test
public void testDisabled() {
    // With the global switch off, creating an async MV must be rejected.
    Config.enable_materialized_view = false;
    String sql = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        // BUG FIX: previously the test passed vacuously when no exception
        // was thrown. Assert.fail throws AssertionError, which the
        // catch (Exception) below does not swallow.
        Assert.fail("creation should fail while enable_materialized_view is false");
    } catch (Exception e) {
        Assert.assertEquals("The experimental mv is disabled", e.getMessage());
    } finally {
        // Always restore the flag for subsequent tests.
        Config.enable_materialized_view = true;
    }
}
@Test
public void testExists() {
    // Creating an MV whose name collides with an existing table must fail.
    String sql = "create materialized view tbl1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        // BUG FIX: removed the unused local and added Assert.fail();
        // previously the test passed vacuously when no exception was thrown.
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Assert.fail("creating an MV named after an existing table should fail");
    } catch (Exception e) {
        Assert.assertEquals("Table 'tbl1' already exists", e.getMessage());
    }
}
@Test
public void testIfNotExists() {
    // IF NOT EXISTS makes the statement analyze cleanly.
    String ddl = "create materialized view if not exists mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testSupportedProperties() {
    // storage_medium / storage_cooldown_time are accepted MV properties.
    String ddl = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\",\n" +
            "\"storage_medium\" = \"SSD\",\n" +
            "\"storage_cooldown_time\" = \"2122-12-31 23:59:59\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
/**
 * Asserts that parsing/analyzing {@code sql} fails with a message containing
 * {@code msg}. Fails the test if the statement parses successfully.
 */
private void assertParseFailWithException(String sql, String msg) {
    try {
        // FIX: the parse result was previously stored in an unused local.
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Assert.fail();
    } catch (Exception e) {
        // Include the actual message in the failure output for easier triage.
        Assert.assertTrue(e.getMessage(), e.getMessage().contains(msg));
    }
}
/**
 * Asserts that {@code sql} parses successfully but that creating the MV
 * fails with a message containing {@code msg}.
 */
private void assertCreateFailWithException(String sql, String msg) {
    CreateMaterializedViewStatement stmt = null;
    try {
        stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the parse-failure cause,
        // making diagnosis impossible; surface the message instead.
        Assert.fail(e.getMessage());
    }
    try {
        currentState.createMaterializedView(stmt);
        Assert.fail();
    } catch (Exception e) {
        // Include the actual message in the failure output for easier triage.
        Assert.assertTrue(e.getMessage(), e.getMessage().contains(msg));
    }
}
@Test
public void testUnSupportedProperties() {
    // "short_key" is not a recognized MV property and must be rejected.
    String ddl = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"short_key\" = \"20\"\n" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    assertCreateFailWithException(ddl, "Invalid parameter Analyze materialized properties failed because unknown " +
            "properties");
}
@Test
public void testCreateMVWithSessionProperties1() {
    // A "session."-prefixed property maps to a session variable and is valid.
    String sql = "create materialized view mv_with_property1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"session.query_timeout\" = \"10000\"" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                        starRocksAssert.getCtx());
        currentState.createMaterializedView(stmt);
        Table mv1 = testDb.getTable("mv_with_property1");
        Assert.assertTrue(mv1 instanceof MaterializedView);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMVWithSessionProperties2() {
    // A session variable without the "session." prefix is an unknown property.
    String ddl = "create materialized view mv_with_property2 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"query_timeout\" = \"10000\"" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    assertCreateFailWithException(ddl, "Invalid parameter Analyze materialized properties failed because unknown " +
            "properties");
}
@Test
public void testCreateMVWithSessionProperties3() {
    // "session."-prefixed but nonexistent variable must be rejected.
    String ddl = "create materialized view mv_with_property3 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"session.query_timeout1\" = \"10000\"" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    assertCreateFailWithException(ddl, "Unknown system variable 'query_timeout1'");
}
@Test
public void testNoDuplicateKey() {
    // Distinct aliases for every select item: creation must succeed.
    String sql = "create materialized view testNoDuplicateKey " +
            "partition by s1 " +
            "distributed by hash(s2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
        currentState.createMaterializedView(stmt);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMvWithSortCols() throws Exception {
    // Verifies that ORDER BY on an async MV determines both the sort keys
    // and which MV columns are marked as key columns; unknown columns fail.
    {
        // Both output columns listed: two sort keys, both become key columns.
        String sql = "create materialized view mv1\n" +
                "distributed by hash(s2)\n" +
                "order by (`k1`, `s2`)\n" +
                "as select tb1.k1, k2 s2 from tbl1 tb1;";
        CreateMaterializedViewStatement createMaterializedViewStatement = (CreateMaterializedViewStatement)
                UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        List<String> keyColumns = createMaterializedViewStatement.getMvColumnItems().stream()
                .filter(Column::isKey).map(Column::getName)
                .collect(Collectors.toList());
        Assert.assertEquals(2, createMaterializedViewStatement.getSortKeys().size());
        Assert.assertEquals(Arrays.asList("k1", "s2"), keyColumns);
    }
    {
        // Ordering by the aliased column only.
        String sql = "create materialized view mv1\n" +
                "distributed by hash(s2)\n" +
                "order by (`s2`)\n" +
                "as select tb1.k1, k2 s2 from tbl1 tb1;";
        CreateMaterializedViewStatement createMaterializedViewStatement = (CreateMaterializedViewStatement)
                UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        List<String> keyColumns = createMaterializedViewStatement.getMvColumnItems().stream()
                .filter(Column::isKey).map(Column::getName)
                .collect(Collectors.toList());
        Assert.assertEquals(Arrays.asList("s2"), keyColumns);
    }
    {
        // Ordering by the non-aliased column only.
        String sql = "create materialized view mv1\n" +
                "distributed by hash(s2)\n" +
                "order by (`k1`)\n" +
                "as select tb1.k1, k2 s2 from tbl1 tb1;";
        CreateMaterializedViewStatement createMaterializedViewStatement = (CreateMaterializedViewStatement)
                UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        List<String> keyColumns = createMaterializedViewStatement.getMvColumnItems().stream()
                .filter(Column::isKey).map(Column::getName)
                .collect(Collectors.toList());
        Assert.assertEquals(Arrays.asList("k1"), keyColumns);
    }
    {
        // k3 is not in the MV's output columns -> analysis error.
        String sql = "create materialized view mv1\n" +
                "distributed by hash(s2)\n" +
                "order by (`k3`)\n" +
                "as select tb1.k1, k2 s2 from tbl1 tb1;";
        Assert.assertThrows(AnalysisException.class,
                () -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
    }
    {
        // Sort column type unsupported for a key column -> analysis error.
        String sql = "create materialized view mv1\n" +
                "distributed by hash(s2)\n" +
                "order by (`c_1_7`)\n" +
                "as select * from t1;";
        Assert.assertThrows(AnalysisException.class,
                () -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
    }
}
@Test
public void testCreateMvWithInvalidSortCols() throws Exception {
    // ORDER BY order is preserved verbatim in the resulting sort keys.
    String ddl = "create materialized view mv1\n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2)\n" +
            "order by (`s2`, `k1`)\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    List<Column> sortKeys = getMaterializedViewKeysChecked(ddl);
    Assert.assertTrue("s2".equals(sortKeys.get(0).getName()));
    Assert.assertTrue("k1".equals(sortKeys.get(1).getName()));
}
@Test
public void testCreateMvWithColocateGroup() throws Exception {
    // An async MV created with 'colocate_with' must register in that group.
    String groupName = name.getMethodName();
    String ddl = "create materialized view mv1 " +
            "partition by ss " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "'colocate_with' = '" + groupName + "'" +
            ") " +
            "as select tbl1.k1 ss, k2 from tbl1;";
    StatementBase parsed = UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
    currentState.createMaterializedView((CreateMaterializedViewStatement) parsed);
    // Group key format is "<dbId>_<groupName>".
    String fullGroupName = testDb.getId() + "_" + groupName;
    long tableId = currentState.getColocateTableIndex().getTableIdByGroup(fullGroupName);
    Assert.assertTrue(tableId > 0);
}
@Test
public void testCreateMvWithHll() {
    // HLL_UNION / BITMAP_UNION / PERCENTILE_UNION aggregates are supported.
    String ddl = "CREATE MATERIALIZED VIEW mv_function\n" +
            "AS SELECT k1,MAX(v2),MIN(v3),SUM(v4),HLL_UNION(v5),BITMAP_UNION(v6),PERCENTILE_UNION(v7)\n" +
            "FROM test.aggregate_table_with_null GROUP BY k1\n" +
            "ORDER BY k1 DESC";
    try {
        UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMvBaseOnView() {
    // Sync MVs cannot be built on a logical view (non-OLAP table).
    String ddl = "CREATE MATERIALIZED VIEW mv1\n" +
            "AS SELECT k1,v2 FROM test.v1";
    try {
        UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertEquals("Getting analyzing error. Detail message: Do not support alter non-OLAP table[v1].",
                e.getMessage());
    }
}
@Test
public void testAggregateTableWithCount() {
    // COUNT() over an aggregate-model base table is not supported in sync MVs.
    String ddl = "CREATE MATERIALIZED VIEW v0 AS SELECT t0_57.c_0_1," +
            " COUNT(t0_57.c_0_0) , MAX(t0_57.c_0_2) , MAX(t0_57.c_0_3) , MIN(t0_57.c_0_4)" +
            " FROM tbl_for_count AS t0_57 GROUP BY t0_57.c_0_1 ORDER BY t0_57.c_0_1;";
    try {
        UtFrameUtils.parseStmtWithNewParser(ddl, connectContext);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(
                e.getMessage().contains("Getting analyzing error. Detail message: Aggregate type table do not " +
                        "support count function in materialized view."));
    }
}
@Test
public void testNoExistDb() {
    // Creating an MV inside a nonexistent database must fail analysis.
    String ddl = "create materialized view unexisted_db1.mv1\n" +
            "partition by s1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    assertParseFailWithException(ddl, "Can not find database:unexisted_db1.");
}
@Test
public void testMvNameInvalid() {
    // A very long MV name is still accepted (no hard length rejection here).
    String sql = "create materialized view mvklajksdjksjkjfksdlkfgkllksdjkgjsdjfjklsdjkfgjkldfkljgljkljklgja\n" +
            "partition by s1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
        currentState.createMaterializedView(stmt);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testMvName1() {
    // An MV name starting with digits is accepted.
    String sql = "create materialized view 22mv\n" +
            "partition by s1\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select date_trunc('month',k1) s1, k2 s2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                connectContext);
        currentState.createMaterializedView(stmt);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionAndDistributionByColumnNameIgnoreCase() {
    // Partition/distribution column lookup is case-insensitive (K1/K2 vs k1/k2).
    String ddl = "create materialized view mv1 " +
            "partition by K1 " +
            "distributed by hash(K2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, tbl1.k2 from tbl1;";
    try {
        CreateMaterializedViewStatement stmt = (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(ddl,
                connectContext);
        currentState.createMaterializedView(stmt);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
@Test
public void testDuplicateColumn() {
    // k1 and K1 collide under case-insensitive column naming.
    String ddl = "create materialized view mv1 " +
            "partition by K1 " +
            "distributed by hash(K2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, K1 from tbl1;";
    assertParseFailWithException(ddl, "Getting analyzing error. Detail message: Duplicate column name 'K1'.");
}
@Test
public void testNoBaseTable() {
    // A defining query with only constants references no base table -> error.
    String ddl = "create materialized view mv1 " +
            "partition by K1 " +
            "distributed by hash(K2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select 1 as k1, 2 as k2";
    assertParseFailWithException(ddl, "Getting analyzing error. Detail message: Can not find base " +
            "table in query statement.");
}
@Test
public void testUseCte() throws Exception {
    // A single CTE in the defining query parses cleanly.
    String singleCte = "create materialized view mv1\n" +
            "DISTRIBUTED BY HASH(k1) BUCKETS 10\n" +
            "REFRESH ASYNC\n" +
            "AS with tbl as\n" +
            "(select * from tbl1)\n" +
            "SELECT k1,k2\n" +
            "FROM tbl;";
    UtFrameUtils.parseStmtWithNewParser(singleCte, connectContext);
    // Multiple CTEs joined in one SELECT also parse cleanly.
    String multiCte = "create materialized view mv1\n" +
            "DISTRIBUTED BY HASH(k1) BUCKETS 10\n" +
            "REFRESH ASYNC AS " +
            "WITH cte1 AS (select k1, k2 from tbl1),\n" +
            " cte2 AS (select count(*) cnt from tbl1)\n" +
            "SELECT cte1.k1, cte2.cnt\n" +
            "FROM cte1, cte2;";
    UtFrameUtils.parseStmtWithNewParser(multiCte, connectContext);
}
@Test
public void testUseSubQuery() throws Exception {
    // A derived-table (subquery) source is supported.
    String sql = "create materialized view mv1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from (select * from tbl1) tbl";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testUseSubQueryWithPartition() throws Exception {
    // Partitioning works over subquery sources, both on a plain column and
    // on a date_trunc expression of one.
    String sql1 = "create materialized view mv1 " +
            "partition by k1 " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select k1, k2 from (select * from tbl1) tbl";
    String sql2 = "create materialized view mv2 " +
            "partition by kk " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select date_trunc('day', k1) as kk, k2 from (select * from tbl1) tbl";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql1, connectContext);
        UtFrameUtils.parseStmtWithNewParser(sql2, connectContext);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testJoinWithPartition() throws Exception {
    // Partition expression over the output of a join of two subqueries.
    String sql = "create materialized view mv1 " +
            "partition by date_trunc('day', k1) " +
            "distributed by hash(k2) buckets 10 " +
            "refresh async START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ") " +
            "as select tb1.kk as k1, tb2.k2 as k2 from (select k1 as kk, k2 from tbl1) tb1 join (select * from tbl2) tb2 on tb1.kk = tb2.k1";
    try {
        UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    } catch (Exception e) {
        // FIX: previously Assert.fail() dropped the failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testPartitionByNotFirstColumn() throws Exception {
    // Partitioning by k1, which is the SECOND select item, must still work;
    // the partition SlotRef should point at slot index 1 of the MV schema.
    starRocksAssert.withMaterializedView("create materialized view mv_with_partition_by_not_first_column" +
            " partition by k1" +
            " distributed by hash(k3) buckets 10" +
            " as select k3, k1, sum(v1) as total from tbl5 group by k3, k1");
    Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
    Table table = db.getTable("mv_with_partition_by_not_first_column");
    Assert.assertTrue(table instanceof MaterializedView);
    MaterializedView mv = (MaterializedView) table;
    PartitionInfo partitionInfo = mv.getPartitionInfo();
    Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
    ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
    List<Expr> partitionExpr = expressionRangePartitionInfo.getPartitionExprs();
    // Exactly one partition expression, and it is a bare column reference.
    Assert.assertEquals(1, partitionExpr.size());
    Assert.assertTrue(partitionExpr.get(0) instanceof SlotRef);
    SlotRef slotRef = (SlotRef) partitionExpr.get(0);
    Assert.assertNotNull(slotRef.getSlotDescriptorWithoutCheck());
    SlotDescriptor slotDescriptor = slotRef.getSlotDescriptorWithoutCheck();
    // Slot id 1 == second column of the MV schema (k1), not the first (k3).
    Assert.assertEquals(1, slotDescriptor.getId().asInt());
}
@Test
public void testHiveMVWithoutPartition() throws Exception {
    // An MV over a Hive external table with no PARTITION BY clause ends up
    // single-partitioned.
    starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW supplier_hive_mv " +
            "DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 10 REFRESH MANUAL AS select s_suppkey, s_nationkey," +
            "sum(s_acctbal) as total_s_acctbal, count(s_phone) as s_phone_count from hive0.tpch.supplier as supp " +
            "group by s_suppkey, s_nationkey order by s_suppkey;");
    Table created = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test").getTable("supplier_hive_mv");
    Assert.assertTrue(created instanceof MaterializedView);
    MaterializedView mv = (MaterializedView) created;
    PartitionInfo partitionInfo = mv.getPartitionInfo();
    Assert.assertTrue(partitionInfo instanceof SinglePartitionInfo);
    Assert.assertEquals(1, mv.getAllPartitions().size());
    starRocksAssert.dropMaterializedView("supplier_hive_mv");
}
@Test
public void testHiveMVJoinWithoutPartition() throws Exception {
    // Same as testHiveMVWithoutPartition, but the defining query joins two
    // Hive tables; the MV is still single-partitioned.
    starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW supplier_nation_hive_mv DISTRIBUTED BY " +
            "HASH(`s_suppkey`) BUCKETS 10 REFRESH MANUAL AS select s_suppkey, n_name, sum(s_acctbal) " +
            "as total_s_acctbal, count(s_phone) as s_phone_count from " +
            "hive0.tpch.supplier as supp join hive0.tpch.nation group by s_suppkey, n_name order by s_suppkey;");
    Table created = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test").getTable("supplier_nation_hive_mv");
    Assert.assertTrue(created instanceof MaterializedView);
    MaterializedView mv = (MaterializedView) created;
    PartitionInfo partitionInfo = mv.getPartitionInfo();
    Assert.assertTrue(partitionInfo instanceof SinglePartitionInfo);
    Assert.assertEquals(1, mv.getAllPartitions().size());
    starRocksAssert.dropMaterializedView("supplier_nation_hive_mv");
}
@Test
public void testHiveMVWithPartition() throws Exception {
    // Partitioning an MV by a Hive table's partition column (l_shipdate):
    // the MV should get an expression-range partition on a DATE column.
    starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW lineitem_supplier_hive_mv \n" +
            "partition by l_shipdate\n" +
            "DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 10\n" +
            "REFRESH MANUAL\n" +
            "AS \n" +
            "select l_shipdate, l_orderkey, l_quantity, l_linestatus, s_name from " +
            "hive0.partitioned_db.lineitem_par join hive0.tpch.supplier where l_suppkey = s_suppkey\n");
    Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
    Table table = db.getTable("lineitem_supplier_hive_mv");
    Assert.assertTrue(table instanceof MaterializedView);
    MaterializedView mv = (MaterializedView) table;
    PartitionInfo partitionInfo = mv.getPartitionInfo();
    Assert.assertTrue(partitionInfo instanceof ExpressionRangePartitionInfo);
    ExpressionRangePartitionInfo expressionRangePartitionInfo = (ExpressionRangePartitionInfo) partitionInfo;
    // Single partition column, named after the Hive partition column and
    // carried over with DATE type.
    Assert.assertEquals(1, expressionRangePartitionInfo.getPartitionColumns().size());
    Column partColumn = expressionRangePartitionInfo.getPartitionColumns().get(0);
    Assert.assertEquals("l_shipdate", partColumn.getName());
    Assert.assertTrue(partColumn.getType().isDate());
    starRocksAssert.dropMaterializedView("lineitem_supplier_hive_mv");
}
@Test
public void testHiveMVAsyncRefresh() throws Exception {
    // An ASYNC MV over a Hive table with an explicit START/EVERY schedule.
    starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW supplier_hive_mv " +
            "DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 10 REFRESH ASYNC START('2122-12-31') EVERY(INTERVAL 1 HOUR) " +
            "AS select s_suppkey, s_nationkey, sum(s_acctbal) as total_s_acctbal, " +
            "count(s_phone) as s_phone_count from hive0.tpch.supplier as supp " +
            "group by s_suppkey, s_nationkey order by s_suppkey;");
    Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
    Table table = db.getTable("supplier_hive_mv");
    Assert.assertTrue(table instanceof MaterializedView);
    MaterializedView mv = (MaterializedView) table;
    PartitionInfo partitionInfo = mv.getPartitionInfo();
    Assert.assertTrue(partitionInfo instanceof SinglePartitionInfo);
    Assert.assertEquals(1, mv.getAllPartitions().size());
    MaterializedView.MvRefreshScheme mvRefreshScheme = mv.getRefreshScheme();
    // Fixed: JUnit's assertEquals takes (expected, actual); the original had
    // the arguments reversed, which garbles the failure message.
    Assert.assertEquals(MaterializedView.RefreshType.ASYNC, mvRefreshScheme.getType());
    MaterializedView.AsyncRefreshContext asyncRefreshContext = mvRefreshScheme.getAsyncRefreshContext();
    Assert.assertEquals("HOUR", asyncRefreshContext.getTimeUnit());
    starRocksAssert.dropMaterializedView("supplier_hive_mv");
}
@Test
public void testHiveMVAsyncRefreshWithException() throws Exception {
// An ASYNC MV on an external (Hive) table must declare a refresh interval;
// creating one without EVERY(...) is expected to raise DdlException.
expectedException.expect(DdlException.class);
expectedException.expectMessage("Materialized view which type is ASYNC need to specify refresh interval " +
"for external table");
// REFRESH ASYNC without START/EVERY — should be rejected.
starRocksAssert.withMaterializedView("CREATE MATERIALIZED VIEW supplier_hive_mv " +
"DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 10 REFRESH ASYNC AS select s_suppkey, s_nationkey," +
"sum(s_acctbal) as total_s_acctbal, count(s_phone) as s_phone_count from hive0.tpch.supplier as supp " +
"group by s_suppkey, s_nationkey order by s_suppkey;");
}
@Test
public void testJdbcTable() throws Exception {
starRocksAssert.withResource("create external resource jdbc0\n" +
"properties (\n" +
" \"type\"=\"jdbc\",\n" +
" \"user\"=\"postgres\",\n" +
" \"password\"=\"changeme\",\n" +
" \"jdbc_uri\"=\"jdbc:postgresql:
" \"driver_url\"=\"https:
" \"driver_class\"=\"org.postgresql.Driver\"\n" +
"); ");
starRocksAssert.withTable("create external table jdbc_tbl (\n" +
" `id` bigint NULL,\n" +
" `data` varchar(200) NULL\n" +
" ) ENGINE=jdbc\n" +
" properties (\n" +
" \"resource\"=\"jdbc0\",\n" +
" \"table\"=\"dest_tbl\"\n" +
" );");
starRocksAssert.withMaterializedView("create materialized view mv_jdbc " +
"distributed by hash(id) refresh deferred manual " +
"as select * from jdbc_tbl;");
}
@Test
public void testCreateRealtimeMV() throws Exception {
    // "refresh incremental" (realtime) MV over external tables: only checks
    // that the statement parses/analyzes without error.
    String createSql = "create materialized view rtmv \n" +
            "refresh incremental " +
            "distributed by hash(l_shipdate) " +
            " as select l_shipdate, l_orderkey, l_quantity, l_linestatus, s_name from " +
            "hive0.partitioned_db.lineitem_par join hive0.tpch.supplier where l_suppkey = s_suppkey\n";
    UtFrameUtils.parseStmtWithNewParser(createSql, connectContext);
}
@Test
public void testCreateSyncMvFromSubquery() {
    // Sync MVs may only select directly from a table; a subquery source must fail.
    String sql = "create materialized view sync_mv_1 as" +
            " select k1, sum(k2) from (select k1, k2 from tbl1 group by k1, k2) a group by k1";
    try {
        starRocksAssert.withMaterializedView(sql);
        // Fixed: the original silently passed when no exception was thrown.
        // (Assert.fail throws AssertionError, which the catch below ignores.)
        Assert.fail("expected creation from a subquery to be rejected");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage(),
                e.getMessage().contains("Materialized view query statement only support direct query from table"));
    }
}
@Test
public void testCreateAsyncMv() {
    Config.enable_experimental_mv = true;
    // Happy path: the first output column becomes the key, the second does not.
    String sql = "create materialized view async_mv_1 distributed by hash(c_1_9) as" +
            " select c_1_9, c_1_4 from t1";
    try {
        starRocksAssert.withMaterializedView(sql);
        MaterializedView mv = (MaterializedView) testDb.getTable("async_mv_1");
        Assert.assertTrue(mv.getFullSchema().get(0).isKey());
        Assert.assertFalse(mv.getFullSchema().get(1).isKey());
    } catch (Exception e) {
        // Fixed: preserve the failure cause instead of a bare Assert.fail().
        Assert.fail(e.getMessage());
    }
    // A single column that cannot serve as a key must be rejected.
    String sql2 = "create materialized view async_mv_1 distributed by hash(c_1_4) as" +
            " select c_1_4 from t1";
    try {
        starRocksAssert.withMaterializedView(sql2);
        // Fixed: the original silently passed when no exception was thrown.
        Assert.fail("expected creation to fail when no column can be a key");
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage(),
                e.getMessage().contains("All columns of materialized view cannot be used for keys."));
    }
}
@Test
public void testCollectAllTableAndView() {
    // A query touching tbl1 plus tbl2 (via NOT IN subquery) should collect both.
    String sql = "select k2,v1 from test.tbl1 where k2 > 0 and v1 not in (select v1 from test.tbl2 where k2 > 0);";
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        Map<TableName, Table> result = AnalyzerUtils.collectAllTableAndView(statementBase);
        // Fixed: assertEquals takes (expected, actual); arguments were reversed.
        Assert.assertEquals(2, result.size());
    } catch (Exception e) {
        LOG.error("Test CollectAllTableAndView failed", e);
        // Fixed: surface the failure cause in the assertion message.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMVWithDifferentDB() {
    // A sync MV can be created in db "test" while the session uses another db.
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        String sql = "create materialized view test.test_mv_use_different_tbl " +
                "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
        CreateMaterializedViewStmt stmt =
                (CreateMaterializedViewStmt) UtFrameUtils.parseStmtWithNewParser(sql, newStarRocksAssert.getCtx());
        // The qualified name wins over the session's current database.
        Assert.assertEquals(stmt.getDBName(), "test");
        Assert.assertEquals(stmt.getMVName(), "test_mv_use_different_tbl");
        currentState.createMaterializedView(stmt);
        waitingRollupJobV2Finish();
        Table table = testDb.getTable("tbl5");
        Assert.assertNotNull(table);
        OlapTable olapTable = (OlapTable) table;
        // The sync MV materializes as an extra (aggregating) index on tbl5.
        Assert.assertTrue(olapTable.getIndexIdToMeta().size() >= 2);
        Assert.assertTrue(olapTable.getIndexIdToMeta().entrySet().stream()
                .anyMatch(x -> x.getValue().getKeysType().isAggregationFamily()));
        newStarRocksAssert.dropDatabase("test_mv_different_db");
        starRocksAssert.dropMaterializedView("test_mv_use_different_tbl");
    } catch (Exception e) {
        // Fixed: a bare Assert.fail() hid the actual failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateMVWithDifferentDB2() {
    // Creating a sync MV in a db other than the base table's db is rejected.
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        Assert.assertThrows(AnalysisException.class, () -> {
            String sql = "create materialized view test_mv_different_db.test_mv_use_different_tbl " +
                    "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
            // Fixed: dropped the unused local the original assigned here.
            UtFrameUtils.parseStmtWithNewParser(sql, newStarRocksAssert.getCtx());
        });
        newStarRocksAssert.dropDatabase("test_mv_different_db");
    } catch (Exception e) {
        // Fixed: a bare Assert.fail() hid the actual failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateAsyncMVWithDifferentDB() {
    // An async MV qualified with "test." lands in db "test" regardless of the
    // session's current database.
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        String sql = "create materialized view test.test_mv_use_different_tbl " +
                "distributed by hash(k1) " +
                "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                        newStarRocksAssert.getCtx());
        Assert.assertEquals(stmt.getTableName().getDb(), "test");
        Assert.assertEquals(stmt.getTableName().getTbl(), "test_mv_use_different_tbl");
        currentState.createMaterializedView(stmt);
        newStarRocksAssert.dropDatabase("test_mv_different_db");
        Table mv1 = testDb.getTable("test_mv_use_different_tbl");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        starRocksAssert.dropMaterializedView("test_mv_use_different_tbl");
    } catch (Exception e) {
        // Fixed: a bare Assert.fail() hid the actual failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateAsyncMVWithDifferentDB2() {
    // Unlike sync MVs, an async MV MAY live in a different db than its base table.
    try {
        ConnectContext newConnectContext = UtFrameUtils.createDefaultCtx();
        StarRocksAssert newStarRocksAssert = new StarRocksAssert(newConnectContext);
        newStarRocksAssert.withDatabase("test_mv_different_db")
                .useDatabase("test_mv_different_db");
        String sql = "create materialized view test_mv_different_db.test_mv_use_different_tbl " +
                "distributed by hash(k1) " +
                "as select k1, sum(v1), min(v2) from test.tbl5 group by k1;";
        CreateMaterializedViewStatement stmt =
                (CreateMaterializedViewStatement) UtFrameUtils.parseStmtWithNewParser(sql,
                        newStarRocksAssert.getCtx());
        Assert.assertEquals(stmt.getTableName().getDb(), "test_mv_different_db");
        Assert.assertEquals(stmt.getTableName().getTbl(), "test_mv_use_different_tbl");
        currentState.createMaterializedView(stmt);
        Database differentDb = currentState.getDb("test_mv_different_db");
        Table mv1 = differentDb.getTable("test_mv_use_different_tbl");
        Assert.assertTrue(mv1 instanceof MaterializedView);
        newStarRocksAssert.dropDatabase("test_mv_different_db");
    } catch (Exception e) {
        // Fixed: a bare Assert.fail() hid the actual failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateSyncMVWithCaseWhenComplexExpression1() {
    // A CASE WHEN expression column in a sync MV becomes a generated "mv_"
    // varchar column of maximum length.
    try {
        String t1 = "CREATE TABLE case_when_t1 (\n" +
                " k1 INT,\n" +
                " k2 char(20))\n" +
                "DUPLICATE KEY(k1)\n" +
                "DISTRIBUTED BY HASH(k1)\n" +
                "PROPERTIES (\n" +
                "\"replication_num\" = \"1\"\n" +
                ")\n";
        starRocksAssert.withTable(t1);
        String mv1 = "create materialized view case_when_mv1 AS SELECT k1, " +
                "(CASE k2 WHEN 'beijing' THEN 'bigcity' ELSE 'smallcity' END) as city FROM case_when_t1;\n";
        starRocksAssert.withMaterializedView(mv1);
        waitingRollupJobV2Finish();
        Table table = testDb.getTable("case_when_t1");
        Assert.assertNotNull(table);
        OlapTable olapTable = (OlapTable) table;
        Assert.assertTrue(olapTable.getIndexIdToMeta().size() >= 2);
        // A non-aggregating MV keeps the duplicate keys type on every index.
        Assert.assertTrue(olapTable.getIndexIdToMeta().entrySet().stream()
                .noneMatch(x -> x.getValue().getKeysType().isAggregationFamily()));
        List<Column> fullSchemas = table.getFullSchema();
        // Fixed: use assertEquals for equality checks instead of assertTrue,
        // so failures report expected vs. actual values.
        Assert.assertEquals(3, fullSchemas.size());
        Column mvColumn = fullSchemas.get(2);
        Assert.assertEquals("mv_city", mvColumn.getName());
        Assert.assertTrue(mvColumn.getType().isVarchar());
        Assert.assertEquals(1048576, mvColumn.getType().getColumnSize());
        starRocksAssert.dropTable("case_when_t1");
    } catch (Exception e) {
        // Fixed: a bare Assert.fail() hid the actual failure cause.
        Assert.fail(e.getMessage());
    }
}
@Test
public void testCreateAsync_Deferred(@Mocked TaskManager taskManager) throws Exception {
    // "refresh deferred ..." must never trigger an immediate refresh task.
    new Expectations() {
        {
            taskManager.executeTask((String) any);
            times = 0;
        }
    };
    String[] deferredMvSqls = {
            "create materialized view deferred_async " +
                    "refresh deferred async distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
            "create materialized view deferred_manual " +
                    "refresh deferred manual distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
            "create materialized view deferred_scheduled " +
                    "refresh deferred async every(interval 1 day) distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
    };
    for (String createSql : deferredMvSqls) {
        starRocksAssert.withMaterializedView(createSql);
    }
}
@Test
public void testCreateAsync_Immediate(@Mocked TaskManager taskManager) throws Exception {
    // "refresh immediate ..." kicks off one refresh task per created MV.
    new Expectations() {
        {
            taskManager.executeTask((String) any);
            times = 3;
        }
    };
    String[] immediateMvSqls = {
            "create materialized view async_immediate " +
                    "refresh immediate async distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
            "create materialized view manual_immediate " +
                    "refresh immediate manual distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
            "create materialized view schedule_immediate " +
                    "refresh immediate async every(interval 1 day) distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
    };
    for (String createSql : immediateMvSqls) {
        starRocksAssert.withMaterializedView(createSql);
    }
}
@Test
public void testCreateAsync_Immediate_Implicit(@Mocked TaskManager taskManager) throws Exception {
    // Omitting deferred/immediate defaults to immediate: one refresh task each.
    new Expectations() {
        {
            taskManager.executeTask((String) any);
            times = 3;
        }
    };
    String[] implicitImmediateSqls = {
            "create materialized view async_immediate_implicit " +
                    "refresh async distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
            "create materialized view manual_immediate_implicit " +
                    "refresh manual distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
            "create materialized view schedule_immediate_implicit " +
                    "refresh async every(interval 1 day) distributed by hash(c_1_9) as" +
                    " select c_1_9, c_1_4 from t1",
    };
    for (String createSql : implicitImmediateSqls) {
        starRocksAssert.withMaterializedView(createSql);
    }
}
// Creates a throwaway MV selecting `expr` and checks the expression text
// itself is used as the generated column alias; always drops the MV after.
private void testMVColumnAlias(String expr) throws Exception {
    final String mvName = "mv_alias";
    try {
        String createSql = String.format(
                "create materialized view %s " +
                        "refresh deferred manual distributed by hash(c_1_9) as" +
                        " select c_1_9, %s from t1", mvName, expr);
        starRocksAssert.withMaterializedView(createSql);
        Table mvTable = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test").getTable(mvName);
        List<String> columnNames =
                mvTable.getBaseSchema().stream().map(Column::getName).collect(Collectors.toList());
        Assert.assertTrue(columnNames.toString(), columnNames.contains(expr));
    } finally {
        starRocksAssert.dropMaterializedView(mvName);
    }
}
@Test
public void testExprAlias() throws Exception {
    // Each expression should round-trip verbatim as its MV column alias.
    String[] exprs = {
            "c_1_9 + 1",
            "char_length(c_1_9)",
            "(char_length(c_1_9)) + 1",
            "(char_length(c_1_9)) + '$'",
            "c_1_9 + c_1_10",
    };
    for (String expr : exprs) {
        testMVColumnAlias(expr);
    }
}
// Looks up a table by db and name, asserting it exists before returning it.
private Table getTable(String dbName, String mvName) {
    Table found = GlobalStateMgr.getCurrentState().getDb(dbName).getTable(mvName);
    Assert.assertNotNull(found);
    return found;
}
// Looks up a table and asserts it is a MaterializedView before casting.
private MaterializedView getMv(String dbName, String mvName) {
    Table found = getTable(dbName, mvName);
    Assert.assertTrue(found instanceof MaterializedView);
    return (MaterializedView) found;
}
@Test
public void testMvNullable() throws Exception {
// Both base tables declare their join columns NOT NULL; the outer joins below
// should make only the outer (possibly-missing) side's columns nullable in
// the MV schema.
starRocksAssert.withTable("create table emps (\n" +
" empid int not null,\n" +
" deptno int not null,\n" +
" name varchar(25) not null,\n" +
" salary double\n" +
")\n" +
"distributed by hash(`empid`) buckets 10\n" +
"properties (\n" +
"\"replication_num\" = \"1\"\n" +
");")
.withTable("create table depts (\n" +
" deptno int not null,\n" +
" name varchar(25) not null\n" +
")\n" +
"distributed by hash(`deptno`) buckets 10\n" +
"properties (\n" +
"\"replication_num\" = \"1\"\n" +
");");
{
// LEFT OUTER JOIN: columns from the right side (depts) become nullable.
starRocksAssert.withMaterializedView("create materialized view mv_nullable" +
" distributed by hash(`empid`) as" +
" select empid, d.deptno, d.name" +
" from emps e left outer join depts d on e.deptno = d.deptno");
MaterializedView mv = getMv("test", "mv_nullable");
Assert.assertFalse(mv.getColumn("empid").isAllowNull());
Assert.assertTrue(mv.getColumn("deptno").isAllowNull());
starRocksAssert.dropMaterializedView("mv_nullable");
}
{
// RIGHT OUTER JOIN: columns from the left side (emps) become nullable.
starRocksAssert.withMaterializedView("create materialized view mv_nullable" +
" distributed by hash(`empid`) as" +
" select empid, d.deptno, d.name" +
" from emps e right outer join depts d on e.deptno = d.deptno");
MaterializedView mv = getMv("test", "mv_nullable");
Assert.assertTrue(mv.getColumn("empid").isAllowNull());
Assert.assertFalse(mv.getColumn("deptno").isAllowNull());
Assert.assertFalse(mv.getColumn("name").isAllowNull());
starRocksAssert.dropMaterializedView("mv_nullable");
}
{
// FULL OUTER JOIN: columns from both sides become nullable.
starRocksAssert.withMaterializedView("create materialized view mv_nullable" +
" distributed by hash(`empid`) as" +
" select empid, d.deptno, d.name" +
" from emps e full outer join depts d on e.deptno = d.deptno");
MaterializedView mv = getMv("test", "mv_nullable");
Assert.assertTrue(mv.getColumn("empid").isAllowNull());
Assert.assertTrue(mv.getColumn("deptno").isAllowNull());
starRocksAssert.dropMaterializedView("mv_nullable");
}
starRocksAssert.dropTable("emps");
starRocksAssert.dropTable("depts");
}
@Test
public void testSelectFromSyncMV() throws Exception {
// Create a sync MV, then query it directly with the [_SYNC_MV_] hint and
// check the plan scans the MV's rollup index rather than the base table.
String sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
// Sync MV creation runs as an async rollup job; block until it finishes.
waitingRollupJobV2Finish();
sql = "select * from sync_mv1 [_SYNC_MV_];";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
// The scan must target the sync_mv1 rollup covering all partitions/tablets.
Assert.assertTrue(explainString.contains("partitions=2/2\n" +
" rollup: sync_mv1\n" +
" tabletRatio=6/6"));
starRocksAssert.dropMaterializedView("sync_mv1");
}
@Test
public void testCreateSyncMV1() throws Exception {
    // "aggregate_table_with_null" already exists as a base table, so a sync MV
    // with the same name must be rejected.
    String duplicateNameSql =
            "create materialized view aggregate_table_with_null as select k1, sum(v1) from tbl1 group by k1;";
    try {
        CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt)
                UtFrameUtils.parseStmtWithNewParser(duplicateNameSql, connectContext);
        GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
        Assert.fail();
    } catch (Throwable e) {
        Assert.assertTrue(e.getMessage().contains("Table [aggregate_table_with_null] already exists in the db test"));
    }
}
@Test
public void testCreateSyncMV2() throws Exception {
    // Creating the same sync MV twice on the same table must fail the second time.
    String sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
    CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
            parseStmtWithNewParser(sql, connectContext);
    GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
    waitingRollupJobV2Finish();
    OlapTable tbl1 = (OlapTable) getTable("test", "tbl1");
    // Fixed: assertNotNull is the idiomatic form of assertTrue(x != null).
    Assert.assertNotNull(tbl1);
    Assert.assertTrue(tbl1.hasMaterializedIndex("sync_mv1"));
    try {
        sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
        createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
                parseStmtWithNewParser(sql, connectContext);
        GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
        Assert.fail();
    } catch (Throwable e) {
        // Fixed: include the message so a wrong error reads clearly in reports.
        Assert.assertTrue(e.getMessage(), e.getMessage().contains("Materialized view[sync_mv1] already exists in " +
                "the table tbl1"));
    }
    starRocksAssert.dropMaterializedView("sync_mv1");
}
@Test
public void testCreateSyncMV3() throws Exception {
    // A sync MV name is unique across tables: reusing "sync_mv1" on a
    // different base table (tbl3) must still fail.
    String sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl1 group by k1;";
    CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
            parseStmtWithNewParser(sql, connectContext);
    GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
    waitingRollupJobV2Finish();
    OlapTable tbl1 = (OlapTable) getTable("test", "tbl1");
    // Fixed: assertNotNull is the idiomatic form of assertTrue(x != null).
    Assert.assertNotNull(tbl1);
    Assert.assertTrue(tbl1.hasMaterializedIndex("sync_mv1"));
    try {
        sql = "create materialized view sync_mv1 as select k1, sum(v1) from tbl3 group by k1;";
        createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
                parseStmtWithNewParser(sql, connectContext);
        GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
        Assert.fail();
    } catch (Throwable e) {
        // Fixed: include the message so a wrong error reads clearly in reports.
        Assert.assertTrue(e.getMessage(), e.getMessage().contains("Materialized view[sync_mv1] already exists " +
                "in table tbl1"));
    }
    starRocksAssert.dropMaterializedView("sync_mv1");
}
@Test
public void testCreateSyncMV_WithUpperColumn() throws Exception {
// Upper-case column names must survive into the sync MV's generated columns
// (e.g. mv_sum_V1) and still be picked up for rollup rewrite.
String sql = "create materialized view UPPER_MV1 as select K1, sum(V1) from TBL1 group by K1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
// Sync MV creation runs as an async rollup job; block until it finishes.
waitingRollupJobV2Finish();
{
// Direct query with the [_SYNC_MV_] hint exposes the generated column name.
sql = "select * from UPPER_MV1 [_SYNC_MV_];";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("PLAN FRAGMENT 0\n" +
" OUTPUT EXPRS:1: K1 | 2: mv_sum_V1\n" +
" PARTITION: UNPARTITIONED"));
}
{
// The equivalent aggregate over the base table should be rewritten onto the
// UPPER_MV1 rollup.
sql = "select K1, sum(V1) from TBL1 group by K1";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("1:AGGREGATE (update serialize)\n" +
" | STREAMING\n" +
" | output: sum(4: mv_sum_V1)\n" +
" | group by: 1: K1\n" +
" | \n" +
" 0:OlapScanNode\n" +
" TABLE: TBL1\n" +
" PREAGGREGATION: ON\n" +
" partitions=2/2\n" +
" rollup: UPPER_MV1"));
}
starRocksAssert.dropMaterializedView("UPPER_MV1");
}
@Test
public void testCreateSyncMV_WithLowerColumn() throws Exception {
// Lower-case counterpart of the test above: mixed-case references (group by
// K1 over column k1) must still resolve and rewrite onto the rollup.
String sql = "create materialized view lower_mv1 as select k1, sum(v1) from tbl1 group by K1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
// Sync MV creation runs as an async rollup job; block until it finishes.
waitingRollupJobV2Finish();
{
sql = "select * from lower_mv1 [_SYNC_MV_];";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("PLAN FRAGMENT 0\n" +
" OUTPUT EXPRS:1: k1 | 2: mv_sum_v1\n" +
" PARTITION: UNPARTITIONED"));
}
{
// Aggregate over the base table should scan the lower_mv1 rollup.
sql = "select K1, sum(v1) from tbl1 group by K1";
Pair<String, ExecPlan> pair = UtFrameUtils.getPlanAndFragment(connectContext, sql);
String explainString = pair.second.getExplainString(StatementBase.ExplainLevel.NORMAL);
Assert.assertTrue(explainString.contains("1:AGGREGATE (update serialize)\n" +
" | STREAMING\n" +
" | output: sum(4: mv_sum_v1)\n" +
" | group by: 1: k1\n" +
" | \n" +
" 0:OlapScanNode\n" +
" TABLE: tbl1\n" +
" PREAGGREGATION: ON\n" +
" partitions=2/2\n" +
" rollup: lower_mv1"));
}
starRocksAssert.dropMaterializedView("lower_mv1");
}
@Test
public void testCreateAsyncDateTruncAndTimeSLice() throws Exception {
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    // All four cases share the same statement shape; only the time_slice
    // arguments vary, so build them from one template.
    String template = "create materialized view mv1\n" +
            "partition by date_trunc('month', k11)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 DAY)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as select time_slice(tb1.k1, %s) as k11, k2 s2 from tbl1 tb1;";
    // A minute-granularity slice nested under date_trunc('month', ...) is valid.
    UtFrameUtils.parseStmtWithNewParser(String.format(template, "interval 5 minute"), connectContext);
    // Coarser or incompatible slice granularities must be rejected.
    for (String badSliceArgs : new String[] {
            "interval 5 year",
            "interval 5 month",
            "interval 5 month, 'ceil'"}) {
        String sql = String.format(template, badSliceArgs);
        Assert.assertThrows(AnalysisException.class,
                () -> UtFrameUtils.parseStmtWithNewParser(sql, connectContext));
    }
}
@Test
public void testMVWithMaxRewriteStaleness() throws Exception {
    // The mv_rewrite_staleness_second property must be parsed into the MV's
    // max rewrite staleness.
    LocalDateTime startTime = LocalDateTime.now().plusSeconds(3);
    String sql = "create materialized view mv_with_rewrite_staleness \n" +
            "partition by date_trunc('month',k1)\n" +
            "distributed by hash(s2) buckets 10\n" +
            "refresh async START('" + startTime.format(DateUtils.DATE_TIME_FORMATTER) +
            "') EVERY(INTERVAL 3 SECOND)\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"," +
            "\"mv_rewrite_staleness_second\" = \"60\"\n" +
            ")\n" +
            "as select tb1.k1, k2 s2 from tbl1 tb1;";
    try {
        Table mv1 = getMaterializedViewChecked(sql);
        MaterializedView materializedView = (MaterializedView) mv1;
        // Fixed: assertEquals takes (expected, actual); arguments were reversed.
        Assert.assertEquals(60, materializedView.getMaxMVRewriteStaleness());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        dropMv("mv_with_rewrite_staleness");
    }
}
@Test
public void testCreateMvWithView() throws Exception {
    // MVs may be defined over (possibly nested) logical views; every variant
    // below should analyze successfully.
    starRocksAssert.withView("create view view_1 as select tb1.k1, k2 s2 from tbl1 tb1;");
    starRocksAssert.withView("create view view_2 as select v1.k1, v1.s2 from view_1 v1;");
    starRocksAssert.withView("create view view_3 as select date_trunc('month',k1) d1, v1.s2 from view_1 v1;");
    // Shared statement skeleton: %1$s = partition expression, %2$s = query.
    String template = "create materialized view mv1\n" +
            "partition by %s\n" +
            "distributed by hash(s2) buckets 10\n" +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"\n" +
            ")\n" +
            "as %s;";
    String[][] cases = {
            {"date_trunc('month',k1)", "select k1, s2 from view_1"},
            {"date_trunc('month',k1)", "select v1.k1, v1.s2 from view_1 v1"},
            {"d1", "select date_trunc('month',k1) d1, v1.s2 from view_1 v1"},
            {"d1", "select v3.d1, v3.s2 from view_3 v3"},
            {"date_trunc('month',k1)", "select v2.k1, v2.s2 from view_2 v2"},
    };
    for (String[] c : cases) {
        UtFrameUtils.parseStmtWithNewParser(String.format(template, c[0], c[1]), connectContext);
    }
}
@Test
public void testMvOnUnion() throws Exception {
// Three structurally-similar source tables; the third adds a nullable
// decimal column `c_total`.
starRocksAssert.withTable("CREATE TABLE `customer_nullable_1` (\n" +
" `c_custkey` int(11) NULL COMMENT \"\",\n" +
" `c_name` varchar(26) NULL COMMENT \"\",\n" +
" `c_address` varchar(41) NULL COMMENT \"\",\n" +
" `c_city` varchar(11) NULL COMMENT \"\",\n" +
" `c_nation` varchar(16) NULL COMMENT \"\",\n" +
" `c_region` varchar(13) NULL COMMENT \"\",\n" +
" `c_phone` varchar(16) NOT NULL COMMENT \"\",\n" +
" `c_mktsegment` varchar(11) NOT NULL COMMENT \"\"\n" +
") ENGINE=OLAP\n" +
"DUPLICATE KEY(`c_custkey`)\n" +
"COMMENT \"OLAP\"\n" +
"DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
");");
starRocksAssert.withTable("CREATE TABLE `customer_nullable_2` (\n" +
" `c_custkey` int(11) NULL COMMENT \"\",\n" +
" `c_name` varchar(26) NULL COMMENT \"\",\n" +
" `c_address` varchar(41) NULL COMMENT \"\",\n" +
" `c_city` varchar(11) NULL COMMENT \"\",\n" +
" `c_nation` varchar(16) NULL COMMENT \"\",\n" +
" `c_region` varchar(13) NULL COMMENT \"\",\n" +
" `c_phone` varchar(16) NOT NULL COMMENT \"\",\n" +
" `c_mktsegment` varchar(11) NOT NULL COMMENT \"\"\n" +
") ENGINE=OLAP\n" +
"DUPLICATE KEY(`c_custkey`)\n" +
"COMMENT \"OLAP\"\n" +
"DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
");");
starRocksAssert.withTable("\n" +
"CREATE TABLE `customer_nullable_3` (\n" +
" `c_custkey` int(11) NULL COMMENT \"\",\n" +
" `c_name` varchar(26) NULL COMMENT \"\",\n" +
" `c_address` varchar(41) NULL COMMENT \"\",\n" +
" `c_city` varchar(11) NULL COMMENT \"\",\n" +
" `c_nation` varchar(16) NULL COMMENT \"\",\n" +
" `c_region` varchar(13) NULL COMMENT \"\",\n" +
" `c_phone` varchar(16) NOT NULL COMMENT \"\",\n" +
" `c_mktsegment` varchar(11) NOT NULL COMMENT \"\",\n" +
" `c_total` decimal(19,6) null default \"0.0\"\n" +
") ENGINE=OLAP\n" +
"DUPLICATE KEY(`c_custkey`)\n" +
"COMMENT \"OLAP\"\n" +
"DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
");");
// UNION ALL MV: branch 1/2 supply literal 0 for `total`, branch 3 supplies
// the decimal column, so the unioned output type must be promoted.
starRocksAssert.withMaterializedView("\n" +
"create materialized view customer_mv\n" +
"distributed by hash(`custkey`)\n" +
"as\n" +
"\n" +
"select\n" +
"\tc_custkey custkey,\n" +
"\tc_name name,\n" +
"\tc_phone phone,\n" +
"\t0 total,\n" +
"\t c_mktsegment segment\n" +
"from customer_nullable_1\n" +
"\n" +
"union all\n" +
"\n" +
"select\n" +
"\tc_custkey custkey,\n" +
"\tnull name,\n" +
"\tnull phone,\n" +
"\t0 total,\n" +
"\t c_mktsegment segment\n" +
"from customer_nullable_2\n" +
"\n" +
"union all\n" +
"\n" +
"select\n" +
"\tc_custkey custkey,\n" +
"\tnull name,\n" +
"\tnull phone,\n" +
"\tc_total total,\n" +
"\t c_mktsegment segment\n" +
"from customer_nullable_3;");
Database db = starRocksAssert.getCtx().getGlobalStateMgr().getDb("test");
MaterializedView mv = (MaterializedView) db.getTable("customer_mv");
// `total` = union of int literal 0 and decimal(19,6) -> decimal type.
Assert.assertTrue(mv.getColumn("total").getType().isDecimalOfAnyVersion());
// `segment` is NOT NULL in every branch, so it stays non-nullable.
Assert.assertFalse(mv.getColumn("segment").isAllowNull());
}
@Test
public void testRandomizeStart() throws Exception {
    // Default behaviour: the start time is randomized within the refresh
    // interval (1 minute), so the offset lies in [0, 60].
    checkRandomizeStart("", 60);
    // mv_randomize_start = -1 disables randomization; allow only the small
    // fixed slack for test execution time.
    checkRandomizeStart("'mv_randomize_start' = '-1'", RANDOMIZE_FIXED_DELTA);
    // An explicit positive value bounds the random offset by that value
    // (plus execution slack).
    checkRandomizeStart("'mv_randomize_start' = '2'", 2 + RANDOMIZE_FIXED_DELTA);
}

// Slack (seconds) granted for the time elapsed between capturing "now" and
// the MV's start time being computed.
private static final long RANDOMIZE_FIXED_DELTA = 5;

// Creates mv_test_randomize with the given extra PROPERTIES clause, asserts
// the async start-time offset from "now" is within [0, maxDelta] seconds,
// and always drops the MV afterwards (the original leaked it on failure).
private void checkRandomizeStart(String extraProperty, long maxDelta) throws Exception {
    String properties = "'replication_num' = '1'" +
            (extraProperty.isEmpty() ? "" : ", " + extraProperty);
    String sql = "create materialized view mv_test_randomize \n" +
            "distributed by hash(k1) buckets 10\n" +
            "refresh async every(interval 1 minute) " +
            "PROPERTIES (\n" +
            properties +
            ")\n" +
            "as " +
            "select tb1.k1, k2, " +
            "array<int>[1,2,3] as type_array, " +
            "map<int, int>{1:2} as type_map, " +
            "parse_json('{\"a\": 1}') as type_json, " +
            "row('c') as type_struct, " +
            "array<json>[parse_json('{}')] as type_array_json " +
            "from tbl1 tb1;";
    long creationSecond = Utils.getLongFromDateTime(LocalDateTime.now());
    try {
        starRocksAssert.withMaterializedView(sql);
        MaterializedView mv = getMv(testDb.getFullName(), "mv_test_randomize");
        long delta = mv.getRefreshScheme().getAsyncRefreshContext().getStartTime() - creationSecond;
        Assert.assertTrue("delta is " + delta, delta >= 0 && delta <= maxDelta);
    } finally {
        starRocksAssert.dropMaterializedView("mv_test_randomize");
    }
}
// Complex types (ARRAY, MAP, JSON, STRUCT, ARRAY<JSON>) must be accepted in an async
// MV's select list; creation is expected to succeed.
@Test
public void testCreateMvWithTypes() throws Exception {
String sql = "create materialized view mv_test_types \n" +
"distributed by hash(k1) buckets 10\n" +
"PROPERTIES (\n" +
"'replication_num' = '1'" +
")\n" +
"as " +
"select tb1.k1, k2, " +
"array<int>[1,2,3] as type_array, " +
"map<int, int>{1:2} as type_map, " +
"parse_json('{\"a\": 1}') as type_json, " +
"row('c') as type_struct, " +
"array<json>[parse_json('{}')] as type_array_json " +
"from tbl1 tb1;";
// withMaterializedView throws if creation fails, which fails the test.
starRocksAssert.withMaterializedView(sql);
}
// A *partitioned* MV over a LIST-partitioned base table is not supported: analysis
// must fail with an explicit error message.
@Test
public void testCreateMaterializedViewOnListPartitionTables1() throws Exception {
String createSQL = "CREATE TABLE test.list_partition_tbl1 (\n" +
" id BIGINT,\n" +
" age SMALLINT,\n" +
" dt VARCHAR(10),\n" +
" province VARCHAR(64) not null\n" +
")\n" +
"ENGINE=olap\n" +
"DUPLICATE KEY(id)\n" +
"PARTITION BY LIST (province) (\n" +
" PARTITION p1 VALUES IN (\"beijing\",\"chongqing\") ,\n" +
" PARTITION p2 VALUES IN (\"guangdong\") \n" +
")\n" +
"DISTRIBUTED BY HASH(id) BUCKETS 10\n" +
"PROPERTIES (\n" +
" \"replication_num\" = \"1\"\n" +
")";
starRocksAssert.withTable(createSQL);
// "partition by province" makes this a partitioned MV, which is the rejected case.
String sql = "create materialized view list_partition_mv1 " +
"partition by province " +
"distributed by hash(dt, province) buckets 10 " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"" +
") " +
"as select dt, province, avg(age) from list_partition_tbl1 group by dt, province;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
// Reaching here means the statement was wrongly accepted.
Assert.fail();
} catch (Exception e) {
Assert.assertTrue(e.getMessage().contains("Materialized view related base table partition type: LIST not supports."));
}
starRocksAssert.dropTable("list_partition_tbl1");
}
// Counterpart of the previous test: an *unpartitioned* MV over a LIST-partitioned
// base table is allowed, so parsing/analysis must succeed.
@Test
public void testCreateMaterializedViewOnListPartitionTables2() throws Exception {
String createSQL = "CREATE TABLE test.list_partition_tbl1 (\n" +
" id BIGINT,\n" +
" age SMALLINT,\n" +
" dt VARCHAR(10),\n" +
" province VARCHAR(64) not null\n" +
")\n" +
"ENGINE=olap\n" +
"DUPLICATE KEY(id)\n" +
"PARTITION BY LIST (province) (\n" +
" PARTITION p1 VALUES IN (\"beijing\",\"chongqing\") ,\n" +
" PARTITION p2 VALUES IN (\"guangdong\") \n" +
")\n" +
"DISTRIBUTED BY HASH(id) BUCKETS 10\n" +
"PROPERTIES (\n" +
" \"replication_num\" = \"1\"\n" +
")";
starRocksAssert.withTable(createSQL);
// Note: no "partition by" clause here — that is the difference from Tables1.
String sql = "create materialized view list_partition_mv1 " +
"distributed by hash(dt, province) buckets 10 " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"" +
") " +
"as select dt, province, avg(age) from list_partition_tbl1 group by dt, province;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
starRocksAssert.dropTable("list_partition_tbl1");
}
// An MV defined over an aliased derived table (subquery) must parse and analyze.
@Test
public void testCreateMaterializedViewWithTableAlias1() throws Exception {
String sql = "create materialized view mv1 " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"" +
") " +
"as select t0.k1, t0.k2, t0.sum as sum0 " +
"from (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t0 where t0.k2 > 10";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
}
// Same as TableAlias1 but with several aliased derived tables joined together
// (the same subquery under three different aliases t0/t1/t2).
@Test
public void testCreateMaterializedViewWithTableAlias2() throws Exception {
String sql = "create materialized view mv1 " +
"partition by k1 " +
"distributed by hash(k2) buckets 10 " +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"" +
") " +
"as select t0.k1, t0.k2, t0.sum as sum0, t1.sum as sum1, t2.sum as sum2 " +
"from (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t0 " +
"left join (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t1 on t0.k1=t1.k2 " +
"left join (select k1, k2, sum(v1) as sum from tbl1 group by k1, k2) t2 on t0.k1=t2.k1;";
try {
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
// MVs defined over logical views (including nested views and views over subqueries)
// must parse and analyze, both for direct selects and joins between views.
@Test
public void testCreateMvWithViewAndSubQuery() throws Exception {
starRocksAssert.withView("create view view_1 as " +
"select k1, s2 from (select tb1.k1, k2 s2 from tbl1 tb1) t where t.k1 > 10;");
// view_2 is a view over view_1 (nested view).
starRocksAssert.withView("create view view_2 as " +
"select k1, s2 from (select v1.k1, v1.s2 from view_1 v1) t where t.k1 > 10;");
// view_3 exposes a date_trunc expression as column d1, used below as MV partition column.
starRocksAssert.withView("create view view_3 as " +
"select d1, s2 from (select date_trunc('month',k1) d1, v1.s2 from view_1 v1)t where d1 is not null;");
{
// Partition-by expression on a view column.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select k1, s2 from view_1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// Join between two views.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select view_1.k1, view_2.s2 from view_1 join view_2 on view_1.k1=view_2.k1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// Partition directly on the view's pre-computed expression column d1.
String sql = "create materialized view mv1\n" +
"partition by d1\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select v3.d1, v3.s2 from view_3 v3;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
{
// NOTE(review): this case duplicates the second one above — possibly intended to
// vary something; confirm whether it can be removed or should differ.
String sql = "create materialized view mv1\n" +
"partition by date_trunc('month',k1)\n" +
"distributed by hash(s2) buckets 10\n" +
"PROPERTIES (\n" +
"\"replication_num\" = \"1\"\n" +
")\n" +
"as select view_1.k1, view_2.s2 from view_1 join view_2 on view_1.k1=view_2.k1;";
UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
}
starRocksAssert.dropView("view_1");
starRocksAssert.dropView("view_2");
starRocksAssert.dropView("view_3");
}
@Test
public void testCreateSynchronousMVOnLakeTable() throws Exception {
String sql = "create materialized view sync_mv1 as select k1, sum(v1) from mocked_cloud_table group by k1;";
CreateMaterializedViewStmt createTableStmt = (CreateMaterializedViewStmt) UtFrameUtils.
parseStmtWithNewParser(sql, connectContext);
Table table = getTable("test", "mocked_cloud_table");
Deencapsulation.setField(table, "type", Table.TableType.CLOUD_NATIVE);
DdlException e = Assert.assertThrows(DdlException.class, () -> {
GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(createTableStmt);
});
Assert.assertTrue(e.getMessage().contains("Creating synchronous materialized view(rollup) is not supported in " +
"shared data clusters.\nPlease use asynchronous materialized view instead.\n" +
"Refer to https:
"/data-definition/CREATE%20MATERIALIZED%20VIEW
}
// Creating a synchronous MV (rollup) on top of another materialized view must fail.
@Test
public void testCreateSynchronousMVOnAnotherMV() throws Exception {
    String sql = "create materialized view sync_mv1 as select k1, sum(v1) from mocked_cloud_table group by k1;";
    CreateMaterializedViewStmt stmt =
            (CreateMaterializedViewStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
    // Pretend the base table is itself an MV.
    Table baseTable = getTable("test", "mocked_cloud_table");
    Deencapsulation.setField(baseTable, "type", Table.TableType.MATERIALIZED_VIEW);
    DdlException e = Assert.assertThrows(DdlException.class,
            () -> GlobalStateMgr.getCurrentState().getMetadata().createMaterializedView(stmt));
    Assert.assertTrue(e.getMessage().contains("Do not support create synchronous materialized view(rollup) on"));
}
/**
 * Parses and executes the given CREATE MATERIALIZED VIEW statement, waits for the
 * asynchronous creation to settle, and returns the resulting MaterializedView from
 * {@code testDb}. Fails the test with the underlying message on any error.
 */
MaterializedView getMaterializedViewChecked(String sql) {
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        CreateMaterializedViewStatement createMaterializedViewStatement =
                (CreateMaterializedViewStatement) statementBase;
        currentState.createMaterializedView(createMaterializedViewStatement);
        // Creation is asynchronous; give the backend time to register the MV.
        ThreadUtil.sleepAtLeastIgnoreInterrupts(4000L);
        TableName mvName = createMaterializedViewStatement.getTableName();
        Table table = testDb.getTable(mvName.getTbl());
        Assert.assertNotNull("materialized view " + mvName.getTbl() + " was not created", table);
        Assert.assertTrue("expected a MaterializedView but got " + table.getClass().getSimpleName(),
                table instanceof MaterializedView);
        return (MaterializedView) table;
    } catch (Exception e) {
        e.printStackTrace();
        // Carry the cause into the failure instead of a bare fail().
        Assert.fail(e.getMessage());
    }
    // Unreachable (fail() throws), but required by the compiler.
    return null;
}
/**
 * Creates the MV described by {@code sql}, returns its key columns, and always drops
 * the MV afterwards so subsequent tests can reuse the same MV name.
 */
List<Column> getMaterializedViewKeysChecked(String sql) {
    String mvName = null;
    try {
        StatementBase statementBase = UtFrameUtils.parseStmtWithNewParser(sql, connectContext);
        CreateMaterializedViewStatement createMaterializedViewStatement =
                (CreateMaterializedViewStatement) statementBase;
        currentState.createMaterializedView(createMaterializedViewStatement);
        // Creation is asynchronous; give the backend time to register the MV.
        ThreadUtil.sleepAtLeastIgnoreInterrupts(4000L);
        TableName mvTableName = createMaterializedViewStatement.getTableName();
        mvName = mvTableName.getTbl();
        Table table = testDb.getTable(mvName);
        Assert.assertNotNull("materialized view " + mvName + " was not created", table);
        Assert.assertTrue("expected a MaterializedView but got " + table.getClass().getSimpleName(),
                table instanceof MaterializedView);
        MaterializedView mv = (MaterializedView) table;
        return mv.getFullSchema().stream().filter(Column::isKey).collect(Collectors.toList());
    } catch (Exception e) {
        e.printStackTrace();
        // Carry the cause into the failure instead of a bare fail().
        Assert.fail(e.getMessage());
    } finally {
        // Plain null check instead of the roundabout !Objects.isNull(...).
        if (mvName != null) {
            try {
                starRocksAssert.dropMaterializedView(mvName);
            } catch (Exception e) {
                Assert.fail("failed to drop materialized view " + mvName + ": " + e.getMessage());
            }
        }
    }
    return Lists.newArrayList();
}
// NOTE(review): the original source had a duplicated @Test annotation here, which is a
// compile error (duplicate annotation on the same method); keep exactly one.
// These tests verify sort-key inference for a *partitioned* MV without an explicit
// ORDER BY: the partition column and the next eligible column become the keys.
// assertTrue(a.equals(b)) is replaced with assertEquals throughout so failures report
// expected/actual values instead of a bare "false".
@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_2() {
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_3, c_1_0 , c_1_4, c_1_5 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_3", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_0", keyColumns.get(1).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_3() {
    // Same columns as _2 but in a different select order; inference result is unchanged.
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_3", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_0", keyColumns.get(1).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_4() {
    // An explicit ORDER BY overrides the default sort-key inference.
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "order by (c_1_0, c_1_3) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_5() {
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_10, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_10", keyColumns.get(0).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_Partitioned_6() {
    String sql = "create materialized view test_mv_sort_key1 " +
            "partition by c_1_3 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_3, c_1_10, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_3", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_10", keyColumns.get(1).getName());
}
// Sort-key inference for *unpartitioned* MVs, mirroring the partitioned cases above.
// assertTrue(a.equals(b)) is replaced with assertEquals so failures show the actual
// inferred key column instead of a bare "false".
@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_1() {
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by hash(c_1_3, c_1_0) buckets 10 " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_3, c_1_0, c_1_4, c_1_5 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_3", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_0", keyColumns.get(1).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_2() {
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_3, c_1_0 , c_1_4, c_1_5 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_3", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_0", keyColumns.get(1).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_3() {
    // Same columns as _2 in a different select order; result unchanged.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_3", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_0", keyColumns.get(1).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_4() {
    // An explicit ORDER BY overrides the default inference.
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "order by (c_1_0, c_1_3) " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_0", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_3", keyColumns.get(1).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_5() {
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_5, c_1_10, c_1_3, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_10", keyColumns.get(0).getName());
}

@Test
public void testCreateMaterializedViewWithoutSortKeys_UnPartitioned_6() {
    String sql = "create materialized view test_mv_sort_key1 " +
            "distributed by random " +
            "PROPERTIES (\n" +
            "\"replication_num\" = \"1\"" +
            ") " +
            "as select c_1_4, c_1_3, c_1_10, c_1_0 from t1";
    List<Column> keyColumns = getMaterializedViewKeysChecked(sql);
    Assert.assertEquals("c_1_3", keyColumns.get(0).getName());
    Assert.assertEquals("c_1_10", keyColumns.get(1).getName());
}
} |
This is easy to obtain a unstandard format content. For example: ``` BEGIN def a(b): retrun b END ``` Should we check it here? | public ParseNode visitCreateFunctionStatement(StarRocksParser.CreateFunctionStatementContext context) {
String functionType = "SCALAR";
boolean isGlobal = context.GLOBAL() != null;
if (context.functionType != null) {
functionType = context.functionType.getText();
}
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
String functionName = qualifiedName.toString();
TypeDef returnTypeDef = new TypeDef(getType(context.returnType), createPos(context.returnType));
Map<String, String> properties = null;
if (context.properties() != null) {
properties = new HashMap<>();
List<Property> propertyList = visit(context.properties().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
String inlineContent = null;
if (context.content != null) {
String text = context.content.getText();
inlineContent = text.substring(5, text.length() - 3);
}
FunctionName fnName = FunctionName.createFnName(functionName);
if (isGlobal) {
if (!Strings.isNullOrEmpty(fnName.getDb())) {
throw new ParsingException(PARSER_ERROR_MSG.invalidUDFName(functionName), qualifiedName.getPos());
}
fnName.setAsGlobalFunction();
}
return new CreateFunctionStmt(functionType, fnName,
getFunctionArgsDef(context.typeList()), returnTypeDef, properties, inlineContent);
} | inlineContent = text.substring(5, text.length() - 3); | public ParseNode visitCreateFunctionStatement(StarRocksParser.CreateFunctionStatementContext context) {
String functionType = "SCALAR";
boolean isGlobal = context.GLOBAL() != null;
if (context.functionType != null) {
functionType = context.functionType.getText();
}
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
String functionName = qualifiedName.toString();
TypeDef returnTypeDef = new TypeDef(getType(context.returnType), createPos(context.returnType));
Map<String, String> properties = null;
if (context.properties() != null) {
properties = new HashMap<>();
List<Property> propertyList = visit(context.properties().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
String inlineContent = null;
if (context.inlineFunction() != null) {
String content = context.inlineFunction().ATTACHMENT().getText();
inlineContent = context.inlineFunction().ATTACHMENT().getText().substring(2, content.length() - 2);
}
FunctionName fnName = FunctionName.createFnName(functionName);
if (isGlobal) {
if (!Strings.isNullOrEmpty(fnName.getDb())) {
throw new ParsingException(PARSER_ERROR_MSG.invalidUDFName(functionName), qualifiedName.getPos());
}
fnName.setAsGlobalFunction();
}
return new CreateFunctionStmt(functionType, fnName,
getFunctionArgsDef(context.typeList()), returnTypeDef, properties, inlineContent);
} | class AstBuilderFactory {
protected AstBuilderFactory() {
}
public AstBuilder create(long sqlMode) {
return new AstBuilder(sqlMode, new IdentityHashMap<>());
}
public AstBuilder create(long sqlMode, IdentityHashMap<ParserRuleContext, List<HintNode>> hintMap) {
return new AstBuilder(sqlMode, hintMap);
}
} | class AstBuilderFactory {
protected AstBuilderFactory() {
}
public AstBuilder create(long sqlMode) {
return new AstBuilder(sqlMode, new IdentityHashMap<>());
}
public AstBuilder create(long sqlMode, IdentityHashMap<ParserRuleContext, List<HintNode>> hintMap) {
return new AstBuilder(sqlMode, hintMap);
}
} |
Yes, the APIS should be changed, here are the final discussion result: ``` post: job/start/ job/stop/ get: job/progress/${id} ``` | protected void channelRead0(final ChannelHandlerContext channelHandlerContext, final FullHttpRequest request) {
String requestPath = request.uri();
String requestBody = request.content().toString(CharsetUtil.UTF_8);
HttpMethod method = request.method();
if (!URL_PATTERN.matcher(requestPath).matches()) {
response("not support request", channelHandlerContext, HttpResponseStatus.BAD_REQUEST, request);
return;
}
if ("/shardingscaling/start".equalsIgnoreCase(requestPath) && method.equals(HttpMethod.POST)) {
startShardingScalingJob(requestBody);
response("start", channelHandlerContext, HttpResponseStatus.OK, request);
return;
}
if (requestPath.contains("/shardingscaling/progress/") && method.equals(HttpMethod.GET)) {
response("progress", channelHandlerContext, HttpResponseStatus.OK, request);
return;
}
if (requestPath.contains("/shardingscaling/stop/") && method.equals(HttpMethod.DELETE)) {
response("stop", channelHandlerContext, HttpResponseStatus.OK, request);
return;
}
response("not support request", channelHandlerContext, HttpResponseStatus.BAD_REQUEST, request);
} | if (requestPath.contains("/shardingscaling/stop/") && method.equals(HttpMethod.DELETE)) { | protected void channelRead0(final ChannelHandlerContext channelHandlerContext, final FullHttpRequest request) {
String requestPath = request.uri();
String requestBody = request.content().toString(CharsetUtil.UTF_8);
HttpMethod method = request.method();
if (!URL_PATTERN.matcher(requestPath).matches()) {
response("not support request", channelHandlerContext, HttpResponseStatus.BAD_REQUEST, request);
return;
}
if ("/shardingscaling/job/start".equalsIgnoreCase(requestPath) && method.equals(HttpMethod.POST)) {
startShardingScalingJob(requestBody);
response("start", channelHandlerContext, HttpResponseStatus.OK, request);
return;
}
if (requestPath.contains("/shardingscaling/job/progress/") && method.equals(HttpMethod.GET)) {
response("progress", channelHandlerContext, HttpResponseStatus.OK, request);
return;
}
if ("/shardingscaling/job/stop".equalsIgnoreCase(requestPath) && method.equals(HttpMethod.POST)) {
response("stop", channelHandlerContext, HttpResponseStatus.OK, request);
return;
}
response("not support request", channelHandlerContext, HttpResponseStatus.BAD_REQUEST, request);
} | class HttpServerHandler extends SimpleChannelInboundHandler<FullHttpRequest> {
private static final Pattern URL_PATTERN = Pattern.compile("(^/shardingscaling/start)|(^/shardingscaling/(progress|stop)/\\d+)",
Pattern.CASE_INSENSITIVE);
private static final Gson GSON = new Gson();
private static final ScalingJobController SCALING_JOB_CONTROLLER = new ScalingJobController();
@Override
/**
* start sharding scaling job.
*
* @param requestBody json format configuration of sharding scaling job
*/
private void startShardingScalingJob(final String requestBody) {
}
/**
* response to client.
*
* @param content content for response
* @param ctx channelHandlerContext
* @param status http response status
* @param request http request
*/
private void response(final String content, final ChannelHandlerContext ctx, final HttpResponseStatus status, final HttpRequest request) {
FullHttpResponse response = new DefaultFullHttpResponse(request.protocolVersion(), status, Unpooled.copiedBuffer(content, CharsetUtil.UTF_8));
response.headers().set(HttpHeaderNames.CONTENT_TYPE, "text/plain;charset=UTF-8");
HttpUtil.setContentLength(response, response.content().readableBytes());
boolean keepAlive = HttpUtil.isKeepAlive(request);
if (keepAlive) {
if (!request.protocolVersion().isKeepAliveDefault()) {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
}
} else {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
}
ChannelFuture future = ctx.writeAndFlush(response);
if (!keepAlive) {
future.addListener(ChannelFutureListener.CLOSE);
}
}
@Override
public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {
log.error("request error", cause);
ctx.close();
}
} | class HttpServerHandler extends SimpleChannelInboundHandler<FullHttpRequest> {
private static final Pattern URL_PATTERN = Pattern.compile("(^/shardingscaling/job/(start|stop))|(^/shardingscaling/job/progress/\\d+)",
Pattern.CASE_INSENSITIVE);
private static final Gson GSON = new Gson();
private static final ScalingJobController SCALING_JOB_CONTROLLER = new ScalingJobController();
@Override
/**
* start sharding scaling job.
*
* @param requestBody json format configuration of sharding scaling job
*/
private void startShardingScalingJob(final String requestBody) {
}
/**
* response to client.
*
* @param content content for response
* @param ctx channelHandlerContext
* @param status http response status
* @param request http request
*/
private void response(final String content, final ChannelHandlerContext ctx, final HttpResponseStatus status, final HttpRequest request) {
FullHttpResponse response = new DefaultFullHttpResponse(request.protocolVersion(), status, Unpooled.copiedBuffer(content, CharsetUtil.UTF_8));
response.headers().set(HttpHeaderNames.CONTENT_TYPE, "text/plain;charset=UTF-8");
HttpUtil.setContentLength(response, response.content().readableBytes());
boolean keepAlive = HttpUtil.isKeepAlive(request);
if (keepAlive) {
if (!request.protocolVersion().isKeepAliveDefault()) {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
}
} else {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE);
}
ChannelFuture future = ctx.writeAndFlush(response);
if (!keepAlive) {
future.addListener(ChannelFutureListener.CLOSE);
}
}
@Override
public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {
log.error("request error", cause);
ctx.close();
}
} |
as above, moved refcounting into SpannerAccessor. | public void setup() {
spannerAccessor = spannerAccessors.get(spannerConfig);
if (spannerAccessor == null) {
synchronized (spannerAccessors) {
spannerAccessor = spannerAccessors.get(spannerConfig);
if (spannerAccessor == null) {
LOG.info("Connecting to {}", spannerConfig);
spannerAccessor = SpannerAccessor.create(spannerConfig);
spannerAccessors.put(spannerConfig, spannerAccessor);
refcounts.putIfAbsent(spannerConfig, new AtomicInteger(0));
}
}
}
int refcount = refcounts.get(spannerConfig).incrementAndGet();
LOG.debug("Setup: Spanner accessor refcount={} for {}", refcount, spannerConfig);
bundleWriteBackoff =
FluentBackoff.DEFAULT
.withMaxCumulativeBackoff(spannerConfig.getMaxCumulativeBackoff().get())
.withInitialBackoff(spannerConfig.getMaxCumulativeBackoff().get().dividedBy(60));
} | synchronized (spannerAccessors) { | public void setup() {
spannerAccessor = SpannerAccessor.getOrCreate(spannerConfig);
bundleWriteBackoff =
FluentBackoff.DEFAULT
.withMaxCumulativeBackoff(spannerConfig.getMaxCumulativeBackoff().get())
.withInitialBackoff(spannerConfig.getMaxCumulativeBackoff().get().dividedBy(60));
} | class WriteToSpannerFn extends DoFn<Iterable<MutationGroup>, Void> {
private final SpannerConfig spannerConfig;
private final FailureMode failureMode;
private static final ConcurrentHashMap<SpannerConfig, SpannerAccessor> spannerAccessors =
new ConcurrentHashMap<>();
private static final ConcurrentHashMap<SpannerConfig, AtomicInteger> refcounts =
new ConcurrentHashMap<>();
private transient volatile SpannerAccessor spannerAccessor;
/* Number of times an aborted write to spanner could be retried */
private static final int ABORTED_RETRY_ATTEMPTS = 5;
/* Error string in Aborted exception during schema change */
private final String errString =
"Transaction aborted. "
+ "Database schema probably changed during transaction, retry may succeed.";
@VisibleForTesting static Sleeper sleeper = Sleeper.DEFAULT;
private final Counter mutationGroupBatchesReceived =
Metrics.counter(WriteGrouped.class, "mutation_group_batches_received");
private final Counter mutationGroupBatchesWriteSuccess =
Metrics.counter(WriteGrouped.class, "mutation_group_batches_write_success");
private final Counter mutationGroupBatchesWriteFail =
Metrics.counter(WriteGrouped.class, "mutation_group_batches_write_fail");
private final Counter mutationGroupsReceived =
Metrics.counter(WriteGrouped.class, "mutation_groups_received");
private final Counter mutationGroupsWriteSuccess =
Metrics.counter(WriteGrouped.class, "mutation_groups_write_success");
private final Counter mutationGroupsWriteFail =
Metrics.counter(WriteGrouped.class, "mutation_groups_write_fail");
private final Counter spannerWriteSuccess =
Metrics.counter(WriteGrouped.class, "spanner_write_success");
private final Counter spannerWriteFail =
Metrics.counter(WriteGrouped.class, "spanner_write_fail");
private final Distribution spannerWriteLatency =
Metrics.distribution(WriteGrouped.class, "spanner_write_latency_ms");
private final Counter spannerWriteTimeouts =
Metrics.counter(WriteGrouped.class, "spanner_write_timeouts");
private final Counter spannerWriteRetries =
Metrics.counter(WriteGrouped.class, "spanner_write_retries");
private final TupleTag<MutationGroup> failedTag;
private transient FluentBackoff bundleWriteBackoff;
WriteToSpannerFn(
SpannerConfig spannerConfig, FailureMode failureMode, TupleTag<MutationGroup> failedTag) {
this.spannerConfig = spannerConfig;
this.failureMode = failureMode;
this.failedTag = failedTag;
}
@Setup
@Teardown
public void teardown() {
int refcount = refcounts.get(spannerConfig).decrementAndGet();
LOG.debug("Teardown: refcount={} for {}", refcount, spannerConfig);
if (refcount <= 0) {
synchronized (spannerAccessors) {
if (refcounts.get(spannerConfig).get() <= 0) {
spannerAccessors.remove(spannerConfig);
LOG.info("Closing {} ", spannerConfig);
spannerAccessor.close();
}
}
}
}
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
Iterable<MutationGroup> mutations = c.element();
try {
mutationGroupBatchesReceived.inc();
mutationGroupsReceived.inc(Iterables.size(mutations));
Iterable<Mutation> batch = Iterables.concat(mutations);
writeMutations(batch);
mutationGroupBatchesWriteSuccess.inc();
mutationGroupsWriteSuccess.inc(Iterables.size(mutations));
return;
} catch (SpannerException e) {
mutationGroupBatchesWriteFail.inc();
if (failureMode == FailureMode.REPORT_FAILURES) {
} else if (failureMode == FailureMode.FAIL_FAST) {
mutationGroupsWriteFail.inc(Iterables.size(mutations));
throw e;
} else {
throw new IllegalArgumentException("Unknown failure mode " + failureMode);
}
}
for (MutationGroup mg : mutations) {
try {
spannerWriteRetries.inc();
writeMutations(mg);
mutationGroupsWriteSuccess.inc();
} catch (SpannerException e) {
mutationGroupsWriteFail.inc();
LOG.warn("Failed to write the mutation group: " + mg, e);
c.output(failedTag, mg);
}
}
}
/*
Spanner aborts all inflight transactions during a schema change. Client is expected
to retry silently. These must not be counted against retry backoff.
*/
private void spannerWriteWithRetryIfSchemaChange(Iterable<Mutation> batch)
throws SpannerException {
for (int retry = 1; ; retry++) {
try {
spannerAccessor.getDatabaseClient().writeAtLeastOnce(batch);
return;
} catch (AbortedException e) {
if (retry >= ABORTED_RETRY_ATTEMPTS) {
throw e;
}
if (e.isRetryable() || e.getMessage().contains(errString)) {
continue;
}
throw e;
}
}
}
/** Write the Mutations to Spanner, handling DEADLINE_EXCEEDED with backoff/retries. */
private void writeMutations(Iterable<Mutation> mutations) throws SpannerException, IOException {
BackOff backoff = bundleWriteBackoff.backoff();
long mutationsSize = Iterables.size(mutations);
while (true) {
Stopwatch timer = Stopwatch.createStarted();
try {
spannerWriteWithRetryIfSchemaChange(mutations);
spannerWriteSuccess.inc();
return;
} catch (SpannerException exception) {
if (exception.getErrorCode() == ErrorCode.DEADLINE_EXCEEDED) {
spannerWriteTimeouts.inc();
long sleepTimeMsecs = backoff.nextBackOffMillis();
if (sleepTimeMsecs == BackOff.STOP) {
LOG.error(
"DEADLINE_EXCEEDED writing batch of {} mutations to Cloud Spanner. "
+ "Aborting after too many retries.",
mutationsSize);
spannerWriteFail.inc();
throw exception;
}
LOG.info(
"DEADLINE_EXCEEDED writing batch of {} mutations to Cloud Spanner, "
+ "retrying after backoff of {}ms\n"
+ "({})",
mutationsSize,
sleepTimeMsecs,
exception.getMessage());
spannerWriteRetries.inc();
try {
sleeper.sleep(sleepTimeMsecs);
} catch (InterruptedException e) {
}
} else {
spannerWriteFail.inc();
throw exception;
}
} finally {
spannerWriteLatency.update(timer.elapsed(TimeUnit.MILLISECONDS));
}
}
}
} | class WriteToSpannerFn extends DoFn<Iterable<MutationGroup>, Void> {
// Immutable configuration captured at construction time; transient members are
// (re)created per worker through the DoFn lifecycle rather than serialized.
private final SpannerConfig spannerConfig;
private final FailureMode failureMode;
// Spanner client wrapper; transient, so presumably initialized during DoFn setup
// and released in teardown — TODO confirm, the @Setup body is not visible here.
private transient SpannerAccessor spannerAccessor;
/* Number of times an aborted write to spanner could be retried */
private static final int ABORTED_RETRY_ATTEMPTS = 5;
/* Error string in Aborted exception during schema change */
private final String errString =
    "Transaction aborted. "
        + "Database schema probably changed during transaction, retry may succeed.";
// Static and @VisibleForTesting so tests can replace it and avoid real sleeps.
@VisibleForTesting static Sleeper sleeper = Sleeper.DEFAULT;
// Metrics, all registered under the WriteGrouped namespace:
// batch-level counts (one "batch" = one bundle element of grouped mutations) ...
private final Counter mutationGroupBatchesReceived =
    Metrics.counter(WriteGrouped.class, "mutation_group_batches_received");
private final Counter mutationGroupBatchesWriteSuccess =
    Metrics.counter(WriteGrouped.class, "mutation_group_batches_write_success");
private final Counter mutationGroupBatchesWriteFail =
    Metrics.counter(WriteGrouped.class, "mutation_group_batches_write_fail");
// ... per-mutation-group counts ...
private final Counter mutationGroupsReceived =
    Metrics.counter(WriteGrouped.class, "mutation_groups_received");
private final Counter mutationGroupsWriteSuccess =
    Metrics.counter(WriteGrouped.class, "mutation_groups_write_success");
private final Counter mutationGroupsWriteFail =
    Metrics.counter(WriteGrouped.class, "mutation_groups_write_fail");
// ... and per-RPC outcome/latency/retry metrics.
private final Counter spannerWriteSuccess =
    Metrics.counter(WriteGrouped.class, "spanner_write_success");
private final Counter spannerWriteFail =
    Metrics.counter(WriteGrouped.class, "spanner_write_fail");
private final Distribution spannerWriteLatency =
    Metrics.distribution(WriteGrouped.class, "spanner_write_latency_ms");
private final Counter spannerWriteTimeouts =
    Metrics.counter(WriteGrouped.class, "spanner_write_timeouts");
private final Counter spannerWriteRetries =
    Metrics.counter(WriteGrouped.class, "spanner_write_retries");
// Side-output tag receiving mutation groups that could not be written
// when failureMode == REPORT_FAILURES.
private final TupleTag<MutationGroup> failedTag;
// Backoff policy for DEADLINE_EXCEEDED retries in writeMutations(); transient,
// so presumably built during DoFn setup — TODO confirm.
private transient FluentBackoff bundleWriteBackoff;
// Captures configuration only; no I/O happens in the constructor.
WriteToSpannerFn(
    SpannerConfig spannerConfig, FailureMode failureMode, TupleTag<MutationGroup> failedTag) {
    this.spannerConfig = spannerConfig;
    this.failureMode = failureMode;
    this.failedTag = failedTag;
}
// BUG FIX: @Setup and @Teardown were both attached to teardown(), so the runner
// would invoke teardown() during DoFn setup and NPE on the still-null transient
// spannerAccessor, and the transient fields were never initialized at all.
// Restore a dedicated @Setup method and keep teardown() as teardown-only.
@Setup
public void setup() {
    // NOTE(review): reconstructed from the transient field declarations; presumably
    // this should mirror upstream Beam SpannerIO's WriteToSpannerFn.setup() —
    // TODO confirm the exact accessor/backoff initialization against upstream.
    spannerAccessor = SpannerAccessor.getOrCreate(spannerConfig);
    bundleWriteBackoff =
        FluentBackoff.DEFAULT
            .withMaxCumulativeBackoff(spannerConfig.getMaxCumulativeBackoff().get())
            .withInitialBackoff(spannerConfig.getMaxCumulativeBackoff().get().dividedBy(60));
}

@Teardown
public void teardown() {
    // Release the Spanner client resources acquired in setup().
    spannerAccessor.close();
}
/**
 * Writes one bundle element (a batch of mutation groups) to Spanner.
 *
 * <p>First attempts the whole batch in a single at-least-once write. On a
 * SpannerException: FAIL_FAST rethrows immediately; REPORT_FAILURES falls through
 * to retry each mutation group individually and emits groups that still fail to
 * the {@code failedTag} side output instead of failing the bundle.
 */
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
    Iterable<MutationGroup> mutations = c.element();
    try {
        mutationGroupBatchesReceived.inc();
        mutationGroupsReceived.inc(Iterables.size(mutations));
        // Flatten all groups into one batch of mutations for a single write RPC.
        Iterable<Mutation> batch = Iterables.concat(mutations);
        writeMutations(batch);
        mutationGroupBatchesWriteSuccess.inc();
        mutationGroupsWriteSuccess.inc(Iterables.size(mutations));
        // Whole batch succeeded; skip the per-group fallback below.
        return;
    } catch (SpannerException e) {
        mutationGroupBatchesWriteFail.inc();
        if (failureMode == FailureMode.REPORT_FAILURES) {
            // Intentionally empty: fall through to the per-group retry loop below.
        } else if (failureMode == FailureMode.FAIL_FAST) {
            mutationGroupsWriteFail.inc(Iterables.size(mutations));
            throw e;
        } else {
            throw new IllegalArgumentException("Unknown failure mode " + failureMode);
        }
    }
    // REPORT_FAILURES fallback: retry each group on its own so one bad group
    // cannot poison the rest of the batch.
    for (MutationGroup mg : mutations) {
        try {
            // Each individual write is a retry of (part of) the failed batch.
            spannerWriteRetries.inc();
            writeMutations(mg);
            mutationGroupsWriteSuccess.inc();
        } catch (SpannerException e) {
            mutationGroupsWriteFail.inc();
            LOG.warn("Failed to write the mutation group: " + mg, e);
            // Route the permanently-failing group to the side output.
            c.output(failedTag, mg);
        }
    }
}
/*
 * A schema change on Spanner aborts every in-flight transaction; the client is
 * expected to retry those silently. Such retries therefore bypass the normal
 * DEADLINE_EXCEEDED backoff accounting, bounded only by ABORTED_RETRY_ATTEMPTS.
 */
private void spannerWriteWithRetryIfSchemaChange(Iterable<Mutation> batch)
    throws SpannerException {
    int attempt = 0;
    while (true) {
        attempt++;
        try {
            spannerAccessor.getDatabaseClient().writeAtLeastOnce(batch);
            return;
        } catch (AbortedException e) {
            // Give up once the retry budget is spent.
            if (attempt >= ABORTED_RETRY_ATTEMPTS) {
                throw e;
            }
            // Only aborts that are retryable (or match the schema-change message)
            // are retried silently; anything else propagates.
            boolean schemaChangeAbort = e.isRetryable() || e.getMessage().contains(errString);
            if (!schemaChangeAbort) {
                throw e;
            }
        }
    }
}
/**
 * Writes the mutations to Cloud Spanner, handling DEADLINE_EXCEEDED with
 * backoff/retries until the backoff policy signals {@link BackOff#STOP}.
 *
 * <p>Schema-change aborts are retried separately (and not counted against the
 * backoff budget) inside {@code spannerWriteWithRetryIfSchemaChange}.
 *
 * @param mutations batch of mutations to write at-least-once
 * @throws SpannerException on a non-retryable error or when retries are exhausted
 * @throws IOException if the backoff implementation fails
 */
private void writeMutations(Iterable<Mutation> mutations) throws SpannerException, IOException {
    BackOff backoff = bundleWriteBackoff.backoff();
    long mutationsSize = Iterables.size(mutations);
    while (true) {
        Stopwatch timer = Stopwatch.createStarted();
        try {
            spannerWriteWithRetryIfSchemaChange(mutations);
            spannerWriteSuccess.inc();
            return;
        } catch (SpannerException exception) {
            if (exception.getErrorCode() == ErrorCode.DEADLINE_EXCEEDED) {
                spannerWriteTimeouts.inc();
                long sleepTimeMsecs = backoff.nextBackOffMillis();
                if (sleepTimeMsecs == BackOff.STOP) {
                    // Retry budget exhausted: fail the write.
                    LOG.error(
                        "DEADLINE_EXCEEDED writing batch of {} mutations to Cloud Spanner. "
                            + "Aborting after too many retries.",
                        mutationsSize);
                    spannerWriteFail.inc();
                    throw exception;
                }
                LOG.info(
                    "DEADLINE_EXCEEDED writing batch of {} mutations to Cloud Spanner, "
                        + "retrying after backoff of {}ms\n"
                        + "({})",
                    mutationsSize,
                    sleepTimeMsecs,
                    exception.getMessage());
                spannerWriteRetries.inc();
                try {
                    sleeper.sleep(sleepTimeMsecs);
                } catch (InterruptedException e) {
                    // BUG FIX: the interrupt was silently swallowed. Restore the
                    // interrupted status so the runner can observe cancellation;
                    // the loop then proceeds through its normal retry/fail paths.
                    Thread.currentThread().interrupt();
                }
            } else {
                // Non-timeout errors are not retried here.
                spannerWriteFail.inc();
                throw exception;
            }
        } finally {
            // Record latency for every attempt, successful or not.
            spannerWriteLatency.update(timer.elapsed(TimeUnit.MILLISECONDS));
        }
    }
}
} |
Nice, thanks for introducing fault tolerance. | private void validateDataCreation(int expectedSize) {
// Resolve the target container from the workload configuration.
final String containerName = _configuration.getCollectionId();
final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(containerName);
LOGGER.info("Validating {} documents were loaded into [{}:{}]",
    expectedSize, _configuration.getDatabaseId(), containerName);
// Run a COUNT(1) query and wait (bounded) for all result pages.
final List<FeedResponse<ObjectNode>> queryItemsResponseList = container
    .queryItems(COUNT_ALL_QUERY, ObjectNode.class)
    .byPage()
    .collectList()
    .block(BULK_LOAD_WAIT_DURATION);
// Extract the single count value; defaults to 0 when the query returned nothing.
// NOTE(review): list.get(0) inside Optional.map throws IndexOutOfBoundsException
// on an empty list rather than yielding 0 — TODO confirm whether an empty
// response is possible here.
final int resultCount = Optional.ofNullable(queryItemsResponseList)
    .map(responseList -> responseList.get(0))
    .map(FeedResponse::getResults)
    .map(list -> list.get(0))
    .map(objectNode -> objectNode.get(COUNT_ALL_QUERY_RESULT_FIELD).intValue())
    .orElse(0);
// Tolerate up to 10% missing documents before declaring the load a failure.
if (resultCount < (expectedSize * 0.90)) {
    throw new IllegalStateException(
        String.format("Number of documents %d in the container %s is less than the expected threshold %f ",
            resultCount, containerName, (expectedSize * 0.90)));
}
} | if (resultCount < (expectedSize * 0.90)) { | private void validateDataCreation(int expectedSize) {
final String containerName = _configuration.getCollectionId();
final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(containerName);
LOGGER.info("Validating {} documents were loaded into [{}:{}]",
expectedSize, _configuration.getDatabaseId(), containerName);
final List<FeedResponse<ObjectNode>> queryItemsResponseList = container
.queryItems(COUNT_ALL_QUERY, ObjectNode.class)
.byPage()
.collectList()
.block(BULK_LOAD_WAIT_DURATION);
final int resultCount = Optional.ofNullable(queryItemsResponseList)
.map(responseList -> responseList.get(0))
.map(FeedResponse::getResults)
.map(list -> list.get(0))
.map(objectNode -> objectNode.get(COUNT_ALL_QUERY_RESULT_FIELD).intValue())
.orElse(0);
if (resultCount < (expectedSize * 0.90)) {
throw new IllegalStateException(
String.format("Number of documents %d in the container %s is less than the expected threshold %f ",
resultCount, containerName, (expectedSize * 0.90)));
}
} | class DataLoader {
private static final Logger LOGGER = LoggerFactory.getLogger(DataLoader.class);
// Bulk-executor tuning: documents per micro-batch and concurrent micro-batches.
private static final int MAX_BATCH_SIZE = 10000;
private static final int BULK_OPERATION_CONCURRENCY = 5;
// Upper bound for both the bulk write and the validation query to complete.
private static final Duration BULK_LOAD_WAIT_DURATION = Duration.ofSeconds(120);
private static final String COUNT_ALL_QUERY = "SELECT COUNT(1) FROM c";
// Field name Cosmos uses for the anonymous COUNT(1) projection.
private static final String COUNT_ALL_QUERY_RESULT_FIELD = "$1";
private final Configuration _configuration;
private final CosmosAsyncClient _client;
private final DataGenerator _dataGenerator;
// Validates its dependencies and sizes the generator from the configuration.
public DataLoader(final Configuration configuration, final CosmosAsyncClient client) {
    _configuration = Preconditions.checkNotNull(configuration,
        "The Workload configuration defining the parameters can not be null");
    _client = Preconditions.checkNotNull(client,
        "The CosmosAsyncClient needed for data loading can not be null");
    _dataGenerator = new DataGenerator(_configuration.getNumberOfPreCreatedDocuments());
}
/**
 * Drains the data generator batch by batch, bulk-writing each batch into the
 * target container, then verifies the resulting document count.
 */
public void loadData() {
    LOGGER.info("Starting batched data loading, loading {} documents in each iteration", DataGenerator.BATCH_SIZE);
    boolean moreBatches = _dataGenerator.hasNext();
    while (moreBatches) {
        final Map<Key, ObjectNode> batch = _dataGenerator.next();
        bulkCreateItems(batch);
        // Release the batch's documents eagerly to keep memory bounded.
        batch.clear();
        moreBatches = _dataGenerator.hasNext();
    }
    validateDataCreation(_dataGenerator.getGeneratedKeys().size());
}
/**
 * Bulk-writes one batch of generated documents into the configured container,
 * blocking (bounded) until every operation has been processed.
 */
private void bulkCreateItems(final Map<Key, ObjectNode> records) {
    final String containerName = _configuration.getCollectionId();
    final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
    final CosmosAsyncContainer container = database.getContainer(containerName);
    final List<CosmosItemOperation> cosmosItemOperations = mapToCosmosItemOperation(records);
    LOGGER.info("Bulk loading {} documents in [{}:{}]", cosmosItemOperations.size(),
        database.getId(),
        containerName);
    // Configure micro-batch size/concurrency for the bulk executor.
    final BulkProcessingOptions<Object> bulkProcessingOptions =
        new BulkProcessingOptions<>(Object.class)
            .setMaxMicroBatchSize(MAX_BATCH_SIZE)
            .setMaxMicroBatchConcurrency(BULK_OPERATION_CONCURRENCY);
    container
        .processBulkOperations(Flux.fromIterable(cosmosItemOperations), bulkProcessingOptions)
        .blockLast(BULK_LOAD_WAIT_DURATION);
    LOGGER.info("Completed loading {} documents into [{}:{}]", cosmosItemOperations.size(),
        database.getId(),
        containerName);
}
/**
 * Translates each generated (key, document) pair into a createItem bulk
 * operation targeting the record's partition key.
 *
 * @param records data to load into the container
 * @return one CosmosItemOperation (createItem) per record
 */
private List<CosmosItemOperation> mapToCosmosItemOperation(final Map<Key, ObjectNode> records) {
    return records.entrySet()
        .stream()
        .map(entry -> BulkOperations.getCreateItemOperation(
            entry.getValue(),
            new PartitionKey(entry.getKey().getPartitioningKey())))
        .collect(Collectors.toList());
}
/**
 * @return Set of Keys representing each document loaded into the test collection;
 *     backed by the generator's record of every key it produced.
 */
public Set<Key> getLoadedDataKeys() {
    return _dataGenerator.getGeneratedKeys();
}
} | class DataLoader {
private static final Logger LOGGER = LoggerFactory.getLogger(DataLoader.class);
// Tuning knobs for the Cosmos bulk executor (micro-batch size / concurrency).
private static final int MAX_BATCH_SIZE = 10000;
private static final int BULK_OPERATION_CONCURRENCY = 5;
// Bound on how long bulk writes and the validation query may block.
private static final Duration BULK_LOAD_WAIT_DURATION = Duration.ofSeconds(120);
private static final String COUNT_ALL_QUERY = "SELECT COUNT(1) FROM c";
// Name Cosmos assigns to the anonymous COUNT(1) projection in the result JSON.
private static final String COUNT_ALL_QUERY_RESULT_FIELD = "$1";
private final Configuration _configuration;
private final CosmosAsyncClient _client;
private final DataGenerator _dataGenerator;
// Null-checks the collaborators and sizes the generator from configuration.
public DataLoader(final Configuration configuration, final CosmosAsyncClient client) {
    _configuration = Preconditions.checkNotNull(configuration,
        "The Workload configuration defining the parameters can not be null");
    _client = Preconditions.checkNotNull(client,
        "The CosmosAsyncClient needed for data loading can not be null");
    _dataGenerator = new DataGenerator(_configuration.getNumberOfPreCreatedDocuments());
}
/**
 * Loads all generated documents into the container in batches, then validates
 * that the expected number of documents is present.
 */
public void loadData() {
    LOGGER.info("Starting batched data loading, loading {} documents in each iteration", DataGenerator.BATCH_SIZE);
    while (_dataGenerator.hasNext()) {
        final Map<Key, ObjectNode> newDocuments = _dataGenerator.next();
        bulkCreateItems(newDocuments);
        // Free each batch promptly to keep peak memory low.
        newDocuments.clear();
    }
    validateDataCreation(_dataGenerator.getGeneratedKeys().size());
}
/**
 * Writes one batch of records into the configured container using the bulk API,
 * blocking (bounded by BULK_LOAD_WAIT_DURATION) until all operations finish.
 */
private void bulkCreateItems(final Map<Key, ObjectNode> records) {
    final List<CosmosItemOperation> cosmosItemOperations = mapToCosmosItemOperation(records);
    final String containerName = _configuration.getCollectionId();
    final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
    final CosmosAsyncContainer container = database.getContainer(containerName);
    LOGGER.info("Bulk loading {} documents in [{}:{}]", cosmosItemOperations.size(),
        database.getId(),
        containerName);
    // Configure micro-batching before handing the operations to the executor.
    final BulkProcessingOptions<Object> bulkProcessingOptions = new BulkProcessingOptions<>(Object.class);
    bulkProcessingOptions.setMaxMicroBatchSize(MAX_BATCH_SIZE)
        .setMaxMicroBatchConcurrency(BULK_OPERATION_CONCURRENCY);
    container.processBulkOperations(Flux.fromIterable(cosmosItemOperations), bulkProcessingOptions)
        .blockLast(BULK_LOAD_WAIT_DURATION);
    LOGGER.info("Completed loading {} documents into [{}:{}]", cosmosItemOperations.size(),
        database.getId(),
        containerName);
}
/**
 * Map the generated data to createItem requests in the underlying container
 *
 * @param records Data we want to load into the container
 * @return List of CosmosItemOperation, each mapping to a createItem for that record
 */
private List<CosmosItemOperation> mapToCosmosItemOperation(final Map<Key, ObjectNode> records) {
    return records.entrySet()
        .stream()
        .map(record -> {
            // Each document is routed by the partitioning key carried on its Key.
            final String partitionKey = record.getKey().getPartitioningKey();
            final ObjectNode value = record.getValue();
            return BulkOperations.getCreateItemOperation(value, new PartitionKey(partitionKey));
        })
        .collect(Collectors.toList());
}
/**
 * @return Set of Keys representing each document loaded into the test collection,
 *     as recorded by the data generator.
 */
public Set<Key> getLoadedDataKeys() {
    return _dataGenerator.getGeneratedKeys();
}
} |
We should not rely on "the last one is running"; better to filter by state, so that we can unify the logic just the same as batch cancel. | public boolean cancelLoadJob(CancelLoadStmt stmt, boolean isAccurateMatch) throws DdlException {
// FIX (per review): the accurate-match branch used to cancel only the LAST
// matching job and relied on it being the running one; instead, filter every
// match by state so accurate and fuzzy matching share one code path, the same
// as batch cancel.
String dbName = stmt.getDbName();
String label = stmt.getLabel();
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
    throw new DdlException("Db does not exist. name: " + dbName);
}
List<LoadJob> loadJobs = Lists.newArrayList();
readLock();
try {
    Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
    if (labelToLoadJobs == null) {
        throw new DdlException("Load job does not exist");
    }
    // Collect candidates: exact label hit, or substring match in fuzzy mode.
    List<LoadJob> matchLoadJobs = Lists.newArrayList();
    if (isAccurateMatch) {
        if (labelToLoadJobs.containsKey(label)) {
            matchLoadJobs.addAll(labelToLoadJobs.get(label));
        }
    } else {
        for (Map.Entry<String, List<LoadJob>> entry : labelToLoadJobs.entrySet()) {
            if (entry.getKey().contains(label)) {
                matchLoadJobs.addAll(entry.getValue());
            }
        }
    }
    if (matchLoadJobs.isEmpty()) {
        throw new DdlException("Load job does not exist");
    }
    // Unified path: only jobs that are not already terminal can be cancelled.
    List<LoadJob> uncompletedLoadJob = matchLoadJobs.stream().filter(job -> {
        JobState state = job.getState();
        return state != JobState.CANCELLED && state != JobState.QUORUM_FINISHED && state != JobState.FINISHED;
    }).collect(Collectors.toList());
    if (uncompletedLoadJob.isEmpty()) {
        throw new DdlException("There is no uncompleted job which label " +
            (isAccurateMatch ? "is " : "like ") + stmt.getLabel());
    }
    loadJobs.addAll(uncompletedLoadJob);
} finally {
    readUnlock();
}
// Authorization: check table-level LOAD privilege for every touched table,
// falling back to a db-level check when the jobs expose no table names.
Set<String> tableNames = Sets.newHashSet();
for (LoadJob loadJob : loadJobs) {
    tableNames.addAll(loadJob.getTableNames());
}
if (tableNames.isEmpty()) {
    if (Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
            PrivPredicate.LOAD)) {
        ErrorReport.reportDdlException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "CANCEL LOAD");
    }
} else {
    for (String tblName : tableNames) {
        if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, tblName,
                PrivPredicate.LOAD)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "CANCEL LOAD",
                ConnectContext.get().getQualifiedUser(),
                ConnectContext.get().getRemoteIP(), tblName);
        }
    }
}
// Cancel every selected job; surface the first failure with its reason.
for (LoadJob loadJob : loadJobs) {
    List<String> failedMsg = Lists.newArrayList();
    boolean ok = cancelLoadJob(loadJob, CancelType.USER_CANCEL, "user cancel", failedMsg);
    if (!ok) {
        throw new DdlException("Cancel load job [" + loadJob.getId() + "] fail, " +
            "label=[" + loadJob.getLabel() + "] failed msg=" +
            (failedMsg.isEmpty() ? "Unknown reason" : failedMsg.get(0)));
    }
}
return true;
} | public boolean cancelLoadJob(CancelLoadStmt stmt, boolean isAccurateMatch) throws DdlException {
String dbName = stmt.getDbName();
String label = stmt.getLabel();
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
    throw new DdlException("Db does not exist. name: " + dbName);
}
List<LoadJob> loadJobs = Lists.newArrayList();
readLock();
try {
    Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
    if (labelToLoadJobs == null) {
        throw new DdlException("Load job does not exist");
    }
    // Candidate jobs: exact label in accurate mode, substring match otherwise.
    List<LoadJob> matchLoadJobs = Lists.newArrayList();
    if (isAccurateMatch) {
        if (labelToLoadJobs.containsKey(label)) {
            matchLoadJobs.addAll(labelToLoadJobs.get(label));
        }
    } else {
        for (Map.Entry<String, List<LoadJob>> entry : labelToLoadJobs.entrySet()) {
            if (entry.getKey().contains(label)) {
                matchLoadJobs.addAll(entry.getValue());
            }
        }
    }
    if (matchLoadJobs.isEmpty()) {
        throw new DdlException("Load job does not exist");
    }
    // Unified state filter (both match modes): only non-terminal jobs are cancellable.
    List<LoadJob> uncompletedLoadJob = matchLoadJobs.stream().filter(job -> {
        JobState state = job.getState();
        return state != JobState.CANCELLED && state != JobState.QUORUM_FINISHED && state != JobState.FINISHED;
    }).collect(Collectors.toList());
    if (uncompletedLoadJob.isEmpty()) {
        throw new DdlException("There is no uncompleted job which label " +
            (isAccurateMatch ? "is " : "like ") + stmt.getLabel());
    }
    loadJobs.addAll(uncompletedLoadJob);
} finally {
    readUnlock();
}
// Privilege check: table-level LOAD on every involved table, or db-level when
// the jobs report no table names.
Set<String> tableNames = Sets.newHashSet();
for (LoadJob loadJob : loadJobs) {
    tableNames.addAll(loadJob.getTableNames());
}
if (tableNames.isEmpty()) {
    if (Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
            PrivPredicate.LOAD)) {
        ErrorReport.reportDdlException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "CANCEL LOAD");
    }
} else {
    for (String tblName : tableNames) {
        if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, tblName,
                PrivPredicate.LOAD)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "CANCEL LOAD",
                ConnectContext.get().getQualifiedUser(),
                ConnectContext.get().getRemoteIP(), tblName);
        }
    }
}
// Cancel each selected job, propagating the first failure reason.
for (LoadJob loadJob : loadJobs) {
    List<String> failedMsg = Lists.newArrayList();
    boolean ok = cancelLoadJob(loadJob, CancelType.USER_CANCEL, "user cancel", failedMsg);
    if (!ok) {
        throw new DdlException("Cancel load job [" + loadJob.getId() + "] fail, " +
            "label=[" + loadJob.getLabel() + "] failed msg=" +
            (failedMsg.isEmpty() ? "Unknown reason" : failedMsg.get(0)));
    }
}
return true;
} | class Load {
private static final Logger LOG = LogManager.getLogger(Load.class);
public static final String VERSION = "v1";
// Legal job state transitions; populated in the static initializer below.
private static final Map<JobState, Set<JobState>> STATE_CHANGE_MAP = Maps.newHashMap();
// Hadoop DPP configuration: global default plus per-cluster overrides.
public static DppConfig dppDefaultConfig = null;
public static Map<String, DppConfig> clusterToDppConfig = Maps.newHashMap();
// Primary index of all load jobs, plus secondary views by db and by (db, label).
private Map<Long, LoadJob> idToLoadJob;
private Map<Long, List<LoadJob>> dbToLoadJobs;
private Map<Long, Map<String, List<LoadJob>>> dbLabelToLoadJobs;
// Per-state job indexes (linked maps preserve submission order).
private Map<Long, LoadJob> idToPendingLoadJob;
private Map<Long, LoadJob> idToEtlLoadJob;
private Map<Long, LoadJob> idToLoadingLoadJob;
private Map<Long, LoadJob> idToQuorumFinishedLoadJob;
// Partitions currently being written by some loading job.
private Set<Long> loadingPartitionIds;
// Mini-load label registration per db, to deduplicate concurrent submissions.
private Map<Long, Map<String, Long>> dbToMiniLabels;
// Delete bookkeeping: history infos, delete jobs, and partitions under delete.
private Map<Long, List<DeleteInfo>> dbToDeleteInfos;
private Map<Long, List<LoadJob>> dbToDeleteJobs;
private Set<Long> partitionUnderDelete;
private Map<Long, AsyncDeleteJob> idToQuorumFinishedDeleteJob;
private volatile LoadErrorHub.Param loadErrorHubParam = new LoadErrorHub.Param();
// Guards all of the mutable maps/sets above (see readLock/writeLock helpers).
private ReentrantReadWriteLock lock;
// One-time setup: build the legal state-transition table and parse the DPP
// (Hadoop ETL) configuration from the FE config strings.
static {
    Set<JobState> pendingDestStates = Sets.newHashSet();
    pendingDestStates.add(JobState.ETL);
    pendingDestStates.add(JobState.CANCELLED);
    STATE_CHANGE_MAP.put(JobState.PENDING, pendingDestStates);
    Set<JobState> etlDestStates = Sets.newHashSet();
    etlDestStates.add(JobState.LOADING);
    etlDestStates.add(JobState.CANCELLED);
    STATE_CHANGE_MAP.put(JobState.ETL, etlDestStates);
    Set<JobState> loadingDestStates = Sets.newHashSet();
    loadingDestStates.add(JobState.FINISHED);
    loadingDestStates.add(JobState.QUORUM_FINISHED);
    loadingDestStates.add(JobState.CANCELLED);
    STATE_CHANGE_MAP.put(JobState.LOADING, loadingDestStates);
    // QUORUM_FINISHED may only advance to FINISHED (no cancellation).
    Set<JobState> quorumFinishedDestStates = Sets.newHashSet();
    quorumFinishedDestStates.add(JobState.FINISHED);
    STATE_CHANGE_MAP.put(JobState.QUORUM_FINISHED, quorumFinishedDestStates);
    Gson gson = new Gson();
    try {
        // Default config applies to every cluster; per-cluster entries override it.
        Map<String, String> defaultConfig =
            (HashMap<String, String>) gson.fromJson(Config.dpp_default_config_str, HashMap.class);
        dppDefaultConfig = DppConfig.create(defaultConfig);
        Map<String, Map<String, String>> clusterToConfig =
            (HashMap<String, Map<String, String>>) gson.fromJson(Config.dpp_config_str, HashMap.class);
        for (Entry<String, Map<String, String>> entry : clusterToConfig.entrySet()) {
            String cluster = entry.getKey();
            DppConfig dppConfig = dppDefaultConfig.getCopiedDppConfig();
            dppConfig.update(DppConfig.create(entry.getValue()));
            dppConfig.check();
            clusterToDppConfig.put(cluster, dppConfig);
        }
        if (!clusterToDppConfig.containsKey(Config.dpp_default_cluster)) {
            throw new LoadException("Default cluster not exist");
        }
    } catch (Throwable e) {
        // Malformed DPP config is fatal: the FE cannot run without it.
        LOG.error("dpp default config ill-formed", e);
        System.exit(-1);
    }
}
// Initializes every index empty; linked maps keep jobs in submission order.
// The lock is fair so writers are not starved by a stream of readers.
public Load() {
    idToLoadJob = Maps.newHashMap();
    dbToLoadJobs = Maps.newHashMap();
    dbLabelToLoadJobs = Maps.newHashMap();
    idToPendingLoadJob = Maps.newLinkedHashMap();
    idToEtlLoadJob = Maps.newLinkedHashMap();
    idToLoadingLoadJob = Maps.newLinkedHashMap();
    idToQuorumFinishedLoadJob = Maps.newLinkedHashMap();
    loadingPartitionIds = Sets.newHashSet();
    dbToMiniLabels = Maps.newHashMap();
    dbToDeleteInfos = Maps.newHashMap();
    dbToDeleteJobs = Maps.newHashMap();
    partitionUnderDelete = Sets.newHashSet();
    idToQuorumFinishedDeleteJob = Maps.newLinkedHashMap();
    lock = new ReentrantReadWriteLock(true);
}
// Acquires the shared read lock guarding the job indexes.
public void readLock() {
    lock.readLock().lock();
}
// Releases the shared read lock acquired via readLock().
public void readUnlock() {
    lock.readLock().unlock();
}
// Acquires the exclusive write lock guarding the job indexes.
private void writeLock() {
    lock.writeLock().lock();
}
// Releases the exclusive write lock acquired via writeLock().
private void writeUnlock() {
    lock.writeLock().unlock();
}
/**
 * Legacy entry point for mini load: translates a Thrift mini-load request into a
 * LoadStmt and submits it as an EtlJobType.MINI job.
 *
 * @param request the Thrift mini-load request (db, table, label, files, properties)
 * @return true when the job was submitted; false when the label was already registered
 * @throws DdlException on validation failure or submission error
 * @deprecated superseded by the newer load paths; kept for protocol compatibility
 */
@Deprecated
public boolean addMiniLoadJob(TMiniLoadRequest request) throws DdlException {
    // Unpack the request; timestamp 0 means "not provided".
    String fullDbName = request.getDb();
    String tableName = request.getTbl();
    String label = request.getLabel();
    long timestamp = 0;
    if (request.isSetTimestamp()) {
        timestamp = request.getTimestamp();
    }
    TNetworkAddress beAddr = request.getBackend();
    String filePathsValue = request.getFiles().get(0);
    Map<String, String> params = request.getProperties();
    LabelName labelName = new LabelName(fullDbName, label);
    if (Strings.isNullOrEmpty(filePathsValue)) {
        throw new DdlException("File paths are not specified");
    }
    List<String> filePaths = Arrays.asList(filePathsValue.split(","));
    // Optional request parameters, all defaulting to null when absent.
    List<String> partitionNames = null;
    List<String> columnNames = null;
    ColumnSeparator columnSeparator = null;
    List<String> hllColumnPairList = null;
    String lineDelimiter = null;
    String formatType = null;
    if (params != null) {
        String specifiedPartitions = params.get(LoadStmt.KEY_IN_PARAM_PARTITIONS);
        if (!Strings.isNullOrEmpty(specifiedPartitions)) {
            partitionNames = Arrays.asList(specifiedPartitions.split(","));
        }
        String specifiedColumns = params.get(LoadStmt.KEY_IN_PARAM_COLUMNS);
        if (!Strings.isNullOrEmpty(specifiedColumns)) {
            columnNames = Arrays.asList(specifiedColumns.split(","));
        }
        final String hll = params.get(LoadStmt.KEY_IN_PARAM_HLL);
        if (!Strings.isNullOrEmpty(hll)) {
            hllColumnPairList = Arrays.asList(hll.split(":"));
        }
        String columnSeparatorStr = params.get(LoadStmt.KEY_IN_PARAM_COLUMN_SEPARATOR);
        if (columnSeparatorStr != null) {
            // An explicitly empty separator falls back to TAB.
            if (columnSeparatorStr.isEmpty()) {
                columnSeparatorStr = "\t";
            }
            columnSeparator = new ColumnSeparator(columnSeparatorStr);
            try {
                columnSeparator.analyze();
            } catch (AnalysisException e) {
                throw new DdlException(e.getMessage());
            }
        }
        lineDelimiter = params.get(LoadStmt.KEY_IN_PARAM_LINE_DELIMITER);
        formatType = params.get(LoadStmt.KEY_IN_PARAM_FORMAT_TYPE);
    }
    // Build the single data description for this mini load.
    DataDescription dataDescription = new DataDescription(
        tableName,
        partitionNames != null ? new PartitionNames(false, partitionNames) : null,
        filePaths,
        columnNames,
        columnSeparator,
        formatType,
        false,
        null
    );
    dataDescription.setLineDelimiter(lineDelimiter);
    dataDescription.setBeAddr(beAddr);
    // Translate "result:hash" HLL pairs into hll_hash column mappings.
    if (hllColumnPairList != null) {
        for (int i = 0; i < hllColumnPairList.size(); i++) {
            final String pairStr = hllColumnPairList.get(i);
            final List<String> pairList = Arrays.asList(pairStr.split(","));
            if (pairList.size() != 2) {
                throw new DdlException("hll param format error");
            }
            final String resultColumn = pairList.get(0);
            final String hashColumn = pairList.get(1);
            final Pair<String, List<String>> pair = new Pair<String, List<String>>(FunctionSet.HLL_HASH,
                Arrays.asList(hashColumn));
            dataDescription.addColumnMapping(resultColumn, pair);
        }
    }
    List<DataDescription> dataDescriptions = Lists.newArrayList(dataDescription);
    // Forward only the whitelisted job properties.
    Map<String, String> properties = Maps.newHashMap();
    if (params != null) {
        String maxFilterRatio = params.get(LoadStmt.MAX_FILTER_RATIO_PROPERTY);
        if (!Strings.isNullOrEmpty(maxFilterRatio)) {
            properties.put(LoadStmt.MAX_FILTER_RATIO_PROPERTY, maxFilterRatio);
        }
        String timeout = params.get(LoadStmt.TIMEOUT_PROPERTY);
        if (!Strings.isNullOrEmpty(timeout)) {
            properties.put(LoadStmt.TIMEOUT_PROPERTY, timeout);
        }
    }
    LoadStmt stmt = new LoadStmt(labelName, dataDescriptions, null, null, properties);
    // Register the label first to reject duplicate concurrent submissions;
    // false here means the same label is already in flight.
    if (!registerMiniLabel(fullDbName, label, timestamp)) {
        return false;
    }
    try {
        addLoadJob(stmt, EtlJobType.MINI, timestamp);
        return true;
    } finally {
        // Always release the label registration, whether submission succeeded or not.
        deregisterMiniLabel(fullDbName, label);
    }
}
/**
 * Resolves the target database from the statement's label, builds a load job of
 * the requested ETL type, and submits it.
 *
 * @throws DdlException when the database does not exist or submission fails
 */
public void addLoadJob(LoadStmt stmt, EtlJobType etlJobType, long timestamp) throws DdlException {
    final String targetDbName = stmt.getLabel().getDbName();
    final Database targetDb = Catalog.getCurrentCatalog().getDb(targetDbName);
    if (targetDb == null) {
        throw new DdlException("Database[" + targetDbName + "] does not exist");
    }
    addLoadJob(createLoadJob(stmt, etlJobType, targetDb, timestamp), targetDb);
}
/**
 * Validates cluster capacity, quota, and target table state, then registers the
 * job in the in-memory indexes and persists a start record to the edit log.
 *
 * <p>Locking order: db read lock first (table checks), then this.writeLock
 * (index mutation) — never both at once.
 */
private void addLoadJob(LoadJob job, Database db) throws DdlException {
    Catalog.getCurrentSystemInfo().checkClusterCapacity(db.getClusterName());
    // Sync delete jobs do not add data, so they bypass the size quota.
    if (!job.isSyncDeleteJob()) {
        db.checkDataSizeQuota();
    }
    db.readLock();
    try {
        // Reject loads into tables that are being restored.
        for (Long tblId : job.getIdToTableLoadInfo().keySet()) {
            Table tbl = db.getTable(tblId);
            if (tbl != null && tbl.getType() == TableType.OLAP
                && ((OlapTable) tbl).getState() == OlapTableState.RESTORE) {
                throw new DdlException("Table " + tbl.getName() + " is in restore process. "
                    + "Can not load into it");
            }
        }
    } finally {
        db.readUnlock();
    }
    writeLock();
    try {
        unprotectAddLoadJob(job, false /* not replay */);
        MetricRepo.COUNTER_LOAD_ADD.increase(1L);
        // Persist the start record so the job survives FE restart/replay.
        Catalog.getCurrentCatalog().getEditLog().logLoadStart(job);
    } finally {
        writeUnlock();
    }
    LOG.info("add load job. job: {}", job);
}
/**
 * Builds a LoadJob from a LoadStmt: validates the label and data descriptions,
 * applies statement properties (timeout, max filter ratio, exec memory), builds
 * the per-table/per-partition source layout, then applies ETL-type-specific
 * configuration (MINI / HADOOP / BROKER / INSERT).
 *
 * @throws DdlException on any validation or configuration error
 */
private LoadJob createLoadJob(LoadStmt stmt, EtlJobType etlJobType,
        Database db, long timestamp) throws DdlException {
    // Get label name and check it.
    String label = stmt.getLabel().getLabelName();
    List<DataDescription> dataDescriptions = stmt.getDataDescriptions();
    Map<String, String> properties = stmt.getProperties();
    try {
        FeNameFormat.checkLabel(label);
    } catch (AnalysisException e) {
        throw new DdlException(e.getMessage());
    }
    if (dataDescriptions == null || dataDescriptions.isEmpty()) {
        throw new DdlException("No data file in load statement.");
    }
    // Create the job and populate its basic attributes.
    LoadJob job = new LoadJob(label);
    job.setEtlJobType(etlJobType);
    job.setDbId(db.getId());
    job.setTimestamp(timestamp);
    job.setBrokerDesc(stmt.getBrokerDesc());
    // Inherit resource context and session memory limit from the submitting session.
    if (ConnectContext.get() != null) {
        job.setResourceInfo(ConnectContext.get().toResourceCtx());
        job.setExecMemLimit(ConnectContext.get().getSessionVariable().getMaxExecMemByte());
    }
    // Statement-level property overrides.
    if (properties != null) {
        if (properties.containsKey(LoadStmt.TIMEOUT_PROPERTY)) {
            try {
                job.setTimeoutSecond(Integer.parseInt(properties.get(LoadStmt.TIMEOUT_PROPERTY)));
            } catch (NumberFormatException e) {
                throw new DdlException("Timeout is not INT", e);
            }
        }
        if (properties.containsKey(LoadStmt.MAX_FILTER_RATIO_PROPERTY)) {
            try {
                job.setMaxFilterRatio(Double.parseDouble(properties.get(LoadStmt.MAX_FILTER_RATIO_PROPERTY)));
            } catch (NumberFormatException e) {
                throw new DdlException("Max filter ratio is not DOUBLE", e);
            }
        }
        if (properties.containsKey(LoadStmt.LOAD_DELETE_FLAG_PROPERTY)) {
            throw new DdlException("Do not support load_delete_flag");
        }
        if (properties.containsKey(LoadStmt.EXEC_MEM_LIMIT)) {
            try {
                job.setExecMemLimit(Long.parseLong(properties.get(LoadStmt.EXEC_MEM_LIMIT)));
            } catch (NumberFormatException e) {
                throw new DdlException("Execute memory limit is not Long", e);
            }
        }
    }
    // Build the table -> partition -> sources layout from the data descriptions.
    Map<Long, TableLoadInfo> idToTableLoadInfo = Maps.newHashMap();
    Map<Long, Map<Long, List<Source>>> tableToPartitionSources = Maps.newHashMap();
    for (DataDescription dataDescription : dataDescriptions) {
        checkAndCreateSource(db, dataDescription, tableToPartitionSources, etlJobType);
        job.addTableName(dataDescription.getTableName());
    }
    for (Entry<Long, Map<Long, List<Source>>> tableEntry : tableToPartitionSources.entrySet()) {
        long tableId = tableEntry.getKey();
        Map<Long, List<Source>> partitionToSources = tableEntry.getValue();
        Map<Long, PartitionLoadInfo> idToPartitionLoadInfo = Maps.newHashMap();
        for (Entry<Long, List<Source>> partitionEntry : partitionToSources.entrySet()) {
            PartitionLoadInfo info = new PartitionLoadInfo(partitionEntry.getValue());
            idToPartitionLoadInfo.put(partitionEntry.getKey(), info);
        }
        idToTableLoadInfo.put(tableId, new TableLoadInfo(idToPartitionLoadInfo));
    }
    job.setIdToTableLoadInfo(idToTableLoadInfo);
    // Broker load: aggregate broker file groups for the pull-based source.
    if (etlJobType == EtlJobType.BROKER) {
        BrokerFileGroupAggInfo sourceInfo = new BrokerFileGroupAggInfo();
        for (DataDescription dataDescription : dataDescriptions) {
            BrokerFileGroup fileGroup = new BrokerFileGroup(dataDescription);
            fileGroup.parse(db, dataDescription);
            sourceInfo.addFileGroup(fileGroup);
        }
        job.setPullLoadSourceInfo(sourceInfo);
        LOG.info("source info is {}", sourceInfo);
    }
    if (etlJobType == EtlJobType.MINI) {
        // Mini load: create one ETL task per data description on its backend.
        Map<Long, MiniEtlTaskInfo> idToEtlTask = Maps.newHashMap();
        long etlTaskId = 0;
        db.readLock();
        try {
            for (DataDescription dataDescription : dataDescriptions) {
                String tableName = dataDescription.getTableName();
                OlapTable table = (OlapTable) db.getTable(tableName);
                if (table == null) {
                    throw new DdlException("Table[" + tableName + "] does not exist");
                }
                TNetworkAddress beAddress = dataDescription.getBeAddr();
                Backend backend = Catalog.getCurrentSystemInfo().getBackendWithBePort(beAddress.getHostname(),
                    beAddress.getPort());
                if (!Catalog.getCurrentSystemInfo().checkBackendAvailable(backend.getId())) {
                    throw new DdlException("Etl backend is null or not available");
                }
                MiniEtlTaskInfo taskInfo = new MiniEtlTaskInfo(etlTaskId++, backend.getId(), table.getId());
                idToEtlTask.put(taskInfo.getId(), taskInfo);
            }
        } finally {
            db.readUnlock();
        }
        job.setMiniEtlTasks(idToEtlTask);
        job.setPrority(TPriority.HIGH);
        if (job.getTimeoutSecond() == 0) {
            // Use the config default when no timeout was specified.
            job.setTimeoutSecond(Config.mini_load_default_timeout_second);
        }
    } else if (etlJobType == EtlJobType.HADOOP) {
        // Hadoop load: resolve the DPP cluster config (user-level override first,
        // then the statically-configured clusters, then the default cluster).
        DppConfig dppConfig = dppDefaultConfig.getCopiedDppConfig();
        String cluster = stmt.getCluster();
        if (cluster == null && properties != null) {
            cluster = properties.get(LoadStmt.CLUSTER_PROPERTY);
        }
        Pair<String, DppConfig> clusterInfo = Catalog.getCurrentCatalog().getAuth().getLoadClusterInfo(
            stmt.getUser(), cluster);
        cluster = clusterInfo.first;
        DppConfig clusterConfig = clusterInfo.second;
        if (cluster == null || clusterConfig == null) {
            if (cluster == null) {
                cluster = Config.dpp_default_cluster;
            }
            clusterConfig = clusterToDppConfig.get(cluster);
            if (clusterConfig == null) {
                throw new DdlException("Load cluster[" + cluster + "] does not exist");
            }
        }
        dppConfig.update(clusterConfig);
        try {
            // Parse user-specified Hadoop configs and validate the merged result.
            dppConfig.updateHadoopConfigs(properties);
            dppConfig.check();
            job.setClusterInfo(cluster, dppConfig);
            job.setPrority(dppConfig.getPriority());
        } catch (LoadException e) {
            throw new DdlException(e.getMessage());
        }
        if (job.getTimeoutSecond() == 0) {
            // Hadoop load uses its own (longer) default timeout.
            job.setTimeoutSecond(Config.hadoop_load_default_timeout_second);
        }
    } else if (etlJobType == EtlJobType.BROKER) {
        if (job.getTimeoutSecond() == 0) {
            job.setTimeoutSecond(Config.broker_load_default_timeout_second);
        }
    } else if (etlJobType == EtlJobType.INSERT) {
        job.setPrority(TPriority.HIGH);
        if (job.getTimeoutSecond() == 0) {
            job.setTimeoutSecond(Config.insert_load_default_timeout_second);
        }
    }
    // Assign a globally unique id last, once the job is fully configured.
    job.setId(Catalog.getCurrentCatalog().getNextId());
    return job;
}
/**
 * Validates a DataDescription against its target table and builds the {@link Source}
 * (file paths, column list, separators, mapping functions) to load, registering it
 * under every target partition in {@code tableToPartitionSources}.
 * This is only used for hadoop load.
 *
 * @param db                      database holding the target table; read-locked for the checks
 * @param dataDescription         user-supplied file paths, columns, partitions and mappings
 * @param tableToPartitionSources out-param: tableId -> (partitionId -> list of sources)
 * @param jobType                 etl job type; some restrictions only apply to HADOOP
 * @throws DdlException if the table/partition is missing or the description is invalid
 */
public static void checkAndCreateSource(Database db, DataDescription dataDescription,
        Map<Long, Map<Long, List<Source>>> tableToPartitionSources, EtlJobType jobType) throws DdlException {
    Source source = new Source(dataDescription.getFilePaths());
    long tableId = -1;
    Set<Long> sourcePartitionIds = Sets.newHashSet();

    String tableName = dataDescription.getTableName();
    Map<String, Pair<String, List<String>>> columnToFunction = null;

    db.readLock();
    try {
        Table table = db.getTable(tableName);
        if (table == null) {
            throw new DdlException("Table [" + tableName + "] does not exist");
        }
        tableId = table.getId();
        if (table.getType() != TableType.OLAP) {
            throw new DdlException("Table [" + tableName + "] is not olap table");
        }

        // Hadoop dpp cannot process multi-column partitioning; the user must use broker load.
        if (((OlapTable) table).getPartitionInfo().isMultiColumnPartition() && jobType == EtlJobType.HADOOP) {
            throw new DdlException("Load by hadoop cluster does not support table with multi partition columns."
                    + " Table: " + table.getName() + ". Try using broker load. See 'help broker load;'");
        }

        // A PARTITION clause is meaningless on an unpartitioned table.
        if (dataDescription.getPartitionNames() != null &&
                ((OlapTable) table).getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {
            ErrorReport.reportDdlException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
        }

        if (((OlapTable) table).getState() == OlapTableState.RESTORE) {
            throw new DdlException("Table [" + tableName + "] is under restore");
        }

        // NEGATIVE load only makes sense for aggregate-key tables.
        if (((OlapTable) table).getKeysType() != KeysType.AGG_KEYS && dataDescription.isNegative()) {
            throw new DdlException("Load for AGG_KEYS table should not specify NEGATIVE");
        }

        // If the user did not list columns, default them from the base schema.
        List<Column> baseSchema = table.getBaseSchema(false);
        dataDescription.fillColumnInfoIfNotSpecified(baseSchema);

        // Resolve the declared file field names (plus columns derived from the file path)
        // to canonical table column names where possible; unknown names are kept as-is.
        List<String> columnNames = Lists.newArrayList();
        List<String> assignColumnNames = Lists.newArrayList();
        if (dataDescription.getFileFieldNames() != null) {
            assignColumnNames.addAll(dataDescription.getFileFieldNames());
            if (dataDescription.getColumnsFromPath() != null) {
                assignColumnNames.addAll(dataDescription.getColumnsFromPath());
            }
        }
        if (assignColumnNames.isEmpty()) {
            // No explicit columns: use every base-schema column in schema order.
            for (Column column : baseSchema) {
                columnNames.add(column.getName());
            }
        } else {
            for (String assignCol : assignColumnNames) {
                if (table.getColumn(assignCol) != null) {
                    // Normalize to the table's canonical column-name casing.
                    columnNames.add(table.getColumn(assignCol).getName());
                } else {
                    columnNames.add(assignCol);
                }
            }
        }
        source.setColumnNames(columnNames);

        // Build a case-insensitive view of the parsed column mapping expressions.
        Map<String, Pair<String, List<String>>> columnToHadoopFunction = dataDescription.getColumnToHadoopFunction();
        List<ImportColumnDesc> parsedColumnExprList = dataDescription.getParsedColumnExprList();
        Map<String, Expr> parsedColumnExprMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (ImportColumnDesc importColumnDesc : parsedColumnExprList) {
            parsedColumnExprMap.put(importColumnDesc.getColumnName(), importColumnDesc.getExpr());
        }

        // Every column that is neither loaded nor mapped must have a default or allow null.
        for (Column column : baseSchema) {
            String columnName = column.getName();
            if (columnNames.contains(columnName)) {
                continue;
            }
            if (parsedColumnExprMap.containsKey(columnName)) {
                continue;
            }
            if (column.getDefaultValue() != null || column.isAllowNull()) {
                continue;
            }
            throw new DdlException("Column has no default value. column: " + columnName);
        }

        // NEGATIVE load requires every value column to aggregate with SUM.
        if (dataDescription.isNegative()) {
            for (Column column : baseSchema) {
                if (!column.isKey() && column.getAggregationType() != AggregateType.SUM) {
                    throw new DdlException("Column is not SUM AggreateType. column:" + column.getName());
                }
            }
        }

        // HLL columns cannot be loaded verbatim; a mapping function is mandatory.
        for (Column column : baseSchema) {
            if (column.getDataType() == PrimitiveType.HLL) {
                if (columnToHadoopFunction != null && !columnToHadoopFunction.containsKey(column.getName())) {
                    throw new DdlException("Hll column is not assigned. column:" + column.getName());
                }
            }
        }

        // Generate mappings for schema-change shadow columns so their data is produced too.
        for (Column column : table.getFullSchema()) {
            if (column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX)) {
                String originCol = column.getNameWithoutPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX);
                if (parsedColumnExprMap.containsKey(originCol)) {
                    Expr mappingExpr = parsedColumnExprMap.get(originCol);
                    if (mappingExpr != null) {
                        /*
                         * eg:
                         * (A, C) SET (B = func(xx))
                         * ->
                         * (A, C) SET (B = func(xx), __doris_shadow_B = func(xxx))
                         */
                        // NOTE(review): columnToHadoopFunction is used without a null check here,
                        // unlike further below -- presumably it is always non-null on the hadoop
                        // path when mapping exprs exist; verify against DataDescription.
                        if (columnToHadoopFunction.containsKey(originCol)) {
                            columnToHadoopFunction.put(column.getName(), columnToHadoopFunction.get(originCol));
                        }
                        ImportColumnDesc importColumnDesc = new ImportColumnDesc(column.getName(), mappingExpr);
                        parsedColumnExprList.add(importColumnDesc);
                    } else {
                        /*
                         * eg:
                         * (A, B, C)
                         * ->
                         * (A, B, C) SET (__doris_shadow_B = substitute(B))
                         */
                        columnToHadoopFunction.put(column.getName(), Pair.create("substitute", Lists.newArrayList(originCol)));
                        ImportColumnDesc importColumnDesc = new ImportColumnDesc(column.getName(), new SlotRef(null, originCol));
                        parsedColumnExprList.add(importColumnDesc);
                    }
                } else {
                    /*
                     * There is a case that if user does not specify the related origin column, eg:
                     * COLUMNS (A, C), and B is not specified, but B is being modified so there is a shadow column '__doris_shadow_B'.
                     * We can not just add a mapping function "__doris_shadow_B = substitute(B)", because Doris can not find column B.
                     * In this case, __doris_shadow_B can use its default value, so no need to add it to column mapping
                     */
                }
            }
        }
        LOG.debug("after add shadow column. parsedColumnExprList: {}, columnToHadoopFunction: {}",
                parsedColumnExprList, columnToHadoopFunction);

        // Validate each mapping function against the set of loaded columns.
        Map<String, String> columnNameMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (String columnName : columnNames) {
            columnNameMap.put(columnName, columnName);
        }
        if (columnToHadoopFunction != null) {
            columnToFunction = Maps.newHashMap();
            for (Entry<String, Pair<String, List<String>>> entry : columnToHadoopFunction.entrySet()) {
                String mappingColumnName = entry.getKey();
                Column mappingColumn = table.getColumn(mappingColumnName);
                if (mappingColumn == null) {
                    throw new DdlException("Mapping column is not in table. column: " + mappingColumnName);
                }

                Pair<String, List<String>> function = entry.getValue();
                try {
                    DataDescription.validateMappingFunction(function.first, function.second, columnNameMap,
                            mappingColumn, dataDescription.isHadoopLoad());
                } catch (AnalysisException e) {
                    throw new DdlException(e.getMessage());
                }

                columnToFunction.put(mappingColumn.getName(), function);
            }
        }

        // Resolve target partitions: all of them, or just the named (possibly temp) ones.
        OlapTable olapTable = (OlapTable) table;
        PartitionNames partitionNames = dataDescription.getPartitionNames();
        if (partitionNames == null) {
            for (Partition partition : olapTable.getPartitions()) {
                sourcePartitionIds.add(partition.getId());
            }
        } else {
            for (String partitionName : partitionNames.getPartitionNames()) {
                Partition partition = olapTable.getPartition(partitionName, partitionNames.isTemp());
                if (partition == null) {
                    throw new DdlException("Partition [" + partitionName + "] does not exist");
                }
                sourcePartitionIds.add(partition.getId());
            }
        }
    } finally {
        db.readUnlock();
    }

    // Outside the lock: fill parsing options and register the source per partition.
    String columnSeparator = dataDescription.getColumnSeparator();
    if (!Strings.isNullOrEmpty(columnSeparator)) {
        source.setColumnSeparator(columnSeparator);
    }
    String lineDelimiter = dataDescription.getLineDelimiter();
    if (!Strings.isNullOrEmpty(lineDelimiter)) {
        source.setLineDelimiter(lineDelimiter);
    }
    source.setNegative(dataDescription.isNegative());
    if (columnToFunction != null) {
        source.setColumnToFunction(columnToFunction);
    }

    Map<Long, List<Source>> partitionToSources = null;
    if (tableToPartitionSources.containsKey(tableId)) {
        partitionToSources = tableToPartitionSources.get(tableId);
    } else {
        partitionToSources = Maps.newHashMap();
        tableToPartitionSources.put(tableId, partitionToSources);
    }
    for (long partitionId : sourcePartitionIds) {
        List<Source> sources = null;
        if (partitionToSources.containsKey(partitionId)) {
            sources = partitionToSources.get(partitionId);
        } else {
            sources = new ArrayList<Source>();
            partitionToSources.put(partitionId, sources);
        }
        sources.add(source);
    }
}
/**
 * Builds column mappings for schema-change "shadow" columns — columns whose names
 * carry the '__doris_shadow_' prefix. They are invisible to users, but data must
 * still be generated for them during load.
 * eg1:
 * base schema is (A, B, C), and B is under schema change, so there will be a shadow
 * column '__doris_shadow_B'. The final column mapping should look like:
 * (A, B, C, __doris_shadow_B = substitute(B));
 *
 * @param tbl           target table whose full schema may contain shadow columns
 * @param columnExprMap case-insensitive map of columnName -> mapping expr (null for plain columns)
 * @return one ImportColumnDesc per shadow column whose origin column was specified
 */
public static List<ImportColumnDesc> getSchemaChangeShadowColumnDesc(Table tbl, Map<String, Expr> columnExprMap) {
    List<ImportColumnDesc> shadowColumnDescs = Lists.newArrayList();
    for (Column column : tbl.getFullSchema()) {
        if (!column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX)) {
            continue;
        }
        String originCol = column.getNameWithoutPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX);
        if (!columnExprMap.containsKey(originCol)) {
            /*
             * The user did not specify the origin column, eg: COLUMNS (A, C) while B is
             * being modified, so '__doris_shadow_B' exists. We cannot add a mapping
             * "__doris_shadow_B = substitute(B)" because Doris cannot find column B.
             * The shadow column will take its default value, so no mapping is added.
             */
            continue;
        }
        // With a mapping "(A, C) SET (B = func(xx))" the shadow column reuses func(xx);
        // with a plain column list "(A, B, C)" it reads the origin column directly.
        Expr originExpr = columnExprMap.get(originCol);
        Expr shadowExpr = (originExpr != null) ? originExpr : new SlotRef(null, originCol);
        shadowColumnDescs.add(new ImportColumnDesc(column.getName(), shadowExpr));
    }
    return shadowColumnDescs;
}
/**
 * Column-mapping initialization for spark load jobs.
 * Only performs the validation/shadow-column steps; slot descriptors are not
 * created and mapping expressions are not analyzed.
 *
 * @throws UserException if a mapping column is unknown or a mapping function is invalid
 */
public static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs,
        Map<String, Pair<String, List<String>>> columnToHadoopFunction) throws UserException {
    // Delegate with needInitSlotAndAnalyzeExprs = false: no planner state is needed here.
    initColumns(tbl, columnExprs, columnToHadoopFunction, null, null, null, null, null, false);
}
/**
 * Column-mapping initialization for broker load v2 and stream load.
 * Must be called under the same db lock used during planning.
 * Performs the full pipeline, including slot-descriptor creation and
 * mapping-expression analysis.
 *
 * @throws UserException if a mapping is invalid or an expression fails to analyze
 */
public static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs,
        Map<String, Pair<String, List<String>>> columnToHadoopFunction,
        Map<String, Expr> exprsByName, Analyzer analyzer, TupleDescriptor srcTupleDesc,
        Map<String, SlotDescriptor> slotDescByName, TBrokerScanRangeParams params) throws UserException {
    // Delegate with needInitSlotAndAnalyzeExprs = true: planner state is required here.
    initColumns(tbl, columnExprs, columnToHadoopFunction, exprsByName, analyzer,
            srcTupleDesc, slotDescByName, params, true);
}
/**
 * This function will do followings:
 * 1. fill the column exprs if user does not specify any column or column mapping.
 * 2. For not specified columns, check if they have default value.
 * 3. Add any shadow columns if have.
 * 4. validate hadoop functions
 * 5. init slot descs and expr map for load plan
 *
 * @param tbl                         target table
 * @param columnExprs                 user-declared columns and mapping exprs (not mutated)
 * @param columnToHadoopFunction      hadoop-style mapping functions to validate, may be null
 * @param exprsByName                 out-param: realColumnName -> analyzed mapping expr
 * @param analyzer                    analyzer used for expression analysis
 * @param srcTupleDesc                tuple descriptor receiving one VARCHAR slot per source column
 * @param slotDescByName              out-param: realColumnName -> created slot descriptor
 * @param params                      scan-range params receiving the created src slot ids
 * @param needInitSlotAndAnalyzeExprs false to stop after validation (spark load path)
 * @throws UserException if a mapping is invalid or an expression fails to analyze
 */
public static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs,
        Map<String, Pair<String, List<String>>> columnToHadoopFunction,
        Map<String, Expr> exprsByName, Analyzer analyzer, TupleDescriptor srcTupleDesc,
        Map<String, SlotDescriptor> slotDescByName, TBrokerScanRangeParams params,
        boolean needInitSlotAndAnalyzeExprs) throws UserException {
    // Every mapping (non-column) expr must target an existing table column.
    for (ImportColumnDesc importColumnDesc : columnExprs) {
        if (importColumnDesc.isColumn()) {
            continue;
        }
        String mappingColumnName = importColumnDesc.getColumnName();
        if (tbl.getColumn(mappingColumnName) == null) {
            throw new DdlException("Mapping column is not in table. column: " + mappingColumnName);
        }
    }

    // Work on a copy so the caller's list is not mutated.
    List<ImportColumnDesc> copiedColumnExprs = Lists.newArrayList(columnExprs);

    // If no file field names were specified, default to the full base schema.
    boolean specifyFileFieldNames = copiedColumnExprs.stream().anyMatch(p -> p.isColumn());
    if (!specifyFileFieldNames) {
        List<Column> columns = tbl.getBaseSchema(false);
        for (Column column : columns) {
            ImportColumnDesc columnDesc = new ImportColumnDesc(column.getName());
            LOG.debug("add base column {} to stream load task", column.getName());
            copiedColumnExprs.add(columnDesc);
        }
    }

    // Case-insensitive columnName -> mapping expr (null for plain columns).
    Map<String, Expr> columnExprMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    for (ImportColumnDesc importColumnDesc : copiedColumnExprs) {
        columnExprMap.put(importColumnDesc.getColumnName(), importColumnDesc.getExpr());
    }

    // Columns that are neither loaded nor mapped must have a default or allow null.
    for (Column column : tbl.getBaseSchema()) {
        String columnName = column.getName();
        if (columnExprMap.containsKey(columnName)) {
            continue;
        }
        if (column.getDefaultValue() != null || column.isAllowNull()) {
            continue;
        }
        throw new DdlException("Column has no default value. column: " + columnName);
    }

    // Generate mappings for schema-change shadow columns.
    copiedColumnExprs.addAll(getSchemaChangeShadowColumnDesc(tbl, columnExprMap));

    // Validate hadoop functions against the set of loaded columns.
    if (columnToHadoopFunction != null) {
        Map<String, String> columnNameMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (ImportColumnDesc importColumnDesc : copiedColumnExprs) {
            if (importColumnDesc.isColumn()) {
                columnNameMap.put(importColumnDesc.getColumnName(), importColumnDesc.getColumnName());
            }
        }
        for (Entry<String, Pair<String, List<String>>> entry : columnToHadoopFunction.entrySet()) {
            String mappingColumnName = entry.getKey();
            Column mappingColumn = tbl.getColumn(mappingColumnName);
            if (mappingColumn == null) {
                // Fail with a clear message (consistent with checkAndCreateSource) instead of
                // passing a null column into validateMappingFunction and risking an NPE.
                throw new DdlException("Mapping column is not in table. column: " + mappingColumnName);
            }
            Pair<String, List<String>> function = entry.getValue();
            try {
                DataDescription.validateMappingFunction(function.first, function.second, columnNameMap,
                        mappingColumn, false);
            } catch (AnalysisException e) {
                throw new DdlException(e.getMessage());
            }
        }
    }

    if (!needInitSlotAndAnalyzeExprs) {
        return;
    }

    // Create a nullable VARCHAR source slot per plain column; collect mapping exprs by name.
    for (ImportColumnDesc importColumnDesc : copiedColumnExprs) {
        String columnName = importColumnDesc.getColumnName();
        String realColName = tbl.getColumn(columnName) == null ? columnName
                : tbl.getColumn(columnName).getName();
        if (importColumnDesc.getExpr() != null) {
            Expr expr = transformHadoopFunctionExpr(tbl, realColName, importColumnDesc.getExpr());
            exprsByName.put(realColName, expr);
        } else {
            SlotDescriptor slotDesc = analyzer.getDescTbl().addSlotDescriptor(srcTupleDesc);
            // All source columns arrive as nullable VARCHAR; type conversion happens via exprs.
            slotDesc.setType(ScalarType.createType(PrimitiveType.VARCHAR));
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(true);
            slotDesc.setColumn(new Column(realColName, PrimitiveType.VARCHAR));
            params.addToSrcSlotIds(slotDesc.getId().asInt());
            slotDescByName.put(realColName, slotDesc);
        }
    }
    /*
     * The extension column of the materialized view is added to the expression evaluation of load
     * To avoid nested expressions. eg : column(a, tmp_c, c = expr(tmp_c)) ,
     * __doris_materialized_view_bitmap_union_c need be analyzed after exprsByName
     * So the columns of the materialized view are stored separately here
     */
    Map<String, Expr> mvDefineExpr = Maps.newHashMap();
    for (Column column : tbl.getFullSchema()) {
        if (column.getDefineExpr() != null) {
            mvDefineExpr.put(column.getName(), column.getDefineExpr());
        }
    }

    LOG.debug("slotDescByName: {}, exprsByName: {}, mvDefineExpr: {}", slotDescByName, exprsByName, mvDefineExpr);

    // Substitute the source slots into each mapping expr, then analyze it.
    for (Map.Entry<String, Expr> entry : exprsByName.entrySet()) {
        ExprSubstitutionMap smap = new ExprSubstitutionMap();
        List<SlotRef> slots = Lists.newArrayList();
        entry.getValue().collect(SlotRef.class, slots);
        for (SlotRef slot : slots) {
            SlotDescriptor slotDesc = slotDescByName.get(slot.getColumnName());
            if (slotDesc == null) {
                throw new UserException("unknown reference column, column=" + entry.getKey()
                        + ", reference=" + slot.getColumnName());
            }
            smap.getLhs().add(slot);
            smap.getRhs().add(new SlotRef(slotDesc));
        }
        Expr expr = entry.getValue().clone(smap);
        expr.analyze(analyzer);

        // Aggregates cannot be evaluated per-row during load.
        List<FunctionCallExpr> funcs = Lists.newArrayList();
        expr.collect(FunctionCallExpr.class, funcs);
        for (FunctionCallExpr fn : funcs) {
            if (fn.isAggregateFunction()) {
                throw new AnalysisException("Don't support aggregation function in load expression");
            }
        }
        exprsByName.put(entry.getKey(), expr);
    }

    // Analyze materialized-view define exprs last, so they may reference exprsByName entries.
    for (Map.Entry<String, Expr> entry : mvDefineExpr.entrySet()) {
        ExprSubstitutionMap smap = new ExprSubstitutionMap();
        List<SlotRef> slots = Lists.newArrayList();
        entry.getValue().collect(SlotRef.class, slots);
        for (SlotRef slot : slots) {
            if (slotDescByName.get(slot.getColumnName()) != null) {
                smap.getLhs().add(slot);
                smap.getRhs().add(new CastExpr(tbl.getColumn(slot.getColumnName()).getType(),
                        new SlotRef(slotDescByName.get(slot.getColumnName()))));
            } else if (exprsByName.get(slot.getColumnName()) != null) {
                smap.getLhs().add(slot);
                smap.getRhs().add(new CastExpr(tbl.getColumn(slot.getColumnName()).getType(),
                        exprsByName.get(slot.getColumnName())));
            } else {
                throw new UserException("unknown reference column, column=" + entry.getKey()
                        + ", reference=" + slot.getColumnName());
            }
        }
        Expr expr = entry.getValue().clone(smap);
        expr.analyze(analyzer);
        exprsByName.put(entry.getKey(), expr);
    }
    LOG.debug("after init column, exprMap: {}", exprsByName);
}
/**
 * This method is used to transform hadoop function.
 * The hadoop function includes: replace_value, strftime, time_format, alignment_timestamp, default_value, now.
 * It rewrites those functions with real function names and params.
 * For any other function, the expr only passes through this method and the origin expr is returned.
 *
 * @param tbl        target table; used to look up the column's default value / nullability
 * @param columnName target column the expression feeds
 * @param originExpr the parsed mapping expression
 * @return the rewritten expression, or {@code originExpr} unchanged
 * @throws UserException if a required default value is missing or a precision is unknown
 */
private static Expr transformHadoopFunctionExpr(Table tbl, String columnName, Expr originExpr)
        throws UserException {
    Column column = tbl.getColumn(columnName);
    if (column == null) {
        // Mapping expr for a non-table (intermediate) column: nothing to rewrite against.
        return originExpr;
    }

    if (originExpr instanceof FunctionCallExpr) {
        FunctionCallExpr funcExpr = (FunctionCallExpr) originExpr;
        String funcName = funcExpr.getFnName().getFunction();
        if (funcName.equalsIgnoreCase("replace_value")) {
            List<Expr> exprs = Lists.newArrayList();
            SlotRef slotRef = new SlotRef(null, columnName);
            /*
             * We will convert this based on different cases:
             * case 1: k1 = replace_value(null, anyval);
             *     to: k1 = if (k1 is not null, k1, anyval);
             *
             * case 2: k1 = replace_value(anyval1, anyval2);
             *     to: k1 = if (k1 is not null, if(k1 != anyval1, k1, anyval2), null);
             */
            if (funcExpr.getChild(0) instanceof NullLiteral) {
                // Case 1: replace NULL values with the given (or default) replacement.
                exprs.add(new IsNullPredicate(slotRef, true));
                exprs.add(slotRef);
                if (funcExpr.hasChild(1)) {
                    exprs.add(funcExpr.getChild(1));
                } else {
                    // No explicit replacement: fall back to column default, then NULL if allowed.
                    if (column.getDefaultValue() != null) {
                        exprs.add(new StringLiteral(column.getDefaultValue()));
                    } else {
                        if (column.isAllowNull()) {
                            exprs.add(NullLiteral.create(Type.VARCHAR));
                        } else {
                            throw new UserException("Column(" + columnName + ") has no default value.");
                        }
                    }
                }
            } else {
                // Case 2: replace a specific value; NULL input stays NULL (outer else branch).
                exprs.add(new IsNullPredicate(slotRef, true));
                List<Expr> innerIfExprs = Lists.newArrayList();
                innerIfExprs.add(new BinaryPredicate(BinaryPredicate.Operator.NE, slotRef, funcExpr.getChild(0)));
                innerIfExprs.add(slotRef);
                if (funcExpr.hasChild(1)) {
                    innerIfExprs.add(funcExpr.getChild(1));
                } else {
                    // No explicit replacement: fall back to column default, then NULL if allowed.
                    if (column.getDefaultValue() != null) {
                        innerIfExprs.add(new StringLiteral(column.getDefaultValue()));
                    } else {
                        if (column.isAllowNull()) {
                            innerIfExprs.add(NullLiteral.create(Type.VARCHAR));
                        } else {
                            throw new UserException("Column(" + columnName + ") has no default value.");
                        }
                    }
                }
                FunctionCallExpr innerIfFn = new FunctionCallExpr("if", innerIfExprs);
                exprs.add(innerIfFn);
                exprs.add(NullLiteral.create(Type.VARCHAR));
            }

            LOG.debug("replace_value expr: {}", exprs);
            FunctionCallExpr newFn = new FunctionCallExpr("if", exprs);
            return newFn;
        } else if (funcName.equalsIgnoreCase("strftime")) {
            // strftime(fmt, ts) -> FROM_UNIXTIME(ts); the format argument is dropped here.
            FunctionName fromUnixName = new FunctionName("FROM_UNIXTIME");
            List<Expr> fromUnixArgs = Lists.newArrayList(funcExpr.getChild(1));
            FunctionCallExpr fromUnixFunc = new FunctionCallExpr(
                    fromUnixName, new FunctionParams(false, fromUnixArgs));

            return fromUnixFunc;
        } else if (funcName.equalsIgnoreCase("time_format")) {
            // time_format(outputFmt, inputFmt, v) -> DATE_FORMAT(STR_TO_DATE(v, inputFmt), outputFmt)
            FunctionName strToDateName = new FunctionName("STR_TO_DATE");
            List<Expr> strToDateExprs = Lists.newArrayList(funcExpr.getChild(2), funcExpr.getChild(1));
            FunctionCallExpr strToDateFuncExpr = new FunctionCallExpr(
                    strToDateName, new FunctionParams(false, strToDateExprs));

            FunctionName dateFormatName = new FunctionName("DATE_FORMAT");
            List<Expr> dateFormatArgs = Lists.newArrayList(strToDateFuncExpr, funcExpr.getChild(0));
            FunctionCallExpr dateFormatFunc = new FunctionCallExpr(
                    dateFormatName, new FunctionParams(false, dateFormatArgs));

            return dateFormatFunc;
        } else if (funcName.equalsIgnoreCase("alignment_timestamp")) {
            /*
             * change to:
             * UNIX_TIMESTAMP(DATE_FORMAT(FROM_UNIXTIME(ts), "%Y-01-01 00:00:00"));
             */
            FunctionName fromUnixName = new FunctionName("FROM_UNIXTIME");
            List<Expr> fromUnixArgs = Lists.newArrayList(funcExpr.getChild(1));
            FunctionCallExpr fromUnixFunc = new FunctionCallExpr(
                    fromUnixName, new FunctionParams(false, fromUnixArgs));

            // Truncate the timestamp to the requested precision via a DATE_FORMAT pattern.
            StringLiteral precision = (StringLiteral) funcExpr.getChild(0);
            StringLiteral format;
            if (precision.getStringValue().equalsIgnoreCase("year")) {
                format = new StringLiteral("%Y-01-01 00:00:00");
            } else if (precision.getStringValue().equalsIgnoreCase("month")) {
                format = new StringLiteral("%Y-%m-01 00:00:00");
            } else if (precision.getStringValue().equalsIgnoreCase("day")) {
                format = new StringLiteral("%Y-%m-%d 00:00:00");
            } else if (precision.getStringValue().equalsIgnoreCase("hour")) {
                format = new StringLiteral("%Y-%m-%d %H:00:00");
            } else {
                throw new UserException("Unknown precision(" + precision.getStringValue() + ")");
            }

            FunctionName dateFormatName = new FunctionName("DATE_FORMAT");
            List<Expr> dateFormatArgs = Lists.newArrayList(fromUnixFunc, format);
            FunctionCallExpr dateFormatFunc = new FunctionCallExpr(
                    dateFormatName, new FunctionParams(false, dateFormatArgs));

            FunctionName unixTimeName = new FunctionName("UNIX_TIMESTAMP");
            List<Expr> unixTimeArgs = Lists.newArrayList();
            unixTimeArgs.add(dateFormatFunc);
            FunctionCallExpr unixTimeFunc = new FunctionCallExpr(
                    unixTimeName, new FunctionParams(false, unixTimeArgs));

            return unixTimeFunc;
        } else if (funcName.equalsIgnoreCase("default_value")) {
            // default_value(v) -> v
            return funcExpr.getChild(0);
        } else if (funcName.equalsIgnoreCase("now")) {
            FunctionName nowFunctionName = new FunctionName("NOW");
            FunctionCallExpr newFunc = new FunctionCallExpr(nowFunctionName, new FunctionParams(null));
            return newFunc;
        } else if (funcName.equalsIgnoreCase("substitute")) {
            // substitute(col) -> col (used for shadow-column mappings)
            return funcExpr.getChild(0);
        }
    }
    return originExpr;
}
/**
 * Registers a load job in all in-memory indexes (by id, by db, by label) and in
 * the per-state index matching its current state. Caller must hold the write lock.
 *
 * @param job      the load job to add
 * @param isReplay true when invoked from edit-log replay; skips the unfinished-job quota check
 * @throws DdlException if the label is already in use or the unfinished-job quota is exceeded
 */
public void unprotectAddLoadJob(LoadJob job, boolean isReplay) throws DdlException {
    long jobId = job.getId();
    long dbId = job.getDbId();
    String label = job.getLabel();

    // The quota only applies to live requests; replay must always succeed.
    if (!isReplay && getAllUnfinishedLoadJob() > Config.max_unfinished_load_job) {
        throw new DdlException(
                "Number of unfinished load jobs exceed the max number: " + Config.max_unfinished_load_job);
    }

    if (!job.isSyncDeleteJob()) {
        // Mini load labels are pre-registered separately, so skip the mini-label check for them.
        boolean checkMini = job.getEtlJobType() != EtlJobType.MINI;
        unprotectIsLabelUsed(dbId, label, -1, checkMini);

        // Index by label and by db; nested containers are created lazily.
        Map<String, List<LoadJob>> labelToLoadJobs =
                dbLabelToLoadJobs.computeIfAbsent(dbId, k -> Maps.newHashMap());
        List<LoadJob> labelLoadJobs = labelToLoadJobs.computeIfAbsent(label, k -> Lists.newArrayList());
        List<LoadJob> dbLoadJobs = dbToLoadJobs.computeIfAbsent(dbId, k -> Lists.newArrayList());

        idToLoadJob.put(jobId, job);
        dbLoadJobs.add(job);
        labelLoadJobs.add(job);
    } else {
        // Sync delete jobs skip the label indexes entirely.
        List<LoadJob> dbDeleteJobs = dbToDeleteJobs.computeIfAbsent(dbId, k -> Lists.newArrayList());
        idToLoadJob.put(jobId, job);
        dbDeleteJobs.add(job);
    }

    // Also track the job in the index of its current state.
    switch (job.getState()) {
        case PENDING:
            idToPendingLoadJob.put(jobId, job);
            break;
        case ETL:
            idToEtlLoadJob.put(jobId, job);
            break;
        case LOADING:
            idToLoadingLoadJob.put(jobId, job);
            // Loading-partition bookkeeping must be restored for a LOADING job.
            recoverLoadingPartitions(job);
            break;
        case QUORUM_FINISHED:
            idToQuorumFinishedLoadJob.put(jobId, job);
            break;
        case FINISHED:
            break;
        case CANCELLED:
            break;
        default:
            Preconditions.checkNotNull(null, "Should not be here");
    }
}
/** Total number of jobs in the PENDING, ETL, LOADING and QUORUM_FINISHED state indexes. */
private long getAllUnfinishedLoadJob() {
    long unfinished = idToPendingLoadJob.size();
    unfinished += idToEtlLoadJob.size();
    unfinished += idToLoadingLoadJob.size();
    unfinished += idToQuorumFinishedLoadJob.size();
    return unfinished;
}
/**
 * Edit-log replay of an added load job. Takes the write lock and delegates
 * to {@link #unprotectAddLoadJob} with {@code isReplay = true}.
 */
public void replayAddLoadJob(LoadJob job) throws DdlException {
    writeLock();
    try {
        // Replay mode: the unfinished-job quota check is skipped.
        unprotectAddLoadJob(job, true /* replay */);
    } finally {
        writeUnlock();
    }
}
/**
 * Moves a job from the PENDING index to the ETL index and replaces the stored
 * job instance. Caller must hold the write lock.
 */
public void unprotectEtlLoadJob(LoadJob job) {
    long id = job.getId();
    idToPendingLoadJob.remove(id);
    idToEtlLoadJob.put(id, job);

    replaceLoadJob(job);
}
/** Edit-log replay of a PENDING -> ETL state transition, under the write lock. */
public void replayEtlLoadJob(LoadJob job) throws DdlException {
    writeLock();
    try {
        unprotectEtlLoadJob(job);
    } finally {
        writeUnlock();
    }
}
/**
 * Moves a job from the ETL index to the LOADING index, restores its
 * loading-partition bookkeeping, and replaces the stored job instance.
 * Caller must hold the write lock.
 */
public void unprotectLoadingLoadJob(LoadJob job) {
    long id = job.getId();
    idToEtlLoadJob.remove(id);
    idToLoadingLoadJob.put(id, job);

    // A LOADING job has in-flight partitions that must be re-registered.
    recoverLoadingPartitions(job);

    replaceLoadJob(job);
}
/** Edit-log replay of an ETL -> LOADING state transition, under the write lock. */
public void replayLoadingLoadJob(LoadJob job) throws DdlException {
    writeLock();
    try {
        unprotectLoadingLoadJob(job);
    } finally {
        writeUnlock();
    }
}
/**
 * Pre-registers a mini load label so concurrent requests with the same label
 * are rejected before the job itself is created.
 *
 * @param fullDbName fully-qualified database name
 * @param label      mini load label to reserve
 * @param timestamp  request timestamp used to recognize retries
 * @return true if the label was registered; false if this is a retry of the same request
 * @throws DdlException if the db does not exist or the label is used by a different job
 */
public boolean registerMiniLabel(String fullDbName, String label, long timestamp) throws DdlException {
    Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + fullDbName);
    }

    long dbId = db.getId();
    writeLock();
    try {
        // A retry of the same request (matching timestamp) returns true here and we
        // report "already registered"; a conflicting label throws inside the check.
        if (unprotectIsLabelUsed(dbId, label, timestamp, true)) {
            return false;
        }

        // Lazily create the per-db label map, then reserve the label.
        dbToMiniLabels.computeIfAbsent(dbId, k -> Maps.newHashMap()).put(label, timestamp);
        return true;
    } finally {
        writeUnlock();
    }
}
/**
 * Releases a previously registered mini load label. No-op if the db has no
 * registered labels or the label is absent.
 *
 * @throws DdlException if the database does not exist
 */
public void deregisterMiniLabel(String fullDbName, String label) throws DdlException {
    Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + fullDbName);
    }

    long dbId = db.getId();
    writeLock();
    try {
        Map<String, Long> miniLabels = dbToMiniLabels.get(dbId);
        if (miniLabels == null) {
            return;
        }
        miniLabels.remove(label);
        // Drop the per-db entry entirely once its last label is gone.
        if (miniLabels.isEmpty()) {
            dbToMiniLabels.remove(dbId);
        }
    } finally {
        writeUnlock();
    }
}
/**
 * Lock-taking wrapper around {@link #unprotectIsLabelUsed} with no retry
 * timestamp (-1) and the mini-label check enabled.
 *
 * @throws DdlException if the label is already used by a non-cancelled job
 */
public boolean isLabelUsed(long dbId, String label) throws DdlException {
    readLock();
    try {
        return unprotectIsLabelUsed(dbId, label, -1, true);
    } finally {
        readUnlock();
    }
}
/*
 * 1. if label is already used, and this is not a retry request,
 *    throw exception ("Label already used")
 * 2. if label is already used, but this is a retry request,
 *    return true
 * 3. if label is not used, return false
 * 4. throw exception if encounter error.
 */
private boolean unprotectIsLabelUsed(long dbId, String label, long timestamp, boolean checkMini)
        throws DdlException {
    // First, scan the registered load jobs of this db for a live job with the same label.
    Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(dbId);
    if (labelToLoadJobs != null && labelToLoadJobs.containsKey(label)) {
        for (LoadJob previousJob : labelToLoadJobs.get(label)) {
            // Cancelled jobs free their label.
            if (previousJob.getState() == JobState.CANCELLED) {
                continue;
            }
            if (timestamp != -1 && timestamp == previousJob.getTimestamp()) {
                // Same timestamp: this is a retry of the very same request.
                LOG.info("get a retry request with label: {}, timestamp: {}. return ok",
                        label, timestamp);
                return true;
            }
            throw new LabelAlreadyUsedException(label);
        }
    }

    if (!checkMini) {
        return false;
    }

    // Second, check labels pre-registered by mini load but not yet committed.
    Map<String, Long> uncommittedLabels = dbToMiniLabels.get(dbId);
    if (uncommittedLabels == null || !uncommittedLabels.containsKey(label)) {
        return false;
    }
    if (timestamp != -1 && timestamp == uncommittedLabels.get(label)) {
        LOG.info("get a retry mini load request with label: {}, timestamp: {}. return ok",
                label, timestamp);
        return true;
    }
    throw new LabelAlreadyUsedException(label);
}
/**
 * Checks whether any non-cancelled load job in {@code dbName} matches {@code labelValue}.
 *
 * @param dbName          database to search
 * @param labelValue      label to match
 * @param isAccurateMatch true for exact label equality, false for substring matching
 * @return true if at least one matching job is not CANCELLED
 * @throws DdlException if the database does not exist
 */
public boolean isLabelExist(String dbName, String labelValue, boolean isAccurateMatch) throws DdlException {
    Database db = Catalog.getCurrentCatalog().getDb(dbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + dbName);
    }

    readLock();
    try {
        Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
        if (labelToLoadJobs == null) {
            return false;
        }
        // Collect the candidate jobs for the requested matching mode.
        List<LoadJob> loadJobs = Lists.newArrayList();
        if (isAccurateMatch) {
            if (labelToLoadJobs.containsKey(labelValue)) {
                loadJobs.addAll(labelToLoadJobs.get(labelValue));
            }
        } else {
            for (Map.Entry<String, List<LoadJob>> entry : labelToLoadJobs.entrySet()) {
                if (entry.getKey().contains(labelValue)) {
                    loadJobs.addAll(entry.getValue());
                }
            }
        }
        // anyMatch short-circuits and returns false on an empty list, replacing the
        // previous filter(...).count() == 0 formulation which always scanned all jobs.
        return loadJobs.stream().anyMatch(job -> job.getState() != JobState.CANCELLED);
    } finally {
        readUnlock();
    }
}
/**
 * Cancels the most recent load job matching the statement's db and label.
 * Performs privilege checks (db-level if the job has no tables recorded,
 * otherwise per-table) before cancelling.
 *
 * @param stmt the CANCEL LOAD statement with db name and label
 * @return true on success
 * @throws DdlException if the db/job is missing, the job already finished or was
 *         cancelled, privileges are insufficient, or the cancel itself fails
 */
public boolean cancelLoadJob(CancelLoadStmt stmt) throws DdlException {
    // get params
    String dbName = stmt.getDbName();
    String label = stmt.getLabel();

    // get load job and check state
    Database db = Catalog.getCurrentCatalog().getDb(dbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + dbName);
    }

    LoadJob job = null;
    readLock();
    try {
        Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
        if (labelToLoadJobs == null) {
            throw new DdlException("Load job does not exist");
        }

        List<LoadJob> loadJobs = labelToLoadJobs.get(label);
        if (loadJobs == null) {
            throw new DdlException("Load job does not exist");
        }
        // The same label may have several (cancelled + one live) jobs; cancel the latest.
        job = loadJobs.get(loadJobs.size() - 1);
        JobState state = job.getState();
        if (state == JobState.CANCELLED) {
            throw new DdlException("Load job has been cancelled");
        } else if (state == JobState.QUORUM_FINISHED || state == JobState.FINISHED) {
            throw new DdlException("Load job has been finished");
        }
    } finally {
        readUnlock();
    }

    // Privilege checks happen outside the load lock.
    Set<String> tableNames = job.getTableNames();
    if (tableNames.isEmpty()) {
        // No table info recorded on the job: require db-level LOAD privilege.
        if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
                PrivPredicate.LOAD)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "CANCEL LOAD");
        }
    } else {
        // Require LOAD privilege on every table the job touches.
        for (String tblName : tableNames) {
            if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, tblName,
                    PrivPredicate.LOAD)) {
                ErrorReport.reportDdlException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "CANCEL LOAD",
                        ConnectContext.get().getQualifiedUser(),
                        ConnectContext.get().getRemoteIP(), tblName);
            }
        }
    }

    // cancel job
    List<String> failedMsg = Lists.newArrayList();
    if (!cancelLoadJob(job, CancelType.USER_CANCEL, "user cancel", failedMsg)) {
        throw new DdlException("Cancel load job fail: " + (failedMsg.isEmpty() ? "Unknown reason" : failedMsg.get(0)));
    }

    return true;
}
/** Cancels a job without collecting failure messages; see the 4-arg overload. */
public boolean cancelLoadJob(LoadJob job, CancelType cancelType, String msg) {
    return cancelLoadJob(job, cancelType, msg, null);
}
/**
 * Transitions a job to CANCELLED and cleans up state-specific resources.
 *
 * @param job        job to cancel
 * @param cancelType why the job is being cancelled
 * @param msg        cancel reason recorded on the job
 * @param failedMsg  optional out-param collecting failure details, may be null
 * @return true if the state transition succeeded
 */
public boolean cancelLoadJob(LoadJob job, CancelType cancelType, String msg, List<String> failedMsg) {
    LOG.info("try to cancel load job: {}", job);
    JobState stateBeforeCancel = job.getState();
    boolean updated = updateLoadJobState(job, JobState.CANCELLED, cancelType, msg, failedMsg);
    if (!updated) {
        LOG.warn("cancel load job failed. job: {}", job);
        return false;
    }

    // Hadoop jobs keep external etl state that must be cleared explicitly.
    if (job.getHadoopDppConfig() != null) {
        clearJob(job, stateBeforeCancel);
    }

    // A broker job still in ETL has a pull-load task to cancel as well.
    if (job.getBrokerDesc() != null && stateBeforeCancel == JobState.ETL) {
        Catalog.getCurrentCatalog().getPullLoadJobMgr().cancelJob(job.getId());
    }

    LOG.info("cancel load job success. job: {}", job);
    return true;
}
/**
 * Removes a cancelled job from the per-state index that tracked it and replaces
 * the stored job instance. Caller must hold the write lock.
 */
public void unprotectCancelLoadJob(LoadJob job) {
    long jobId = job.getId();
    LoadJob previousJob = idToLoadJob.get(jobId);
    if (previousJob == null) {
        LOG.warn("cancel job does not exist. id: {}", jobId);
        return;
    }

    JobState srcState = previousJob.getState();
    if (srcState == JobState.PENDING) {
        idToPendingLoadJob.remove(jobId);
    } else if (srcState == JobState.ETL) {
        idToEtlLoadJob.remove(jobId);
    } else if (srcState == JobState.LOADING) {
        idToLoadingLoadJob.remove(jobId);
        // Also release the partitions this job was loading into.
        removeLoadingPartitions(previousJob);
    } else {
        // A FINISHED/QUORUM_FINISHED/CANCELLED job should never be cancelled again.
        LOG.warn("cancel job has wrong src state: {}", srcState.name());
        return;
    }

    replaceLoadJob(job);
}
/** Edit-log replay of a job cancellation, under the write lock. */
public void replayCancelLoadJob(LoadJob job) {
    writeLock();
    try {
        unprotectCancelLoadJob(job);
    } finally {
        writeUnlock();
    }
}
// Removes a quorum-finished async delete job from the index and marks the
// matching DeleteInfo's job as FINISHED. Clears the job's task list first.
public void removeDeleteJobAndSetState(AsyncDeleteJob job) {
job.clearTasks();
writeLock();
try {
idToQuorumFinishedDeleteJob.remove(job.getJobId());
List<DeleteInfo> deleteInfos = dbToDeleteInfos.get(job.getDbId());
// The delete infos for this db are expected to exist at this point.
Preconditions.checkNotNull(deleteInfos);
for (DeleteInfo deleteInfo : deleteInfos) {
if (deleteInfo.getJobId() == job.getJobId()) {
deleteInfo.getAsyncDeleteJob().setState(DeleteState.FINISHED);
LOG.info("replay set async delete job to finished: {}", job.getJobId());
}
}
} finally {
writeUnlock();
}
}
/** Returns a snapshot copy of all quorum-finished async delete jobs. */
public List<AsyncDeleteJob> getQuorumFinishedDeleteJobs() {
    readLock();
    try {
        // Copy under the lock so callers get a stable snapshot.
        return Lists.newArrayList(idToQuorumFinishedDeleteJob.values());
    } finally {
        readUnlock();
    }
}
/** Counts the known load jobs, excluding synchronous delete jobs. */
public int getLoadJobNumber() {
    readLock();
    try {
        if (idToLoadJob == null) {
            return 0;
        }
        int count = 0;
        for (LoadJob candidate : idToLoadJob.values()) {
            if (candidate.isSyncDeleteJob()) {
                continue; // sync delete jobs are tracked here but not counted
            }
            count++;
        }
        return count;
    } finally {
        readUnlock();
    }
}
// Returns the live internal job map (not a copy); callers must respect the
// manager's read/write locking when iterating.
public Map<Long, LoadJob> getIdToLoadJob() {
return idToLoadJob;
}
// Returns the live db-id -> load jobs index (not a copy).
public Map<Long, List<LoadJob>> getDbToLoadJobs() {
return dbToLoadJobs;
}
// Returns the live db-id -> delete jobs index (not a copy).
public Map<Long, List<LoadJob>> getDbToDeleteJobs() {
return dbToDeleteJobs;
}
// Returns the live db-id -> delete infos index (not a copy).
public Map<Long, List<DeleteInfo>> getDbToDeleteInfos() {
return dbToDeleteInfos;
}
/** Collects the transaction ids of every load job in the given database. */
public Set<Long> getTxnIdsByDb(Long dbId) {
    Set<Long> result = Sets.newHashSet();
    readLock();
    try {
        List<LoadJob> dbJobs = dbToLoadJobs.get(dbId);
        if (dbJobs == null) {
            return result; // no jobs recorded for this db
        }
        for (LoadJob dbJob : dbJobs) {
            result.add(dbJob.getTransactionId());
        }
        return result;
    } finally {
        readUnlock();
    }
}
// Returns the internal list of load jobs for a database, or null when none
// exist. The list is live state; do not mutate it outside the lock.
public List<LoadJob> getDbLoadJobs(long dbId) {
readLock();
try {
return dbToLoadJobs.get(dbId);
} finally {
readUnlock();
}
}
/**
 * Returns a copy of all load jobs currently tracked in the given state.
 * FINISHED and CANCELLED have no dedicated map, so they yield an empty list.
 */
public List<LoadJob> getLoadJobs(JobState jobState) {
    List<LoadJob> result = new ArrayList<LoadJob>();
    readLock();
    try {
        Map<Long, LoadJob> stateMap = null;
        switch (jobState) {
            case PENDING:
                stateMap = idToPendingLoadJob;
                break;
            case ETL:
                stateMap = idToEtlLoadJob;
                break;
            case LOADING:
                stateMap = idToLoadingLoadJob;
                break;
            case QUORUM_FINISHED:
                stateMap = idToQuorumFinishedLoadJob;
                break;
            default:
                break;
        }
        if (stateMap != null) {
            result.addAll(stateMap.values());
        }
        return result;
    } finally {
        readUnlock();
    }
}
/** Counts the load jobs in the given database that are in jobState. */
public long getLoadJobNum(JobState jobState, long dbId) {
    readLock();
    try {
        List<LoadJob> dbJobs = this.dbToLoadJobs.get(dbId);
        if (dbJobs == null) {
            return 0;
        }
        long matched = 0;
        for (LoadJob candidate : dbJobs) {
            if (candidate.getState() == jobState) {
                matched++;
            }
        }
        return matched;
    } finally {
        readUnlock();
    }
}
// Looks up a load job by id; returns null when unknown.
public LoadJob getLoadJob(long jobId) {
readLock();
try {
return idToLoadJob.get(jobId);
} finally {
readUnlock();
}
}
// Looks up a quorum-finished async delete job by id; returns null when unknown.
public AsyncDeleteJob getAsyncDeleteJob(long jobId) {
readLock();
try {
return idToQuorumFinishedDeleteJob.get(jobId);
} finally {
readUnlock();
}
}
// Returns a defensive copy of all quorum-finished async delete jobs.
public List<AsyncDeleteJob> getCopiedAsyncDeleteJobs() {
readLock();
try {
return Lists.newArrayList(idToQuorumFinishedDeleteJob.values());
} finally {
readUnlock();
}
}
/**
 * Builds one SHOW-LOAD-style result row per load job in the given database
 * that passes the label/state filters and the caller's LOAD privilege check.
 *
 * Row layout: id, label, state, progress, etl type, etl counters, config
 * summary, fail msg, five timestamps, tracking url.
 *
 * @param dbId          database id whose jobs are listed
 * @param dbName        qualified db name (used for privilege checks)
 * @param labelValue    label filter; null means "match all"
 * @param accurateMatch true for exact label match, false for substring match
 * @param states        optional state filter; null means "match all"
 * @return rows in job-list order; empty when the db has no jobs
 */
public LinkedList<List<Comparable>> getLoadJobInfosByDb(long dbId, String dbName, String labelValue,
                                                        boolean accurateMatch, Set<JobState> states) {
    LinkedList<List<Comparable>> loadJobInfos = new LinkedList<List<Comparable>>();
    readLock();
    try {
        List<LoadJob> loadJobs = this.dbToLoadJobs.get(dbId);
        if (loadJobs == null) {
            return loadJobInfos;
        }
        long start = System.currentTimeMillis();
        LOG.debug("begin to get load job info, size: {}", loadJobs.size());
        for (LoadJob loadJob : loadJobs) {
            String label = loadJob.getLabel();
            JobState state = loadJob.getState();
            // Label filter: exact or substring match depending on accurateMatch.
            if (labelValue != null) {
                boolean matched = accurateMatch ? label.equals(labelValue) : label.contains(labelValue);
                if (!matched) {
                    continue;
                }
            }
            // State filter.
            if (states != null && !states.contains(state)) {
                continue;
            }
            // Privilege check: db-level when the job records no tables, else per table.
            Set<String> tableNames = loadJob.getTableNames();
            if (tableNames.isEmpty()) {
                if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
                        PrivPredicate.LOAD)) {
                    continue;
                }
            } else {
                boolean auth = true;
                for (String tblName : tableNames) {
                    if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName,
                            tblName, PrivPredicate.LOAD)) {
                        auth = false;
                        break;
                    }
                }
                if (!auth) {
                    continue;
                }
            }
            List<Comparable> jobInfo = new ArrayList<Comparable>();
            jobInfo.add(loadJob.getId());
            jobInfo.add(label);
            jobInfo.add(state.name());
            // Progress column, derived from the job state.
            switch (state) {
                case PENDING:
                    jobInfo.add("ETL:0%; LOAD:0%");
                    break;
                case ETL:
                    jobInfo.add("ETL:" + loadJob.getProgress() + "%; LOAD:0%");
                    break;
                case LOADING:
                    jobInfo.add("ETL:100%; LOAD:" + loadJob.getProgress() + "%");
                    break;
                case QUORUM_FINISHED:
                case FINISHED:
                    jobInfo.add("ETL:100%; LOAD:100%");
                    break;
                case CANCELLED:
                default:
                    jobInfo.add("ETL:N/A; LOAD:N/A");
                    break;
            }
            jobInfo.add(loadJob.getEtlJobType().name());
            // ETL counters of interest (all counters for MINI jobs).
            EtlStatus status = loadJob.getEtlJobStatus();
            if (status == null || status.getState() == TEtlState.CANCELLED) {
                jobInfo.add(FeConstants.null_string);
            } else {
                List<String> info = Lists.newArrayList();
                for (Entry<String, String> counter : status.getCounters().entrySet()) {
                    String key = counter.getKey();
                    if (key.equalsIgnoreCase("HDFS bytes read")
                            || key.equalsIgnoreCase("Map input records")
                            || key.startsWith("dpp.")
                            || loadJob.getEtlJobType() == EtlJobType.MINI) {
                        info.add(key + "=" + counter.getValue());
                    }
                }
                jobInfo.add(info.isEmpty() ? FeConstants.null_string : StringUtils.join(info, "; "));
            }
            // Job configuration summary.
            jobInfo.add("cluster:" + loadJob.getHadoopCluster()
                    + "; timeout(s):" + loadJob.getTimeoutSecond()
                    + "; max_filter_ratio:" + loadJob.getMaxFilterRatio());
            // Failure info, only meaningful for cancelled jobs.
            if (state == JobState.CANCELLED) {
                FailMsg failMsg = loadJob.getFailMsg();
                jobInfo.add("type:" + failMsg.getCancelType() + "; msg:" + failMsg.getMsg());
            } else {
                jobInfo.add(FeConstants.null_string);
            }
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getCreateTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getEtlStartTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getEtlFinishTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getLoadStartTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getLoadFinishTimeMs()));
            // BUG FIX: status may be null (that case is handled above for the
            // counters column), but the original dereferenced it here
            // unconditionally, causing an NPE for jobs without an ETL status.
            jobInfo.add(status == null ? FeConstants.null_string : status.getTrackingUrl());
            loadJobInfos.add(jobInfo);
        }
        LOG.debug("finished to get load job info, cost: {}", (System.currentTimeMillis() - start));
    } finally {
        readUnlock();
    }
    return loadJobInfos;
}
/**
 * Returns the id of the most recent (largest-id) load job in the database
 * whose label equals labelValue, or 0 when none matches.
 *
 * A null labelValue matches every job (original behavior preserved).
 * Fix: the original kept an unused local LoadJob reference; removed.
 */
public long getLatestJobIdByLabel(long dbId, String labelValue) {
    long latestJobId = 0;
    readLock();
    try {
        List<LoadJob> loadJobs = this.dbToLoadJobs.get(dbId);
        if (loadJobs == null) {
            return 0;
        }
        for (LoadJob loadJob : loadJobs) {
            if (labelValue != null && !loadJob.getLabel().equals(labelValue)) {
                continue;
            }
            // Larger ids are newer; keep the maximum seen.
            latestJobId = Math.max(latestJobId, loadJob.getId());
        }
    } finally {
        readUnlock();
    }
    return latestJobId;
}
// Returns progress rows for a LOADING or QUORUM_FINISHED load job: one row
// per replica that has not yet caught up to the target partition version.
// Row layout: backendId, tabletId, replicaId, replicaVersion,
// replicaVersionHash, partitionId, targetVersion, targetVersionHash.
// Rows are sorted by columns 3 and 0 (see the ListComparator at the end).
public List<List<Comparable>> getLoadJobUnfinishedInfo(long jobId) {
LinkedList<List<Comparable>> infos = new LinkedList<List<Comparable>>();
TabletInvertedIndex invertedIndex = Catalog.getCurrentInvertedIndex();
LoadJob loadJob = getLoadJob(jobId);
if (loadJob == null
|| (loadJob.getState() != JobState.LOADING && loadJob.getState() != JobState.QUORUM_FINISHED)) {
return infos;
}
long dbId = loadJob.getDbId();
Database db = Catalog.getCurrentCatalog().getDb(dbId);
if (db == null) {
return infos;
}
// Lock order: db read lock first, then this manager's read lock.
db.readLock();
try {
readLock();
try {
Map<Long, TabletLoadInfo> tabletMap = loadJob.getIdToTabletLoadInfo();
for (long tabletId : tabletMap.keySet()) {
TabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);
if (tabletMeta == null) {
// tablet is no longer in the inverted index; skip stale entry
continue;
}
long tableId = tabletMeta.getTableId();
OlapTable table = (OlapTable) db.getTable(tableId);
if (table == null) {
continue;
}
long partitionId = tabletMeta.getPartitionId();
Partition partition = table.getPartition(partitionId);
if (partition == null) {
continue;
}
long indexId = tabletMeta.getIndexId();
MaterializedIndex index = partition.getIndex(indexId);
if (index == null) {
continue;
}
Tablet tablet = index.getTablet(tabletId);
if (tablet == null) {
continue;
}
PartitionLoadInfo partitionLoadInfo = loadJob.getPartitionLoadInfo(tableId, partitionId);
long version = partitionLoadInfo.getVersion();
long versionHash = partitionLoadInfo.getVersionHash();
for (Replica replica : tablet.getReplicas()) {
// Replicas already at the target version are finished; skip them.
if (replica.checkVersionCatchUp(version, versionHash, false)) {
continue;
}
List<Comparable> info = Lists.newArrayList();
info.add(replica.getBackendId());
info.add(tabletId);
info.add(replica.getId());
info.add(replica.getVersion());
info.add(replica.getVersionHash());
info.add(partitionId);
info.add(version);
info.add(versionHash);
infos.add(info);
}
}
} finally {
readUnlock();
}
} finally {
db.readUnlock();
}
// Sort rows for stable output.
ListComparator<List<Comparable>> comparator = new ListComparator<List<Comparable>>(3, 0);
Collections.sort(infos, comparator);
return infos;
}
// Returns the current load-error hub configuration (may be null if unset).
public LoadErrorHub.Param getLoadErrorHubInfo() {
return loadErrorHubParam;
}
// Installs an already-built hub configuration (e.g. during replay);
// does not validate or write the edit log.
public void setLoadErrorHubInfo(LoadErrorHub.Param info) {
this.loadErrorHubParam = info;
}
/**
 * Configures the load-error hub from user-supplied properties and persists
 * the change through the edit log.
 *
 * Supported "type" values: MYSQL (host/port/user/password/database/table),
 * BROKER (name/path plus broker-specific properties), or "null" to disable.
 *
 * Fixes over the original: a missing "type" property no longer NPEs, and an
 * unrecognized type now fails instead of silently re-logging the old config.
 *
 * @throws DdlException if a required property is missing or invalid
 */
public void setLoadErrorHubInfo(Map<String, String> properties) throws DdlException {
    String type = properties.get("type");
    // Guard: the original dereferenced "type" without a null check.
    if (Strings.isNullOrEmpty(type)) {
        throw new DdlException("load error hub type is missing");
    }
    if (type.equalsIgnoreCase("MYSQL")) {
        String host = properties.get("host");
        if (Strings.isNullOrEmpty(host)) {
            throw new DdlException("mysql host is missing");
        }
        int port;
        try {
            // parseInt avoids the needless boxing of Integer.valueOf.
            port = Integer.parseInt(properties.get("port"));
        } catch (NumberFormatException e) {
            throw new DdlException("invalid mysql port: " + properties.get("port"));
        }
        String user = properties.get("user");
        if (Strings.isNullOrEmpty(user)) {
            throw new DdlException("mysql user name is missing");
        }
        String db = properties.get("database");
        if (Strings.isNullOrEmpty(db)) {
            throw new DdlException("mysql database is missing");
        }
        String tbl = properties.get("table");
        if (Strings.isNullOrEmpty(tbl)) {
            throw new DdlException("mysql table is missing");
        }
        String pwd = Strings.nullToEmpty(properties.get("password"));
        MysqlLoadErrorHub.MysqlParam param = new MysqlLoadErrorHub.MysqlParam(host, port, user, pwd, db, tbl);
        loadErrorHubParam = LoadErrorHub.Param.createMysqlParam(param);
    } else if (type.equalsIgnoreCase("BROKER")) {
        String brokerName = properties.get("name");
        if (Strings.isNullOrEmpty(brokerName)) {
            throw new DdlException("broker name is missing");
        }
        properties.remove("name");
        if (!Catalog.getCurrentCatalog().getBrokerMgr().contaisnBroker(brokerName)) {
            throw new DdlException("broker does not exist: " + brokerName);
        }
        String path = properties.get("path");
        if (Strings.isNullOrEmpty(path)) {
            throw new DdlException("broker path is missing");
        }
        properties.remove("path");
        // Verify the path is reachable through the broker before accepting it.
        BlobStorage blobStorage = new BlobStorage(brokerName, properties);
        Status st = blobStorage.checkPathExist(path);
        if (!st.ok()) {
            throw new DdlException("failed to visit path: " + path + ", err: " + st.getErrMsg());
        }
        BrokerLoadErrorHub.BrokerParam param = new BrokerLoadErrorHub.BrokerParam(brokerName, path, properties);
        loadErrorHubParam = LoadErrorHub.Param.createBrokerParam(param);
    } else if (type.equalsIgnoreCase("null")) {
        loadErrorHubParam = LoadErrorHub.Param.createNullParam();
    } else {
        // Previously an unknown type fell through and re-logged the old param.
        throw new DdlException("unknown load error hub type: " + type);
    }
    Catalog.getCurrentCatalog().getEditLog().logSetLoadErrorHub(loadErrorHubParam);
    LOG.info("set load error hub info: {}", loadErrorHubParam);
}
// Lightweight holder for reporting a load job's status to callers.
// dbName/label/clusterName are supplied by the caller; the remaining fields
// are filled in by getJobInfo().
public static class JobInfo {
public String dbName;
public Set<String> tblNames = Sets.newHashSet();
public String label;
public String clusterName;
public JobState state;
public String failMsg;
public String trackingUrl;
public JobInfo(String dbName, String label, String clusterName) {
this.dbName = dbName;
this.label = label;
this.clusterName = clusterName;
}
}
// Fills the given JobInfo with the latest job matching its label in its
// database: table names, state (QUORUM_FINISHED is reported as FINISHED),
// failure message and tracking URL.
// Throws MetaNotFoundException when the db is unknown, DdlException when no
// job matches.
public void getJobInfo(JobInfo info) throws DdlException, MetaNotFoundException {
String fullDbName = ClusterNamespace.getFullName(info.clusterName, info.dbName);
info.dbName = fullDbName;
Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
if (db == null) {
throw new MetaNotFoundException("Unknown database(" + info.dbName + ")");
}
readLock();
try {
Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
if (labelToLoadJobs == null) {
throw new DdlException("No jobs belong to database(" + info.dbName + ")");
}
List<LoadJob> loadJobs = labelToLoadJobs.get(info.label);
if (loadJobs == null) {
throw new DdlException("Unknown job(" + info.label + ")");
}
// Use the most recent job with this label (last in list).
LoadJob job = loadJobs.get(loadJobs.size() - 1);
if (!job.getTableNames().isEmpty()) {
info.tblNames.addAll(job.getTableNames());
}
info.state = job.getState();
if (info.state == JobState.QUORUM_FINISHED) {
// Callers only distinguish finished/unfinished; map quorum to FINISHED.
info.state = JobState.FINISHED;
}
info.failMsg = job.getFailMsg().getMsg();
info.trackingUrl = job.getEtlJobStatus().getTrackingUrl();
} finally {
readUnlock();
}
}
// Applies quorum-finished state for a job during replay. Caller must hold
// both the db write lock and this manager's write lock.
// A transactionId < 0 marks an old-style (pre-transaction) job: for those,
// replica version info and partition versions recorded in the job are
// re-applied and the job moves from the loading map to the quorum map.
// Finally the stored job object is swapped in all indexes.
public void unprotectQuorumLoadJob(LoadJob job, Database db) {
if (job.getTransactionId() < 0) {
removeLoadingPartitions(job);
// Restore per-replica version info persisted with the job.
Map<Long, ReplicaPersistInfo> replicaInfos = job.getReplicaPersistInfos();
if (replicaInfos != null) {
for (ReplicaPersistInfo info : replicaInfos.values()) {
OlapTable table = (OlapTable) db.getTable(info.getTableId());
if (table == null) {
LOG.warn("the table[{}] is missing", info.getIndexId());
continue;
}
Partition partition = table.getPartition(info.getPartitionId());
if (partition == null) {
LOG.warn("the partition[{}] is missing", info.getIndexId());
continue;
}
MaterializedIndex index = partition.getIndex(info.getIndexId());
if (index == null) {
LOG.warn("the index[{}] is missing", info.getIndexId());
continue;
}
Tablet tablet = index.getTablet(info.getTabletId());
if (tablet == null) {
LOG.warn("the tablet[{}] is missing", info.getTabletId());
continue;
}
Replica replica = tablet.getReplicaById(info.getReplicaId());
if (replica == null) {
LOG.warn("the replica[{}] is missing", info.getReplicaId());
continue;
}
replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
info.getDataSize(), info.getRowCount());
}
}
long jobId = job.getId();
// Re-apply partition versions and refresh index row counts.
Map<Long, TableLoadInfo> idToTableLoadInfo = job.getIdToTableLoadInfo();
if (idToTableLoadInfo != null) {
for (Entry<Long, TableLoadInfo> tableEntry : idToTableLoadInfo.entrySet()) {
long tableId = tableEntry.getKey();
OlapTable table = (OlapTable) db.getTable(tableId);
TableLoadInfo tableLoadInfo = tableEntry.getValue();
for (Entry<Long, PartitionLoadInfo> entry : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
long partitionId = entry.getKey();
Partition partition = table.getPartition(partitionId);
PartitionLoadInfo partitionLoadInfo = entry.getValue();
if (!partitionLoadInfo.isNeedLoad()) {
continue;
}
updatePartitionVersion(partition, partitionLoadInfo.getVersion(),
partitionLoadInfo.getVersionHash(), jobId);
// Index row count = sum over tablets of the max replica row count.
for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
long indexRowCount = 0L;
for (Tablet tablet : materializedIndex.getTablets()) {
long tabletRowCount = 0L;
for (Replica replica : tablet.getReplicas()) {
long replicaRowCount = replica.getRowCount();
if (replicaRowCount > tabletRowCount) {
tabletRowCount = replicaRowCount;
}
}
indexRowCount += tabletRowCount;
}
materializedIndex.setRowCount(indexRowCount);
}
}
}
}
idToLoadingLoadJob.remove(jobId);
idToQuorumFinishedLoadJob.put(jobId, job);
}
replaceLoadJob(job);
}
// Replays a quorum-finished edit log entry; takes the db write lock then the
// manager write lock (consistent lock order) before delegating.
public void replayQuorumLoadJob(LoadJob job, Catalog catalog) throws DdlException {
Database db = catalog.getDb(job.getDbId());
db.writeLock();
try {
writeLock();
try {
unprotectQuorumLoadJob(job, db);
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
}
// Applies finished state for a job during replay. Caller must hold both the
// db write lock and this manager's write lock.
// For old-style jobs (transactionId < 0) replica version info recorded in
// the job is re-applied; for transactional jobs the job is simply removed
// from the pending/loading maps and marked 100% done.
public void unprotectFinishLoadJob(LoadJob job, Database db) {
long jobId = job.getId();
if (job.getTransactionId() < 0) {
idToQuorumFinishedLoadJob.remove(jobId);
// Restore per-replica version info persisted with the job.
Map<Long, ReplicaPersistInfo> replicaInfos = job.getReplicaPersistInfos();
if (replicaInfos != null) {
for (ReplicaPersistInfo info : replicaInfos.values()) {
OlapTable table = (OlapTable) db.getTable(info.getTableId());
if (table == null) {
LOG.warn("the table[{}] is missing", info.getIndexId());
continue;
}
Partition partition = table.getPartition(info.getPartitionId());
if (partition == null) {
LOG.warn("the partition[{}] is missing", info.getIndexId());
continue;
}
MaterializedIndex index = partition.getIndex(info.getIndexId());
if (index == null) {
LOG.warn("the index[{}] is missing", info.getIndexId());
continue;
}
Tablet tablet = index.getTablet(info.getTabletId());
if (tablet == null) {
LOG.warn("the tablet[{}] is missing", info.getTabletId());
continue;
}
Replica replica = tablet.getReplicaById(info.getReplicaId());
if (replica == null) {
LOG.warn("the replica[{}] is missing", info.getReplicaId());
continue;
}
replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
info.getDataSize(), info.getRowCount());
}
}
} else {
idToPendingLoadJob.remove(jobId);
idToLoadingLoadJob.remove(jobId);
job.setProgress(100);
job.setLoadFinishTimeMs(System.currentTimeMillis());
}
replaceLoadJob(job);
}
// Replays a load-finished edit log entry; takes the db write lock then the
// manager write lock (consistent lock order) before delegating.
public void replayFinishLoadJob(LoadJob job, Catalog catalog) {
Database db = catalog.getDb(job.getDbId());
db.writeLock();
try {
writeLock();
try {
unprotectFinishLoadJob(job, db);
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
}
// Replays an edit log entry that clears rollup info on a materialized index.
public void replayClearRollupInfo(ReplicaPersistInfo info, Catalog catalog) {
Database db = catalog.getDb(info.getDbId());
db.writeLock();
try {
OlapTable olapTable = (OlapTable) db.getTable(info.getTableId());
Partition partition = olapTable.getPartition(info.getPartitionId());
MaterializedIndex index = partition.getIndex(info.getIndexId());
index.clearRollupIndexInfo();
} finally {
db.writeUnlock();
}
}
/**
 * Replaces the stored job object (same id) in every index that references
 * it: idToLoadJob plus, depending on the job kind, the db and label indexes
 * or the delete-job index. Caller must hold the write lock.
 *
 * Refactor: the original repeated the same position-preserving list
 * replacement three times; extracted into replaceJobInList().
 */
private void replaceLoadJob(LoadJob job) {
    long jobId = job.getId();
    if (!idToLoadJob.containsKey(jobId)) {
        LOG.warn("Does not find load job in idToLoadJob. JobId : {}", jobId);
        return;
    }
    idToLoadJob.put(jobId, job);
    if (!job.isSyncDeleteJob()) {
        // Regular load job: update the per-db list ...
        List<LoadJob> jobs = dbToLoadJobs.get(job.getDbId());
        if (jobs == null) {
            LOG.warn("Does not find db in dbToLoadJobs. DbId : {}",
                     job.getDbId());
            return;
        }
        if (!replaceJobInList(jobs, job)) {
            LOG.warn("Does not find load job for db. DbId : {}, jobId : {}",
                     job.getDbId(), jobId);
            return;
        }
        // ... and the per-label list.
        if (dbLabelToLoadJobs.get(job.getDbId()) == null) {
            LOG.warn("Does not find db in dbLabelToLoadJobs. DbId : {}",
                     job.getDbId());
            return;
        }
        jobs = dbLabelToLoadJobs.get(job.getDbId()).get(job.getLabel());
        if (jobs == null) {
            LOG.warn("Does not find label for db. label : {}, DbId : {}",
                     job.getLabel(), job.getDbId());
            return;
        }
        if (!replaceJobInList(jobs, job)) {
            LOG.warn("Does not find load job for label. label : {}, DbId : {}",
                     job.getLabel(), job.getDbId());
        }
    } else {
        // Sync delete job: update the per-db delete-job list.
        List<LoadJob> jobs = dbToDeleteJobs.get(job.getDbId());
        if (jobs == null) {
            LOG.warn("Does not find db in dbToDeleteJobs. DbId : {}",
                     job.getDbId());
            return;
        }
        if (!replaceJobInList(jobs, job)) {
            LOG.warn("Does not find delete load job for db. DbId : {}, jobId : {}",
                     job.getDbId(), jobId);
        }
    }
}

// Replaces the element with the same id as job, preserving its position in
// the list. Returns false when no element matches.
private static boolean replaceJobInList(List<LoadJob> jobs, LoadJob job) {
    for (int i = 0; i < jobs.size(); i++) {
        if (jobs.get(i).getId() == job.getId()) {
            jobs.set(i, job);
            return true;
        }
    }
    return false;
}
/**
 * Drops all load bookkeeping for a database (used when the db is removed).
 * Terminal (CANCELLED/FINISHED) jobs are also evicted from the global map.
 */
public void removeDbLoadJob(long dbId) {
    writeLock();
    try {
        List<LoadJob> removedJobs = dbToLoadJobs.remove(dbId);
        if (removedJobs != null) {
            for (LoadJob removed : removedJobs) {
                JobState removedState = removed.getState();
                if (removedState == JobState.CANCELLED || removedState == JobState.FINISHED) {
                    idToLoadJob.remove(removed.getId());
                }
            }
        }
        dbLabelToLoadJobs.remove(dbId);
        dbToDeleteJobs.remove(dbId);
    } finally {
        writeUnlock();
    }
}
// Evicts FINISHED/CANCELLED load jobs older than Config.label_keep_max_second
// from every index (id map, per-db list, delete list and label map),
// cleaning up empty containers as it goes.
public void removeOldLoadJobs() {
long currentTimeMs = System.currentTimeMillis();
writeLock();
try {
Iterator<Map.Entry<Long, LoadJob>> iter = idToLoadJob.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<Long, LoadJob> entry = iter.next();
LoadJob job = entry.getValue();
// Only terminal jobs past the retention window are removed.
if ((currentTimeMs - job.getCreateTimeMs()) / 1000 > Config.label_keep_max_second
&& (job.getState() == JobState.FINISHED || job.getState() == JobState.CANCELLED)) {
long dbId = job.getDbId();
String label = job.getLabel();
iter.remove();
List<LoadJob> loadJobs = dbToLoadJobs.get(dbId);
if (loadJobs != null) {
loadJobs.remove(job);
if (loadJobs.size() == 0) {
dbToLoadJobs.remove(dbId);
}
}
List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
if (deleteJobs != null) {
deleteJobs.remove(job);
if (deleteJobs.size() == 0) {
dbToDeleteJobs.remove(dbId);
}
}
Map<String, List<LoadJob>> mapLabelToJobs = dbLabelToLoadJobs.get(dbId);
if (mapLabelToJobs != null) {
loadJobs = mapLabelToJobs.get(label);
if (loadJobs != null) {
loadJobs.remove(job);
if (loadJobs.size() == 0) {
mapLabelToJobs.remove(label);
if (mapLabelToJobs.size() == 0) {
dbLabelToLoadJobs.remove(dbId);
}
}
}
}
}
}
} finally {
writeUnlock();
}
}
// Cleans up external ETL artifacts after a job reaches a terminal state
// (CANCELLED or FINISHED): kills/cleans Hadoop DPP output, or asks backends
// to delete mini-load ETL files. No-op for INSERT/BROKER/DELETE jobs.
// srcState is the state the job was in before it terminated.
public void clearJob(LoadJob job, JobState srcState) {
JobState state = job.getState();
if (state != JobState.CANCELLED && state != JobState.FINISHED) {
LOG.warn("job state error. state: {}", state);
return;
}
EtlJobType etlJobType = job.getEtlJobType();
switch (etlJobType) {
case HADOOP:
DppScheduler dppScheduler = new DppScheduler(job.getHadoopDppConfig());
// A job cancelled while in ETL still has a running Hadoop job to kill.
if (state == JobState.CANCELLED && srcState == JobState.ETL) {
try {
dppScheduler.killEtlJob(job.getHadoopEtlJobId());
} catch (Exception e) {
LOG.warn("kill etl job error", e);
}
}
// Always try to remove the ETL output directory (best-effort).
DppConfig dppConfig = job.getHadoopDppConfig();
String outputPath = DppScheduler.getEtlOutputPath(dppConfig.getFsDefaultName(),
dppConfig.getOutputPath(), job.getDbId(), job.getLabel(), "");
try {
dppScheduler.deleteEtlOutputPath(outputPath);
} catch (Exception e) {
LOG.warn("delete etl output path error", e);
}
break;
case MINI:
// Ask each involved backend to delete its local ETL files.
for (MiniEtlTaskInfo taskInfo : job.getMiniEtlTasks().values()) {
long backendId = taskInfo.getBackendId();
Backend backend = Catalog.getCurrentSystemInfo().getBackend(backendId);
if (backend == null) {
LOG.warn("backend does not exist. id: {}", backendId);
break;
}
long dbId = job.getDbId();
Database db = Catalog.getCurrentCatalog().getDb(dbId);
if (db == null) {
LOG.warn("db does not exist. id: {}", dbId);
break;
}
AgentClient client = new AgentClient(backend.getHost(), backend.getBePort());
client.deleteEtlFiles(dbId, job.getId(), db.getFullName(), job.getLabel());
}
break;
case INSERT:
break;
case BROKER:
break;
case DELETE:
break;
default:
LOG.warn("unknown etl job type. type: {}, job id: {}", etlJobType.name(), job.getId());
break;
}
}
// Convenience overload: state transition with no cancel information.
public boolean updateLoadJobState(LoadJob job, JobState destState) {
return updateLoadJobState(job, destState, CancelType.UNKNOWN, null, null);
}
// Attempts to move a load job to destState, applying per-state bookkeeping
// and writing the matching edit-log record. Transitions are validated
// against STATE_CHANGE_MAP. Returns false when the transition is illegal or
// did not take effect. cancelType/msg/failedMsg are only used when the job
// ends up CANCELLED.
public boolean updateLoadJobState(LoadJob job, JobState destState, CancelType cancelType, String msg,
List<String> failedMsg) {
boolean result = true;
JobState srcState = null;
long jobId = job.getId();
long dbId = job.getDbId();
Database db = Catalog.getCurrentCatalog().getDb(dbId);
String errMsg = msg;
if (db == null) {
// Database is gone: the only possible outcome is cancellation.
errMsg = "db does not exist. id: " + dbId;
LOG.warn(errMsg);
writeLock();
try {
processCancelled(job, cancelType, errMsg, failedMsg);
} finally {
writeUnlock();
}
} else {
// Lock order: db write lock first, then the manager's write lock.
db.writeLock();
try {
writeLock();
try {
srcState = job.getState();
// Reject transitions not allowed by the state machine.
if (!STATE_CHANGE_MAP.containsKey(srcState)) {
LOG.warn("src state error. src state: {}", srcState.name());
return false;
}
Set<JobState> destStates = STATE_CHANGE_MAP.get(srcState);
if (!destStates.contains(destState)) {
LOG.warn("state change error. src state: {}, dest state: {}",
srcState.name(), destState.name());
return false;
}
switch (destState) {
case ETL:
// Move from pending to etl; reset progress and stamp the etl start.
idToPendingLoadJob.remove(jobId);
idToEtlLoadJob.put(jobId, job);
job.setProgress(0);
job.setEtlStartTimeMs(System.currentTimeMillis());
job.setState(destState);
Catalog.getCurrentCatalog().getEditLog().logLoadEtl(job);
break;
case LOADING:
// Move from etl to loading; reset progress and stamp the load start.
idToEtlLoadJob.remove(jobId);
idToLoadingLoadJob.put(jobId, job);
job.setProgress(0);
job.setLoadStartTimeMs(System.currentTimeMillis());
job.setState(destState);
Catalog.getCurrentCatalog().getEditLog().logLoadLoading(job);
break;
case QUORUM_FINISHED:
// processQuorumFinished does the map moves; cancel on failure.
if (processQuorumFinished(job, db)) {
Catalog.getCurrentCatalog().getEditLog().logLoadQuorum(job);
} else {
errMsg = "process loading finished fail";
processCancelled(job, cancelType, errMsg, failedMsg);
}
break;
case FINISHED:
if (job.getTransactionId() > 0) {
idToPendingLoadJob.remove(jobId);
idToLoadingLoadJob.remove(jobId);
job.setProgress(100);
job.setLoadFinishTimeMs(System.currentTimeMillis());
if (job.isSyncDeleteJob()) {
// Copy the committed partition version back into the DeleteInfo.
TransactionState transactionState = Catalog.getCurrentGlobalTransactionMgr()
.getTransactionState(job.getDbId(), job.getTransactionId());
DeleteInfo deleteInfo = job.getDeleteInfo();
TableCommitInfo tableCommitInfo = transactionState.getTableCommitInfo(deleteInfo.getTableId());
PartitionCommitInfo partitionCommitInfo = tableCommitInfo.getPartitionCommitInfo(deleteInfo.getPartitionId());
deleteInfo.updatePartitionVersionInfo(partitionCommitInfo.getVersion(),
partitionCommitInfo.getVersionHash());
}
}
MetricRepo.COUNTER_LOAD_FINISHED.increase(1L);
idToLoadingLoadJob.remove(jobId);
idToQuorumFinishedLoadJob.remove(jobId);
job.setState(destState);
// Drop any still-queued push tasks for this job.
for (PushTask pushTask : job.getPushTasks()) {
AgentTaskQueue.removePushTask(pushTask.getBackendId(), pushTask.getSignature(),
pushTask.getVersion(), pushTask.getVersionHash(),
pushTask.getPushType(), pushTask.getTaskType());
}
if (!job.isSyncDeleteJob()) {
job.clearRedundantInfoForHistoryJob();
}
Catalog.getCurrentCatalog().getEditLog().logLoadDone(job);
break;
case CANCELLED:
processCancelled(job, cancelType, errMsg, failedMsg);
break;
default:
Preconditions.checkState(false, "wrong job state: " + destState.name());
break;
}
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
}
// The transition succeeded only if the job actually reached destState.
if (destState != job.getState()) {
result = false;
}
return result;
}
// Transitions a job to QUORUM_FINISHED. First pass validates that every
// involved table and needed partition still exists; second pass applies the
// new partition versions and refreshes index row counts. Returns false when
// validation fails (caller then cancels the job).
// Caller must hold the db write lock and the manager's write lock.
private boolean processQuorumFinished(LoadJob job, Database db) {
long jobId = job.getId();
removeLoadingPartitions(job);
Map<Long, TableLoadInfo> idToTableLoadInfo = job.getIdToTableLoadInfo();
// Pass 1: validate tables and partitions before mutating anything.
for (Entry<Long, TableLoadInfo> tableEntry : idToTableLoadInfo.entrySet()) {
long tableId = tableEntry.getKey();
OlapTable table = (OlapTable) db.getTable(tableId);
if (table == null) {
LOG.warn("table does not exist, id: {}", tableId);
return false;
}
TableLoadInfo tableLoadInfo = tableEntry.getValue();
for (Entry<Long, PartitionLoadInfo> partitionEntry : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
long partitionId = partitionEntry.getKey();
PartitionLoadInfo partitionLoadInfo = partitionEntry.getValue();
if (!partitionLoadInfo.isNeedLoad()) {
continue;
}
Partition partition = table.getPartition(partitionId);
if (partition == null) {
LOG.warn("partition does not exist, id: {}", partitionId);
return false;
}
}
}
// Pass 2: apply versions and refresh row counts.
for (Entry<Long, TableLoadInfo> tableEntry : idToTableLoadInfo.entrySet()) {
long tableId = tableEntry.getKey();
OlapTable table = (OlapTable) db.getTable(tableId);
TableLoadInfo tableLoadInfo = tableEntry.getValue();
for (Entry<Long, PartitionLoadInfo> entry : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
long partitionId = entry.getKey();
Partition partition = table.getPartition(partitionId);
PartitionLoadInfo partitionLoadInfo = entry.getValue();
if (!partitionLoadInfo.isNeedLoad()) {
continue;
}
updatePartitionVersion(partition, partitionLoadInfo.getVersion(),
partitionLoadInfo.getVersionHash(), jobId);
// Index row count = sum over tablets of the max replica row count.
for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
long tableRowCount = 0L;
for (Tablet tablet : materializedIndex.getTablets()) {
long tabletRowCount = 0L;
for (Replica replica : tablet.getReplicas()) {
long replicaRowCount = replica.getRowCount();
if (replicaRowCount > tabletRowCount) {
tabletRowCount = replicaRowCount;
}
}
tableRowCount += tabletRowCount;
}
materializedIndex.setRowCount(tableRowCount);
}
}
}
// Move the job into the quorum-finished map and mark it done.
idToPendingLoadJob.remove(jobId);
idToLoadingLoadJob.remove(jobId);
idToQuorumFinishedLoadJob.put(jobId, job);
job.setProgress(100);
job.setLoadFinishTimeMs(System.currentTimeMillis());
job.setState(JobState.QUORUM_FINISHED);
return true;
}
// Publishes a new visible version/hash on the partition and logs the change.
private void updatePartitionVersion(Partition partition, long version, long versionHash, long jobId) {
    partition.updateVisibleVersionAndVersionHash(version, versionHash);
    LOG.info("update partition version success. version: {}, version hash: {}, job id: {}, partition id: {}",
            version, versionHash, jobId, partition.getId());
}
// Moves a job to CANCELLED: aborts its transaction, removes it from the map
// matching its current state, records the fail message and cleans up queued
// push tasks. Returns false only when the transaction abort itself failed
// (the failure reason is appended to failedMsg if provided).
// Caller must hold the manager's write lock.
private boolean processCancelled(LoadJob job, CancelType cancelType, String msg, List<String> failedMsg) {
long jobId = job.getId();
JobState srcState = job.getState();
CancelType tmpCancelType = CancelType.UNKNOWN;
// Abort the job's transaction first; a missing transaction is fine.
try {
Catalog.getCurrentGlobalTransactionMgr().abortTransaction(
job.getDbId(),
job.getTransactionId(),
job.getFailMsg().toString());
} catch (TransactionNotFoundException e) {
LOG.info("transaction not found when try to abort it: {}", e.getTransactionId());
} catch (Exception e) {
LOG.info("errors while abort transaction", e);
if (failedMsg != null) {
failedMsg.add("Abort tranaction failed: " + e.getMessage());
}
return false;
}
// Remove the job from its current state map and derive a default cancel type.
switch (srcState) {
case PENDING:
idToPendingLoadJob.remove(jobId);
tmpCancelType = CancelType.ETL_SUBMIT_FAIL;
break;
case ETL:
idToEtlLoadJob.remove(jobId);
tmpCancelType = CancelType.ETL_RUN_FAIL;
break;
case LOADING:
removeLoadingPartitions(job);
idToLoadingLoadJob.remove(jobId);
tmpCancelType = CancelType.LOAD_RUN_FAIL;
break;
case QUORUM_FINISHED:
idToQuorumFinishedLoadJob.remove(jobId);
tmpCancelType = CancelType.LOAD_RUN_FAIL;
break;
default:
Preconditions.checkState(false, "wrong job state: " + srcState.name());
break;
}
// Use the caller-provided cancel type unless it is UNKNOWN.
CancelType newCancelType = cancelType;
if (newCancelType == CancelType.UNKNOWN) {
newCancelType = tmpCancelType;
}
FailMsg failMsg = new FailMsg(newCancelType, msg);
job.setFailMsg(failMsg);
job.setLoadFinishTimeMs(System.currentTimeMillis());
job.setState(JobState.CANCELLED);
// Jobs past the loading phase may still have queued push tasks to remove.
if (srcState == JobState.LOADING || srcState == JobState.QUORUM_FINISHED) {
for (PushTask pushTask : job.getPushTasks()) {
AgentTaskQueue.removePushTask(pushTask.getBackendId(), pushTask.getSignature(),
pushTask.getVersion(), pushTask.getVersionHash(),
pushTask.getPushType(), pushTask.getTaskType());
}
}
job.clearRedundantInfoForHistoryJob();
Catalog.getCurrentCatalog().getEditLog().logLoadCancel(job);
return true;
}
/**
 * Atomically marks the given partitions as loading. Fails (returns false,
 * registers nothing) when any of them is already being loaded.
 */
public boolean addLoadingPartitions(Set<Long> partitionIds) {
    writeLock();
    try {
        // All-or-nothing: verify none is busy before registering any.
        for (Long partitionId : partitionIds) {
            if (loadingPartitionIds.contains(partitionId)) {
                LOG.info("partition {} is loading", partitionId);
                return false;
            }
        }
        loadingPartitionIds.addAll(partitionIds);
        return true;
    } finally {
        writeUnlock();
    }
}
// Re-registers the loading-partition flags of an old-style job
// (transactionId <= 0); transactional jobs do not use these flags.
private void recoverLoadingPartitions(LoadJob job) {
    if (job.getTransactionId() > 0) {
        return;
    }
    for (TableLoadInfo tableLoadInfo : job.getIdToTableLoadInfo().values()) {
        for (Entry<Long, PartitionLoadInfo> entry
                : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
            if (entry.getValue().isNeedLoad()) {
                loadingPartitionIds.add(entry.getKey());
            }
        }
    }
}
// Unmarks the given partitions as loading (takes the write lock itself).
public void removeLoadingPartitions(Set<Long> partitionIds) {
writeLock();
try {
loadingPartitionIds.removeAll(partitionIds);
} finally {
writeUnlock();
}
}
// Unmarks every partition this job had flagged as loading.
// Caller must hold the write lock.
private void removeLoadingPartitions(LoadJob job) {
    for (TableLoadInfo tableLoadInfo : job.getIdToTableLoadInfo().values()) {
        for (Entry<Long, PartitionLoadInfo> entry
                : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
            if (entry.getValue().isNeedLoad()) {
                loadingPartitionIds.remove(entry.getKey());
            }
        }
    }
}
/**
 * Returns true when no unfinished load job still touches {@code partitionId}.
 * Jobs in QUORUM_FINISHED state are tolerated when the caller passes a non-null
 * {@code quorumFinishedLoadJobs} list; such jobs are collected into it.
 * Any other unfinished job (or a quorum-finished one with a null list) makes
 * the check fail immediately.
 */
public boolean checkPartitionLoadFinished(long partitionId, List<LoadJob> quorumFinishedLoadJobs) {
    readLock();
    try {
        for (JobState state : JobState.values()) {
            // Terminal states can no longer hold the partition.
            if (state == JobState.FINISHED || state == JobState.CANCELLED) {
                continue;
            }
            for (LoadJob loadJob : this.getLoadJobs(state)) {
                Preconditions.checkNotNull(loadJob.getIdToTableLoadInfo());
                for (TableLoadInfo tableLoadInfo : loadJob.getIdToTableLoadInfo().values()) {
                    if (!tableLoadInfo.getIdToPartitionLoadInfo().containsKey(partitionId)) {
                        continue;
                    }
                    // Same condition as the original, rearranged via De Morgan:
                    // only a quorum-finished job with a collection list is tolerated.
                    if (state != JobState.QUORUM_FINISHED || quorumFinishedLoadJobs == null) {
                        return false;
                    }
                    quorumFinishedLoadJobs.add(loadJob);
                }
            }
        }
        return true;
    } finally {
        readUnlock();
    }
}
/**
 * Records a delete info during image load / replay. "unprotect" means the
 * caller is responsible for holding the write lock.
 * A quorum-finished async delete job attached to the info is also re-registered.
 */
public void unprotectAddDeleteInfo(DeleteInfo deleteInfo) {
    long dbId = deleteInfo.getDbId();
    List<DeleteInfo> infosOfDb = dbToDeleteInfos.get(dbId);
    if (infosOfDb == null) {
        infosOfDb = Lists.newArrayList();
        dbToDeleteInfos.put(dbId, infosOfDb);
    }
    infosOfDb.add(deleteInfo);
    AsyncDeleteJob asyncJob = deleteInfo.getAsyncDeleteJob();
    if (asyncJob != null && deleteInfo.getState() == DeleteState.QUORUM_FINISHED) {
        idToQuorumFinishedDeleteJob.put(asyncJob.getJobId(), asyncJob);
        LOG.info("unprotected add asyncDeleteJob when load image: {}", asyncJob.getJobId());
    }
}
// Applies a (replayed) delete to catalog metadata: advances the partition version
// and syncs each recorded replica's version/size/row-count.
// "unprotect" convention: caller must already hold the db lock and this.writeLock.
public void unprotectDelete(DeleteInfo deleteInfo, Database db) {
OlapTable table = (OlapTable) db.getTable(deleteInfo.getTableId());
Partition partition = table.getPartition(deleteInfo.getPartitionId());
// Third argument -1: presumably "no associated load job id" — confirm against updatePartitionVersion.
updatePartitionVersion(partition, deleteInfo.getPartitionVersion(), deleteInfo.getPartitionVersionHash(), -1);
List<ReplicaPersistInfo> replicaInfos = deleteInfo.getReplicaPersistInfos();
if (replicaInfos != null) {
for (ReplicaPersistInfo info : replicaInfos) {
MaterializedIndex index = partition.getIndex(info.getIndexId());
Tablet tablet = index.getTablet(info.getTabletId());
Replica replica = tablet.getReplicaById(info.getReplicaId());
replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
info.getDataSize(), info.getRowCount());
}
}
// Only journals written with meta version >= 11 carry delete infos that must be
// indexed per-db for SHOW DELETE; older images are skipped for compatibility.
if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_11) {
long dbId = deleteInfo.getDbId();
List<DeleteInfo> deleteInfos = dbToDeleteInfos.get(dbId);
if (deleteInfos == null) {
deleteInfos = Lists.newArrayList();
dbToDeleteInfos.put(dbId, deleteInfos);
}
deleteInfos.add(deleteInfo);
}
// Re-register the async part of the delete so it can be finished later.
if (deleteInfo.getAsyncDeleteJob() != null) {
AsyncDeleteJob asyncDeleteJob = deleteInfo.getAsyncDeleteJob();
idToQuorumFinishedDeleteJob.put(asyncDeleteJob.getJobId(), asyncDeleteJob);
LOG.info("unprotected add asyncDeleteJob: {}", asyncDeleteJob.getJobId());
}
}
// Replays the completion of an async delete job from the edit log.
// Lock order: db write lock first, then this manager's write lock — the same
// order used elsewhere in this class (e.g. replayDelete); do not reorder.
public void replayFinishAsyncDeleteJob(AsyncDeleteJob deleteJob, Catalog catalog) {
Database db = catalog.getDb(deleteJob.getDbId());
db.writeLock();
try {
writeLock();
try {
Map<Long, ReplicaPersistInfo> replicaInfos = deleteJob.getReplicaPersistInfos();
if (replicaInfos != null) {
for (ReplicaPersistInfo info : replicaInfos.values()) {
// Each level of the hierarchy may have been dropped since the job ran;
// missing pieces are logged and skipped so replay never aborts.
OlapTable table = (OlapTable) db.getTable(info.getTableId());
if (table == null) {
LOG.warn("the table[{}] is missing", info.getIndexId());
continue;
}
Partition partition = table.getPartition(info.getPartitionId());
if (partition == null) {
LOG.warn("the partition[{}] is missing", info.getIndexId());
continue;
}
MaterializedIndex index = partition.getIndex(info.getIndexId());
if (index == null) {
LOG.warn("the index[{}] is missing", info.getIndexId());
continue;
}
Tablet tablet = index.getTablet(info.getTabletId());
if (tablet == null) {
LOG.warn("the tablet[{}] is missing", info.getTabletId());
continue;
}
Replica replica = tablet.getReplicaById(info.getReplicaId());
if (replica == null) {
LOG.warn("the replica[{}] is missing", info.getReplicaId());
continue;
}
replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
info.getDataSize(), info.getRowCount());
}
}
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
// Deliberately outside the locks; removeDeleteJobAndSetState handles its own locking.
removeDeleteJobAndSetState(deleteJob);
LOG.info("unprotected finish asyncDeleteJob: {}", deleteJob.getJobId());
}
// Replays a delete edit-log entry by delegating to unprotectDelete under the
// required locks. Lock order (db write lock, then manager write lock) must match
// replayFinishAsyncDeleteJob to avoid deadlock.
public void replayDelete(DeleteInfo deleteInfo, Catalog catalog) {
Database db = catalog.getDb(deleteInfo.getDbId());
db.writeLock();
try {
writeLock();
try {
unprotectDelete(deleteInfo, db);
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
}
/**
 * Validates delete conditions against the table schema and every visible
 * materialized index, and (optionally) renders them as display strings.
 *
 * @param table            target olap table
 * @param partition        target partition; must be in NORMAL state
 * @param conditions       predicates from the DELETE statement; only
 *                         BinaryPredicate and IsNullPredicate are supported
 * @param deleteConditions out-param: human-readable condition strings for
 *                         SHOW DELETE; skipped when null
 * @param preCheck         retained for interface compatibility (unused here)
 * @throws DdlException on any invalid partition state, unknown/non-key column,
 *                      bad literal value, or unsupported predicate type
 */
private void checkDeleteV2(OlapTable table, Partition partition, List<Predicate> conditions, List<String> deleteConditions, boolean preCheck)
        throws DdlException {
    PartitionState state = partition.getState();
    if (state != PartitionState.NORMAL) {
        throw new DdlException("Partition[" + partition.getName() + "]' state is not NORMAL: " + state.name());
    }
    // Case-insensitive lookup of base-schema columns.
    Map<String, Column> nameToColumn = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    for (Column column : table.getBaseSchema()) {
        nameToColumn.put(column.getName(), column);
    }
    for (Predicate condition : conditions) {
        SlotRef slotRef = null;
        if (condition instanceof BinaryPredicate) {
            BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
            slotRef = (SlotRef) binaryPredicate.getChild(0);
        } else if (condition instanceof IsNullPredicate) {
            IsNullPredicate isNullPredicate = (IsNullPredicate) condition;
            slotRef = (SlotRef) isNullPredicate.getChild(0);
        }
        // FIX: the original dereferenced slotRef unconditionally, producing an NPE
        // for any predicate that is neither binary nor IS NULL. Fail cleanly instead.
        if (slotRef == null) {
            throw new DdlException("Unsupported delete condition: only binary"
                    + " or IS NULL predicates are allowed");
        }
        String columnName = slotRef.getColumnName();
        if (!nameToColumn.containsKey(columnName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_BAD_FIELD_ERROR, columnName, table.getName());
        }
        Column column = nameToColumn.get(columnName);
        if (!column.isKey()) {
            throw new DdlException("Column[" + columnName + "] is not key column");
        }
        if (condition instanceof BinaryPredicate) {
            String value = null;
            try {
                // Verify the literal parses as the column's type.
                BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
                value = ((LiteralExpr) binaryPredicate.getChild(1)).getStringValue();
                LiteralExpr.create(value, Type.fromPrimitiveType(column.getDataType()));
            } catch (AnalysisException e) {
                throw new DdlException("Invalid column value[" + value + "]");
            }
        }
        // Normalize the column name to the schema's canonical casing.
        slotRef.setCol(column.getName());
    }
    // Every visible materialized index must also contain the column, and for
    // duplicate-key tables it must be a key column there too.
    Map<Long, List<Column>> indexIdToSchema = table.getIndexIdToSchema();
    for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.VISIBLE)) {
        Map<String, Column> indexColNameToColumn = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (Column column : indexIdToSchema.get(index.getId())) {
            indexColNameToColumn.put(column.getName(), column);
        }
        String indexName = table.getIndexNameById(index.getId());
        for (Predicate condition : conditions) {
            String columnName = null;
            if (condition instanceof BinaryPredicate) {
                BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
                columnName = ((SlotRef) binaryPredicate.getChild(0)).getColumnName();
            } else if (condition instanceof IsNullPredicate) {
                IsNullPredicate isNullPredicate = (IsNullPredicate) condition;
                columnName = ((SlotRef) isNullPredicate.getChild(0)).getColumnName();
            }
            Column column = indexColNameToColumn.get(columnName);
            if (column == null) {
                ErrorReport.reportDdlException(ErrorCode.ERR_BAD_FIELD_ERROR, columnName, indexName);
            }
            if (table.getKeysType() == KeysType.DUP_KEYS && !column.isKey()) {
                throw new DdlException("Column[" + columnName + "] is not key column in index[" + indexName + "]");
            }
        }
    }
    if (deleteConditions == null) {
        return;
    }
    // Render each condition for display (SHOW DELETE).
    for (Predicate condition : conditions) {
        if (condition instanceof BinaryPredicate) {
            BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
            SlotRef slotRef = (SlotRef) binaryPredicate.getChild(0);
            String columnName = slotRef.getColumnName();
            StringBuilder sb = new StringBuilder();
            sb.append(columnName).append(" ").append(binaryPredicate.getOp().name()).append(" \"")
                    .append(((LiteralExpr) binaryPredicate.getChild(1)).getStringValue()).append("\"");
            deleteConditions.add(sb.toString());
        } else if (condition instanceof IsNullPredicate) {
            IsNullPredicate isNullPredicate = (IsNullPredicate) condition;
            SlotRef slotRef = (SlotRef) isNullPredicate.getChild(0);
            String columnName = slotRef.getColumnName();
            StringBuilder sb = new StringBuilder();
            sb.append(columnName);
            if (isNullPredicate.isNotNull()) {
                sb.append(" IS NOT NULL");
            } else {
                sb.append(" IS NULL");
            }
            deleteConditions.add(sb.toString());
        }
    }
}
/**
 * Atomically verifies no sync delete runs on the partition and then registers it.
 *
 * @return true if the partition was newly added to the under-delete set
 * @throws DdlException if a sync delete job is already running on it
 */
private boolean checkAndAddRunningSyncDeleteJob(long partitionId, String partitionName) throws DdlException {
    writeLock();
    try {
        // Throws when another sync delete already owns this partition.
        checkHasRunningSyncDeleteJob(partitionId, partitionName);
        return partitionUnderDelete.add(partitionId);
    } finally {
        writeUnlock();
    }
}
/** Throws DdlException when a synchronous delete job is running on the partition. */
private void checkHasRunningSyncDeleteJob(long partitionId, String partitionName) throws DdlException {
    readLock();
    try {
        boolean underDelete = partitionUnderDelete.contains(partitionId);
        if (underDelete) {
            throw new DdlException("Partition[" + partitionName + "] has running delete job. See 'SHOW DELETE'");
        }
    } finally {
        readUnlock();
    }
}
/**
 * Throws DdlException when an asynchronous delete touches the partition, either
 * as a quorum-finished async job or as a delete-backed load job still LOADING.
 *
 * Improvement over the original: iterates {@code dbToDeleteJobs.values()}
 * directly instead of keySet() followed by get(), removing one map lookup per db.
 */
private void checkHasRunningAsyncDeleteJob(long partitionId, String partitionName) throws DdlException {
    readLock();
    try {
        for (AsyncDeleteJob job : idToQuorumFinishedDeleteJob.values()) {
            if (job.getPartitionId() == partitionId) {
                throw new DdlException("Partition[" + partitionName + "] has running async delete job. "
                        + "See 'SHOW DELETE'");
            }
        }
        for (List<LoadJob> loadJobs : dbToDeleteJobs.values()) {
            for (LoadJob loadJob : loadJobs) {
                if (loadJob.getDeleteInfo().getPartitionId() == partitionId
                        && loadJob.getState() == JobState.LOADING) {
                    throw new DdlException("Partition[" + partitionName + "] has running async delete job. "
                            + "See 'SHOW DELETE'");
                }
            }
        }
    } finally {
        readUnlock();
    }
}
// Throws DdlException when any delete job (sync or async) runs on the partition.
// NOTE(review): "Hash" looks like a typo for "Has"; the public name is kept
// unchanged for caller compatibility.
public void checkHashRunningDeleteJob(long partitionId, String partitionName) throws DdlException {
checkHasRunningSyncDeleteJob(partitionId, partitionName);
checkHasRunningAsyncDeleteJob(partitionId, partitionName);
}
// Executes a synchronous DELETE statement: validates the target, registers a
// delete-backed LoadJob inside a transaction, then polls until the job finishes,
// is cancelled, or times out. Blocks the calling thread for the duration.
public void delete(DeleteStmt stmt) throws DdlException {
String dbName = stmt.getDbName();
String tableName = stmt.getTableName();
String partitionName = stmt.getPartitionName();
List<Predicate> conditions = stmt.getDeleteConditions();
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
throw new DdlException("Db does not exist. name: " + dbName);
}
long tableId = -1;
long partitionId = -1;
LoadJob loadDeleteJob = null;
// Tracks whether we registered the partition in partitionUnderDelete, so the
// finally-block can undo the registration on any failure path.
boolean addRunningPartition = false;
db.readLock();
try {
Table table = db.getTable(tableName);
if (table == null) {
throw new DdlException("Table does not exist. name: " + tableName);
}
if (table.getType() != TableType.OLAP) {
throw new DdlException("Not olap type table. type: " + table.getType().name());
}
OlapTable olapTable = (OlapTable) table;
if (olapTable.getState() != OlapTableState.NORMAL) {
throw new DdlException("Table's state is not normal: " + tableName);
}
tableId = olapTable.getId();
// A range-partitioned table requires an explicit partition; an unpartitioned
// table has a single partition named after the table itself.
if (partitionName == null) {
if (olapTable.getPartitionInfo().getType() == PartitionType.RANGE) {
throw new DdlException("This is a range partitioned table."
+ " You should specify partition in delete stmt");
} else {
partitionName = olapTable.getName();
}
}
Partition partition = olapTable.getPartition(partitionName);
if (partition == null) {
throw new DdlException("Partition does not exist. name: " + partitionName);
}
partitionId = partition.getId();
List<String> deleteConditions = Lists.newArrayList();
// Validates conditions and renders them into display strings.
checkDeleteV2(olapTable, partition, conditions,
deleteConditions, true);
// Claims the partition for this sync delete; throws if already claimed.
addRunningPartition = checkAndAddRunningSyncDeleteJob(partitionId, partitionName);
long jobId = Catalog.getCurrentCatalog().getNextId();
String jobLabel = "delete_" + UUID.randomUUID();
DeleteInfo deleteInfo = new DeleteInfo(db.getId(), tableId, tableName,
partition.getId(), partitionName,
-1, 0, deleteConditions);
loadDeleteJob = new LoadJob(jobId, db.getId(), tableId,
partitionId, jobLabel, olapTable.getIndexIdToSchemaHash(), conditions, deleteInfo);
// Every tablet of every visible index participates; the load info is a placeholder.
Map<Long, TabletLoadInfo> idToTabletLoadInfo = Maps.newHashMap();
for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(IndexExtState.VISIBLE)) {
for (Tablet tablet : materializedIndex.getTablets()) {
long tabletId = tablet.getId();
TabletLoadInfo tabletLoadInfo = new TabletLoadInfo("", -1);
idToTabletLoadInfo.put(tabletId, tabletLoadInfo);
}
}
loadDeleteJob.setIdToTabletLoadInfo(idToTabletLoadInfo);
loadDeleteJob.setState(JobState.LOADING);
// The delete runs under a transaction so it can be committed/aborted atomically.
long transactionId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),
Lists.newArrayList(table.getId()), jobLabel,
new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
LoadJobSourceType.FRONTEND,
Config.stream_load_default_timeout_second);
loadDeleteJob.setTransactionId(transactionId);
addLoadJob(loadDeleteJob, db);
} catch (Throwable t) {
LOG.warn("error occurred during prepare delete", t);
throw new DdlException(t.getMessage(), t);
} finally {
// Release the partition claim taken above; the polling loop below re-releases
// it in its own finally, which is harmless (Set.remove is idempotent).
if (addRunningPartition) {
writeLock();
try {
partitionUnderDelete.remove(partitionId);
} finally {
writeUnlock();
}
}
db.readUnlock();
}
try {
// Poll once per second until the job reaches a terminal state or times out.
long startDeleteTime = System.currentTimeMillis();
long timeout = loadDeleteJob.getDeleteJobTimeout();
while (true) {
db.writeLock();
try {
if (loadDeleteJob.getState() == JobState.FINISHED
|| loadDeleteJob.getState() == JobState.CANCELLED) {
break;
}
if (System.currentTimeMillis() - startDeleteTime > timeout) {
// Only cancel while the txn is still in PREPARE; a committing txn
// is left to finish on its own.
TransactionState transactionState = Catalog.getCurrentGlobalTransactionMgr().getTransactionState(loadDeleteJob.getDbId(),
loadDeleteJob.getTransactionId());
if (transactionState.getTransactionStatus() == TransactionStatus.PREPARE) {
boolean isSuccess = cancelLoadJob(loadDeleteJob, CancelType.TIMEOUT, "load delete job timeout");
if (isSuccess) {
throw new DdlException("timeout when waiting delete");
}
}
}
} finally {
db.writeUnlock();
}
Thread.sleep(1000);
}
} catch (Exception e) {
// Includes InterruptedException from the sleep; surfaced as a DdlException.
String failMsg = "delete unknown, " + e.getMessage();
LOG.warn(failMsg, e);
throw new DdlException(failMsg);
} finally {
writeLock();
try {
partitionUnderDelete.remove(partitionId);
} finally {
writeUnlock();
}
}
}
/**
 * Builds one single-column row (tablet id) per tablet of the delete job with the
 * given id; returns an empty list when the job is unknown.
 *
 * Improvements over the original: the search stops as soon as the job is found
 * (the original inner {@code break} only exited the inner loop, so every
 * remaining db list was still scanned), and {@code values()} replaces the
 * keySet()+get() double lookup.
 */
public List<List<Comparable>> getAsyncDeleteJobInfo(long jobId) {
    LinkedList<List<Comparable>> infos = new LinkedList<List<Comparable>>();
    readLock();
    try {
        LoadJob job = null;
        SEARCH:
        for (List<LoadJob> loadJobs : dbToDeleteJobs.values()) {
            for (LoadJob loadJob : loadJobs) {
                if (loadJob.getId() == jobId) {
                    job = loadJob;
                    break SEARCH;
                }
            }
        }
        if (job == null) {
            return infos;
        }
        for (Long tabletId : job.getIdToTabletLoadInfo().keySet()) {
            List<Comparable> info = Lists.newArrayList();
            info.add(tabletId);
            infos.add(info);
        }
    } finally {
        readUnlock();
    }
    return infos;
}
/** Counts delete jobs of the given db that are currently in {@code state}. */
public long getDeleteJobNumByState(long dbId, JobState state) {
    readLock();
    try {
        List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
        if (deleteJobs == null) {
            return 0;
        }
        long count = 0;
        for (LoadJob job : deleteJobs) {
            if (job.getState() == state) {
                count++;
            }
        }
        return count;
    } finally {
        readUnlock();
    }
}
/** Returns how many delete jobs are recorded for the given db (0 when none). */
public int getDeleteInfoNum(long dbId) {
    readLock();
    try {
        List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
        return deleteJobs == null ? 0 : deleteJobs.size();
    } finally {
        readUnlock();
    }
}
// Builds SHOW DELETE result rows for a db, filtered by the caller's LOAD
// privilege on each table. forUser=true omits internal columns (job id, table
// id, partition id, version/hash). Rows are sorted by create time: column 4 in
// the full layout, column 2 in the user layout.
public List<List<Comparable>> getDeleteInfosByDb(long dbId, boolean forUser) {
LinkedList<List<Comparable>> infos = new LinkedList<List<Comparable>>();
Database db = Catalog.getCurrentCatalog().getDb(dbId);
if (db == null) {
return infos;
}
String dbName = db.getFullName();
readLock();
try {
List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
if (deleteJobs == null) {
return infos;
}
for (LoadJob loadJob : deleteJobs) {
DeleteInfo deleteInfo = loadJob.getDeleteInfo();
// Skip rows the current session may not see.
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName,
deleteInfo.getTableName(),
PrivPredicate.LOAD)) {
continue;
}
List<Comparable> info = Lists.newArrayList();
if (!forUser) {
info.add(loadJob.getId());
info.add(deleteInfo.getTableId());
}
info.add(deleteInfo.getTableName());
if (!forUser) {
info.add(deleteInfo.getPartitionId());
}
info.add(deleteInfo.getPartitionName());
info.add(TimeUtils.longToTimeString(deleteInfo.getCreateTimeMs()));
String conds = Joiner.on(", ").join(deleteInfo.getDeleteConditions());
info.add(conds);
if (!forUser) {
info.add(deleteInfo.getPartitionVersion());
info.add(deleteInfo.getPartitionVersionHash());
}
// A delete job in LOADING state is surfaced as DELETING to the user.
if (loadJob.getState() == JobState.LOADING) {
info.add("DELETING");
} else {
info.add(loadJob.getState().name());
}
infos.add(info);
}
} finally {
readUnlock();
}
// Sort by create-time column; index depends on whether internal columns are present.
int sortIndex;
if (!forUser) {
sortIndex = 4;
} else {
sortIndex = 2;
}
ListComparator<List<Comparable>> comparator = new ListComparator<List<Comparable>>(sortIndex);
Collections.sort(infos, comparator);
return infos;
}
/**
 * Prunes delete infos older than {@code Config.label_keep_max_second} and drops
 * db entries whose info lists become empty.
 *
 * Improvement over the original: the expiry cutoff is computed once (the
 * original performed a division per element inside the loop); the {@code 1000L}
 * long literal avoids int overflow for large keep-seconds values. Boundary
 * behavior may differ by under one second, which is negligible for an
 * hours-scale retention setting.
 */
public void removeOldDeleteJobs() {
    long expireTimeMs = System.currentTimeMillis() - Config.label_keep_max_second * 1000L;
    writeLock();
    try {
        Iterator<Map.Entry<Long, List<DeleteInfo>>> dbIter = dbToDeleteInfos.entrySet().iterator();
        while (dbIter.hasNext()) {
            List<DeleteInfo> infos = dbIter.next().getValue();
            Iterator<DeleteInfo> infoIter = infos.iterator();
            while (infoIter.hasNext()) {
                if (infoIter.next().getCreateTimeMs() < expireTimeMs) {
                    infoIter.remove();
                }
            }
            if (infos.isEmpty()) {
                dbIter.remove();
            }
        }
    } finally {
        writeUnlock();
    }
}
/** Drops all recorded delete infos of the given db (used when the db is removed). */
public void removeDbDeleteJob(long dbId) {
    writeLock();
    try {
        dbToDeleteInfos.remove(dbId);
    } finally {
        writeUnlock();
    }
}
/**
 * Returns the db's (quorum-)finished load job with the greatest finish time,
 * or null when none exists.
 * NOTE(review): "Lastest" is a historical typo; name kept for caller compatibility.
 */
public LoadJob getLastestFinishedLoadJob(long dbId) {
    LoadJob newest = null;
    readLock();
    try {
        List<LoadJob> jobs = dbToLoadJobs.get(dbId);
        if (jobs != null) {
            long maxFinishTime = Long.MIN_VALUE;
            for (LoadJob candidate : jobs) {
                JobState state = candidate.getState();
                if (state != JobState.QUORUM_FINISHED && state != JobState.FINISHED) {
                    continue;
                }
                if (candidate.getLoadFinishTimeMs() > maxFinishTime) {
                    maxFinishTime = candidate.getLoadFinishTimeMs();
                    newest = candidate;
                }
            }
        }
    } finally {
        readUnlock();
    }
    return newest;
}
/**
 * Returns the delete info of the db's FINISHED delete job with the greatest
 * create time, or null when none exists.
 */
public DeleteInfo getLastestFinishedDeleteInfo(long dbId) {
    DeleteInfo newest = null;
    readLock();
    try {
        List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
        if (deleteJobs != null) {
            long maxCreateTime = Long.MIN_VALUE;
            for (LoadJob job : deleteJobs) {
                DeleteInfo candidate = job.getDeleteInfo();
                if (candidate.getCreateTimeMs() > maxCreateTime
                        && job.getState() == JobState.FINISHED) {
                    maxCreateTime = candidate.getCreateTimeMs();
                    newest = candidate;
                }
            }
        }
    } finally {
        readUnlock();
    }
    return newest;
}
/**
 * Counts load jobs of the given ETL type, optionally restricted to one state.
 * A null, CANCELLED, or FINISHED state is resolved against the full job map
 * (terminal jobs are only tracked there); active states use their own maps.
 */
public Integer getLoadJobNumByTypeAndState(EtlJobType type, JobState state) {
    int count = 0;
    readLock();
    try {
        Map<Long, LoadJob> jobMap = null;
        if (state == null || state == JobState.CANCELLED || state == JobState.FINISHED) {
            jobMap = idToLoadJob;
        } else if (state == JobState.PENDING) {
            jobMap = idToPendingLoadJob;
        } else if (state == JobState.ETL) {
            jobMap = idToEtlLoadJob;
        } else if (state == JobState.LOADING) {
            jobMap = idToLoadingLoadJob;
        } else if (state == JobState.QUORUM_FINISHED) {
            jobMap = idToQuorumFinishedLoadJob;
        }
        Preconditions.checkNotNull(jobMap);
        for (LoadJob job : jobMap.values()) {
            if (job.getEtlJobType() != type) {
                continue;
            }
            // When state is given, the full map may contain jobs in other states.
            if (state != null && job.getState() != state) {
                continue;
            }
            ++count;
        }
    } finally {
        readUnlock();
    }
    return count;
}
} | class Load {
private static final Logger LOG = LogManager.getLogger(Load.class);
public static final String VERSION = "v1";
private static final Map<JobState, Set<JobState>> STATE_CHANGE_MAP = Maps.newHashMap();
public static DppConfig dppDefaultConfig = null;
public static Map<String, DppConfig> clusterToDppConfig = Maps.newHashMap();
private Map<Long, LoadJob> idToLoadJob;
private Map<Long, List<LoadJob>> dbToLoadJobs;
private Map<Long, Map<String, List<LoadJob>>> dbLabelToLoadJobs;
private Map<Long, LoadJob> idToPendingLoadJob;
private Map<Long, LoadJob> idToEtlLoadJob;
private Map<Long, LoadJob> idToLoadingLoadJob;
private Map<Long, LoadJob> idToQuorumFinishedLoadJob;
private Set<Long> loadingPartitionIds;
private Map<Long, Map<String, Long>> dbToMiniLabels;
private Map<Long, List<DeleteInfo>> dbToDeleteInfos;
private Map<Long, List<LoadJob>> dbToDeleteJobs;
private Set<Long> partitionUnderDelete;
private Map<Long, AsyncDeleteJob> idToQuorumFinishedDeleteJob;
private volatile LoadErrorHub.Param loadErrorHubParam = new LoadErrorHub.Param();
private ReentrantReadWriteLock lock;
static {
Set<JobState> pendingDestStates = Sets.newHashSet();
pendingDestStates.add(JobState.ETL);
pendingDestStates.add(JobState.CANCELLED);
STATE_CHANGE_MAP.put(JobState.PENDING, pendingDestStates);
Set<JobState> etlDestStates = Sets.newHashSet();
etlDestStates.add(JobState.LOADING);
etlDestStates.add(JobState.CANCELLED);
STATE_CHANGE_MAP.put(JobState.ETL, etlDestStates);
Set<JobState> loadingDestStates = Sets.newHashSet();
loadingDestStates.add(JobState.FINISHED);
loadingDestStates.add(JobState.QUORUM_FINISHED);
loadingDestStates.add(JobState.CANCELLED);
STATE_CHANGE_MAP.put(JobState.LOADING, loadingDestStates);
Set<JobState> quorumFinishedDestStates = Sets.newHashSet();
quorumFinishedDestStates.add(JobState.FINISHED);
STATE_CHANGE_MAP.put(JobState.QUORUM_FINISHED, quorumFinishedDestStates);
Gson gson = new Gson();
try {
Map<String, String> defaultConfig =
(HashMap<String, String>) gson.fromJson(Config.dpp_default_config_str, HashMap.class);
dppDefaultConfig = DppConfig.create(defaultConfig);
Map<String, Map<String, String>> clusterToConfig =
(HashMap<String, Map<String, String>>) gson.fromJson(Config.dpp_config_str, HashMap.class);
for (Entry<String, Map<String, String>> entry : clusterToConfig.entrySet()) {
String cluster = entry.getKey();
DppConfig dppConfig = dppDefaultConfig.getCopiedDppConfig();
dppConfig.update(DppConfig.create(entry.getValue()));
dppConfig.check();
clusterToDppConfig.put(cluster, dppConfig);
}
if (!clusterToDppConfig.containsKey(Config.dpp_default_cluster)) {
throw new LoadException("Default cluster not exist");
}
} catch (Throwable e) {
LOG.error("dpp default config ill-formed", e);
System.exit(-1);
}
}
public Load() {
idToLoadJob = Maps.newHashMap();
dbToLoadJobs = Maps.newHashMap();
dbLabelToLoadJobs = Maps.newHashMap();
idToPendingLoadJob = Maps.newLinkedHashMap();
idToEtlLoadJob = Maps.newLinkedHashMap();
idToLoadingLoadJob = Maps.newLinkedHashMap();
idToQuorumFinishedLoadJob = Maps.newLinkedHashMap();
loadingPartitionIds = Sets.newHashSet();
dbToMiniLabels = Maps.newHashMap();
dbToDeleteInfos = Maps.newHashMap();
dbToDeleteJobs = Maps.newHashMap();
partitionUnderDelete = Sets.newHashSet();
idToQuorumFinishedDeleteJob = Maps.newLinkedHashMap();
lock = new ReentrantReadWriteLock(true);
}
public void readLock() {
lock.readLock().lock();
}
public void readUnlock() {
lock.readLock().unlock();
}
private void writeLock() {
lock.writeLock().lock();
}
private void writeUnlock() {
lock.writeLock().unlock();
}
@Deprecated
public boolean addMiniLoadJob(TMiniLoadRequest request) throws DdlException {
String fullDbName = request.getDb();
String tableName = request.getTbl();
String label = request.getLabel();
long timestamp = 0;
if (request.isSetTimestamp()) {
timestamp = request.getTimestamp();
}
TNetworkAddress beAddr = request.getBackend();
String filePathsValue = request.getFiles().get(0);
Map<String, String> params = request.getProperties();
LabelName labelName = new LabelName(fullDbName, label);
if (Strings.isNullOrEmpty(filePathsValue)) {
throw new DdlException("File paths are not specified");
}
List<String> filePaths = Arrays.asList(filePathsValue.split(","));
List<String> partitionNames = null;
List<String> columnNames = null;
ColumnSeparator columnSeparator = null;
List<String> hllColumnPairList = null;
String lineDelimiter = null;
String formatType = null;
if (params != null) {
String specifiedPartitions = params.get(LoadStmt.KEY_IN_PARAM_PARTITIONS);
if (!Strings.isNullOrEmpty(specifiedPartitions)) {
partitionNames = Arrays.asList(specifiedPartitions.split(","));
}
String specifiedColumns = params.get(LoadStmt.KEY_IN_PARAM_COLUMNS);
if (!Strings.isNullOrEmpty(specifiedColumns)) {
columnNames = Arrays.asList(specifiedColumns.split(","));
}
final String hll = params.get(LoadStmt.KEY_IN_PARAM_HLL);
if (!Strings.isNullOrEmpty(hll)) {
hllColumnPairList = Arrays.asList(hll.split(":"));
}
String columnSeparatorStr = params.get(LoadStmt.KEY_IN_PARAM_COLUMN_SEPARATOR);
if (columnSeparatorStr != null) {
if (columnSeparatorStr.isEmpty()) {
columnSeparatorStr = "\t";
}
columnSeparator = new ColumnSeparator(columnSeparatorStr);
try {
columnSeparator.analyze();
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
}
lineDelimiter = params.get(LoadStmt.KEY_IN_PARAM_LINE_DELIMITER);
formatType = params.get(LoadStmt.KEY_IN_PARAM_FORMAT_TYPE);
}
DataDescription dataDescription = new DataDescription(
tableName,
partitionNames != null ? new PartitionNames(false, partitionNames) : null,
filePaths,
columnNames,
columnSeparator,
formatType,
false,
null
);
dataDescription.setLineDelimiter(lineDelimiter);
dataDescription.setBeAddr(beAddr);
if (hllColumnPairList != null) {
for (int i = 0; i < hllColumnPairList.size(); i++) {
final String pairStr = hllColumnPairList.get(i);
final List<String> pairList = Arrays.asList(pairStr.split(","));
if (pairList.size() != 2) {
throw new DdlException("hll param format error");
}
final String resultColumn = pairList.get(0);
final String hashColumn = pairList.get(1);
final Pair<String, List<String>> pair = new Pair<String, List<String>>(FunctionSet.HLL_HASH,
Arrays.asList(hashColumn));
dataDescription.addColumnMapping(resultColumn, pair);
}
}
List<DataDescription> dataDescriptions = Lists.newArrayList(dataDescription);
Map<String, String> properties = Maps.newHashMap();
if (params != null) {
String maxFilterRatio = params.get(LoadStmt.MAX_FILTER_RATIO_PROPERTY);
if (!Strings.isNullOrEmpty(maxFilterRatio)) {
properties.put(LoadStmt.MAX_FILTER_RATIO_PROPERTY, maxFilterRatio);
}
String timeout = params.get(LoadStmt.TIMEOUT_PROPERTY);
if (!Strings.isNullOrEmpty(timeout)) {
properties.put(LoadStmt.TIMEOUT_PROPERTY, timeout);
}
}
LoadStmt stmt = new LoadStmt(labelName, dataDescriptions, null, null, properties);
if (!registerMiniLabel(fullDbName, label, timestamp)) {
return false;
}
try {
addLoadJob(stmt, EtlJobType.MINI, timestamp);
return true;
} finally {
deregisterMiniLabel(fullDbName, label);
}
}
public void addLoadJob(LoadStmt stmt, EtlJobType etlJobType, long timestamp) throws DdlException {
String dbName = stmt.getLabel().getDbName();
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
throw new DdlException("Database[" + dbName + "] does not exist");
}
LoadJob job = createLoadJob(stmt, etlJobType, db, timestamp);
addLoadJob(job, db);
}
private void addLoadJob(LoadJob job, Database db) throws DdlException {
Catalog.getCurrentSystemInfo().checkClusterCapacity(db.getClusterName());
if (!job.isSyncDeleteJob()) {
db.checkDataSizeQuota();
}
db.readLock();
try {
for (Long tblId : job.getIdToTableLoadInfo().keySet()) {
Table tbl = db.getTable(tblId);
if (tbl != null && tbl.getType() == TableType.OLAP
&& ((OlapTable) tbl).getState() == OlapTableState.RESTORE) {
throw new DdlException("Table " + tbl.getName() + " is in restore process. "
+ "Can not load into it");
}
}
} finally {
db.readUnlock();
}
writeLock();
try {
unprotectAddLoadJob(job, false /* not replay */);
MetricRepo.COUNTER_LOAD_ADD.increase(1L);
Catalog.getCurrentCatalog().getEditLog().logLoadStart(job);
} finally {
writeUnlock();
}
LOG.info("add load job. job: {}", job);
}
private LoadJob createLoadJob(LoadStmt stmt, EtlJobType etlJobType,
Database db, long timestamp) throws DdlException {
String label = stmt.getLabel().getLabelName();
List<DataDescription> dataDescriptions = stmt.getDataDescriptions();
Map<String, String> properties = stmt.getProperties();
try {
FeNameFormat.checkLabel(label);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
if (dataDescriptions == null || dataDescriptions.isEmpty()) {
throw new DdlException("No data file in load statement.");
}
LoadJob job = new LoadJob(label);
job.setEtlJobType(etlJobType);
job.setDbId(db.getId());
job.setTimestamp(timestamp);
job.setBrokerDesc(stmt.getBrokerDesc());
if (ConnectContext.get() != null) {
job.setResourceInfo(ConnectContext.get().toResourceCtx());
job.setExecMemLimit(ConnectContext.get().getSessionVariable().getMaxExecMemByte());
}
if (properties != null) {
if (properties.containsKey(LoadStmt.TIMEOUT_PROPERTY)) {
try {
job.setTimeoutSecond(Integer.parseInt(properties.get(LoadStmt.TIMEOUT_PROPERTY)));
} catch (NumberFormatException e) {
throw new DdlException("Timeout is not INT", e);
}
}
if (properties.containsKey(LoadStmt.MAX_FILTER_RATIO_PROPERTY)) {
try {
job.setMaxFilterRatio(Double.parseDouble(properties.get(LoadStmt.MAX_FILTER_RATIO_PROPERTY)));
} catch (NumberFormatException e) {
throw new DdlException("Max filter ratio is not DOUBLE", e);
}
}
if (properties.containsKey(LoadStmt.LOAD_DELETE_FLAG_PROPERTY)) {
throw new DdlException("Do not support load_delete_flag");
}
if (properties.containsKey(LoadStmt.EXEC_MEM_LIMIT)) {
try {
job.setExecMemLimit(Long.parseLong(properties.get(LoadStmt.EXEC_MEM_LIMIT)));
} catch (NumberFormatException e) {
throw new DdlException("Execute memory limit is not Long", e);
}
}
}
Map<Long, TableLoadInfo> idToTableLoadInfo = Maps.newHashMap();
Map<Long, Map<Long, List<Source>>> tableToPartitionSources = Maps.newHashMap();
for (DataDescription dataDescription : dataDescriptions) {
checkAndCreateSource(db, dataDescription, tableToPartitionSources, etlJobType);
job.addTableName(dataDescription.getTableName());
}
for (Entry<Long, Map<Long, List<Source>>> tableEntry : tableToPartitionSources.entrySet()) {
long tableId = tableEntry.getKey();
Map<Long, List<Source>> partitionToSources = tableEntry.getValue();
Map<Long, PartitionLoadInfo> idToPartitionLoadInfo = Maps.newHashMap();
for (Entry<Long, List<Source>> partitionEntry : partitionToSources.entrySet()) {
PartitionLoadInfo info = new PartitionLoadInfo(partitionEntry.getValue());
idToPartitionLoadInfo.put(partitionEntry.getKey(), info);
}
idToTableLoadInfo.put(tableId, new TableLoadInfo(idToPartitionLoadInfo));
}
job.setIdToTableLoadInfo(idToTableLoadInfo);
if (etlJobType == EtlJobType.BROKER) {
BrokerFileGroupAggInfo sourceInfo = new BrokerFileGroupAggInfo();
for (DataDescription dataDescription : dataDescriptions) {
BrokerFileGroup fileGroup = new BrokerFileGroup(dataDescription);
fileGroup.parse(db, dataDescription);
sourceInfo.addFileGroup(fileGroup);
}
job.setPullLoadSourceInfo(sourceInfo);
LOG.info("source info is {}", sourceInfo);
}
if (etlJobType == EtlJobType.MINI) {
Map<Long, MiniEtlTaskInfo> idToEtlTask = Maps.newHashMap();
long etlTaskId = 0;
db.readLock();
try {
for (DataDescription dataDescription : dataDescriptions) {
String tableName = dataDescription.getTableName();
OlapTable table = (OlapTable) db.getTable(tableName);
if (table == null) {
throw new DdlException("Table[" + tableName + "] does not exist");
}
TNetworkAddress beAddress = dataDescription.getBeAddr();
Backend backend = Catalog.getCurrentSystemInfo().getBackendWithBePort(beAddress.getHostname(),
beAddress.getPort());
if (!Catalog.getCurrentSystemInfo().checkBackendAvailable(backend.getId())) {
throw new DdlException("Etl backend is null or not available");
}
MiniEtlTaskInfo taskInfo = new MiniEtlTaskInfo(etlTaskId++, backend.getId(), table.getId());
idToEtlTask.put(taskInfo.getId(), taskInfo);
}
} finally {
db.readUnlock();
}
job.setMiniEtlTasks(idToEtlTask);
job.setPrority(TPriority.HIGH);
if (job.getTimeoutSecond() == 0) {
job.setTimeoutSecond(Config.mini_load_default_timeout_second);
}
} else if (etlJobType == EtlJobType.HADOOP) {
DppConfig dppConfig = dppDefaultConfig.getCopiedDppConfig();
String cluster = stmt.getCluster();
if (cluster == null && properties != null) {
cluster = properties.get(LoadStmt.CLUSTER_PROPERTY);
}
Pair<String, DppConfig> clusterInfo = Catalog.getCurrentCatalog().getAuth().getLoadClusterInfo(
stmt.getUser(), cluster);
cluster = clusterInfo.first;
DppConfig clusterConfig = clusterInfo.second;
if (cluster == null || clusterConfig == null) {
if (cluster == null) {
cluster = Config.dpp_default_cluster;
}
clusterConfig = clusterToDppConfig.get(cluster);
if (clusterConfig == null) {
throw new DdlException("Load cluster[" + cluster + "] does not exist");
}
}
dppConfig.update(clusterConfig);
try {
dppConfig.updateHadoopConfigs(properties);
dppConfig.check();
job.setClusterInfo(cluster, dppConfig);
job.setPrority(dppConfig.getPriority());
} catch (LoadException e) {
throw new DdlException(e.getMessage());
}
if (job.getTimeoutSecond() == 0) {
job.setTimeoutSecond(Config.hadoop_load_default_timeout_second);
}
} else if (etlJobType == EtlJobType.BROKER) {
if (job.getTimeoutSecond() == 0) {
job.setTimeoutSecond(Config.broker_load_default_timeout_second);
}
} else if (etlJobType == EtlJobType.INSERT) {
job.setPrority(TPriority.HIGH);
if (job.getTimeoutSecond() == 0) {
job.setTimeoutSecond(Config.insert_load_default_timeout_second);
}
}
job.setId(Catalog.getCurrentCatalog().getNextId());
return job;
}
/*
* This is only used for hadoop load
*/
/**
 * Validates one DataDescription against its target OLAP table and builds a Source
 * for it, registering the source under every target partition in
 * {@code tableToPartitionSources} (tableId -> partitionId -> list of sources).
 * This is only used for hadoop load.
 *
 * @param db database holding the target table; its read lock is taken for the
 *           table-dependent part of the validation
 * @param dataDescription user-supplied description of one data source (paths,
 *                        columns, mappings, partitions)
 * @param tableToPartitionSources output map, mutated in place
 * @param jobType used to reject unsupported table layouts for HADOOP jobs
 * @throws DdlException on any validation failure
 */
public static void checkAndCreateSource(Database db, DataDescription dataDescription,
        Map<Long, Map<Long, List<Source>>> tableToPartitionSources, EtlJobType jobType) throws DdlException {
    Source source = new Source(dataDescription.getFilePaths());
    long tableId = -1;
    Set<Long> sourcePartitionIds = Sets.newHashSet();
    // check table exist
    String tableName = dataDescription.getTableName();
    Map<String, Pair<String, List<String>>> columnToFunction = null;
    db.readLock();
    try {
        Table table = db.getTable(tableName);
        if (table == null) {
            throw new DdlException("Table [" + tableName + "] does not exist");
        }
        tableId = table.getId();
        if (table.getType() != TableType.OLAP) {
            throw new DdlException("Table [" + tableName + "] is not olap table");
        }
        // hadoop dpp cannot shuffle on multiple partition columns
        if (((OlapTable) table).getPartitionInfo().isMultiColumnPartition() && jobType == EtlJobType.HADOOP) {
            throw new DdlException("Load by hadoop cluster does not support table with multi partition columns."
                    + " Table: " + table.getName() + ". Try using broker load. See 'help broker load;'");
        }
        // a PARTITION clause makes no sense for an unpartitioned table
        if (dataDescription.getPartitionNames() != null &&
                ((OlapTable) table).getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {
            ErrorReport.reportDdlException(ErrorCode.ERR_PARTITION_CLAUSE_NO_ALLOWED);
        }
        if (((OlapTable) table).getState() == OlapTableState.RESTORE) {
            throw new DdlException("Table [" + tableName + "] is under restore");
        }
        // NEGATIVE load only works when values can be subtracted, i.e. AGG_KEYS
        if (((OlapTable) table).getKeysType() != KeysType.AGG_KEYS && dataDescription.isNegative()) {
            throw new DdlException("Load for AGG_KEYS table should not specify NEGATIVE");
        }
        // get table schema
        List<Column> baseSchema = table.getBaseSchema(false);
        // fill the column info if user does not specify them
        dataDescription.fillColumnInfoIfNotSpecified(baseSchema);
        // source columns
        List<String> columnNames = Lists.newArrayList();
        List<String> assignColumnNames = Lists.newArrayList();
        if (dataDescription.getFileFieldNames() != null) {
            assignColumnNames.addAll(dataDescription.getFileFieldNames());
            if (dataDescription.getColumnsFromPath() != null) {
                assignColumnNames.addAll(dataDescription.getColumnsFromPath());
            }
        }
        if (assignColumnNames.isEmpty()) {
            // use table columns when the user did not name the file fields
            for (Column column : baseSchema) {
                columnNames.add(column.getName());
            }
        } else {
            // convert column names to the table's canonical casing where they match
            for (String assignCol : assignColumnNames) {
                if (table.getColumn(assignCol) != null) {
                    columnNames.add(table.getColumn(assignCol).getName());
                } else {
                    columnNames.add(assignCol);
                }
            }
        }
        source.setColumnNames(columnNames);
        // check default value: every table column must be fed by the file, a mapping
        // expression, or its own default/null
        Map<String, Pair<String, List<String>>> columnToHadoopFunction = dataDescription.getColumnToHadoopFunction();
        List<ImportColumnDesc> parsedColumnExprList = dataDescription.getParsedColumnExprList();
        Map<String, Expr> parsedColumnExprMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (ImportColumnDesc importColumnDesc : parsedColumnExprList) {
            parsedColumnExprMap.put(importColumnDesc.getColumnName(), importColumnDesc.getExpr());
        }
        for (Column column : baseSchema) {
            String columnName = column.getName();
            if (columnNames.contains(columnName)) {
                continue;
            }
            if (parsedColumnExprMap.containsKey(columnName)) {
                continue;
            }
            if (column.getDefaultValue() != null || column.isAllowNull()) {
                continue;
            }
            throw new DdlException("Column has no default value. column: " + columnName);
        }
        // check negative for sum aggregate type
        if (dataDescription.isNegative()) {
            for (Column column : baseSchema) {
                if (!column.isKey() && column.getAggregationType() != AggregateType.SUM) {
                    throw new DdlException("Column is not SUM AggreateType. column:" + column.getName());
                }
            }
        }
        // check hll: HLL columns cannot be loaded raw; they need an explicit mapping function
        for (Column column : baseSchema) {
            if (column.getDataType() == PrimitiveType.HLL) {
                if (columnToHadoopFunction != null && !columnToHadoopFunction.containsKey(column.getName())) {
                    throw new DdlException("Hll column is not assigned. column:" + column.getName());
                }
            }
        }
        // add schema-change shadow columns: they must receive the same data as their
        // origin column so both versions of the table stay consistent during the change
        for (Column column : table.getFullSchema()) {
            if (column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX)) {
                String originCol = column.getNameWithoutPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX);
                if (parsedColumnExprMap.containsKey(originCol)) {
                    Expr mappingExpr = parsedColumnExprMap.get(originCol);
                    if (mappingExpr != null) {
                        /*
                         * eg:
                         * (A, C) SET (B = func(xx))
                         * ->
                         * (A, C) SET (B = func(xx), __doris_shadow_B = func(xxx))
                         */
                        // NOTE(review): columnToHadoopFunction is dereferenced here without a
                        // null check, while later code guards it with != null — confirm it is
                        // always non-null for hadoop loads reaching this branch.
                        if (columnToHadoopFunction.containsKey(originCol)) {
                            columnToHadoopFunction.put(column.getName(), columnToHadoopFunction.get(originCol));
                        }
                        ImportColumnDesc importColumnDesc = new ImportColumnDesc(column.getName(), mappingExpr);
                        parsedColumnExprList.add(importColumnDesc);
                    } else {
                        /*
                         * eg:
                         * (A, B, C)
                         * ->
                         * (A, B, C) SET (__doris_shadow_B = substitute(B))
                         */
                        columnToHadoopFunction.put(column.getName(), Pair.create("substitute", Lists.newArrayList(originCol)));
                        ImportColumnDesc importColumnDesc = new ImportColumnDesc(column.getName(), new SlotRef(null, originCol));
                        parsedColumnExprList.add(importColumnDesc);
                    }
                } else {
                    /*
                     * There is a case that if user does not specify the related origin column, eg:
                     * COLUMNS (A, C), and B is not specified, but B is being modified so there is a shadow column '__doris_shadow_B'.
                     * We can not just add a mapping function "__doris_shadow_B = substitute(B)", because Doris can not find column B.
                     * In this case, __doris_shadow_B can use its default value, so no need to add it to column mapping
                     */
                }
            }
        }
        LOG.debug("after add shadow column. parsedColumnExprList: {}, columnToHadoopFunction: {}",
                parsedColumnExprList, columnToHadoopFunction);
        // case-insensitive lookup of source column names for mapping validation
        Map<String, String> columnNameMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (String columnName : columnNames) {
            columnNameMap.put(columnName, columnName);
        }
        // validate every hadoop mapping function against its target column
        if (columnToHadoopFunction != null) {
            columnToFunction = Maps.newHashMap();
            for (Entry<String, Pair<String, List<String>>> entry : columnToHadoopFunction.entrySet()) {
                String mappingColumnName = entry.getKey();
                Column mappingColumn = table.getColumn(mappingColumnName);
                if (mappingColumn == null) {
                    throw new DdlException("Mapping column is not in table. column: " + mappingColumnName);
                }
                Pair<String, List<String>> function = entry.getValue();
                try {
                    DataDescription.validateMappingFunction(function.first, function.second, columnNameMap,
                            mappingColumn, dataDescription.isHadoopLoad());
                } catch (AnalysisException e) {
                    throw new DdlException(e.getMessage());
                }
                columnToFunction.put(mappingColumn.getName(), function);
            }
        }
        // resolve target partitions: all partitions when none are specified
        OlapTable olapTable = (OlapTable) table;
        PartitionNames partitionNames = dataDescription.getPartitionNames();
        if (partitionNames == null) {
            for (Partition partition : olapTable.getPartitions()) {
                sourcePartitionIds.add(partition.getId());
            }
        } else {
            for (String partitionName : partitionNames.getPartitionNames()) {
                Partition partition = olapTable.getPartition(partitionName, partitionNames.isTemp());
                if (partition == null) {
                    throw new DdlException("Partition [" + partitionName + "] does not exist");
                }
                sourcePartitionIds.add(partition.getId());
            }
        }
    } finally {
        db.readUnlock();
    }
    // the remaining fields do not depend on table metadata, so set them outside the lock
    String columnSeparator = dataDescription.getColumnSeparator();
    if (!Strings.isNullOrEmpty(columnSeparator)) {
        source.setColumnSeparator(columnSeparator);
    }
    String lineDelimiter = dataDescription.getLineDelimiter();
    if (!Strings.isNullOrEmpty(lineDelimiter)) {
        source.setLineDelimiter(lineDelimiter);
    }
    source.setNegative(dataDescription.isNegative());
    if (columnToFunction != null) {
        source.setColumnToFunction(columnToFunction);
    }
    // register the source under each target partition (get-or-create the inner maps)
    Map<Long, List<Source>> partitionToSources = null;
    if (tableToPartitionSources.containsKey(tableId)) {
        partitionToSources = tableToPartitionSources.get(tableId);
    } else {
        partitionToSources = Maps.newHashMap();
        tableToPartitionSources.put(tableId, partitionToSources);
    }
    for (long partitionId : sourcePartitionIds) {
        List<Source> sources = null;
        if (partitionToSources.containsKey(partitionId)) {
            sources = partitionToSources.get(partitionId);
        } else {
            sources = new ArrayList<Source>();
            partitionToSources.put(partitionId, sources);
        }
        sources.add(source);
    }
}
/**
* When doing schema change, there may have some 'shadow' columns, with prefix '__doris_shadow_' in
* their names. These columns are invisible to user, but we need to generate data for these columns.
* So we add column mappings for these column.
* eg1:
* base schema is (A, B, C), and B is under schema change, so there will be a shadow column: '__doris_shadow_B'
* So the final column mapping should looks like: (A, B, C, __doris_shadow_B = substitute(B));
*/
/**
 * Builds mapping descriptors for schema-change shadow columns ('__doris_shadow_*').
 * A shadow column must be fed the same data as its origin column, so:
 * - if the origin column has a mapping expression, the shadow column reuses it:
 *   (A, C) SET (B = func(xx)) -> (A, C) SET (B = func(xx), __doris_shadow_B = func(xx))
 * - if the origin column is loaded directly, the shadow column references it:
 *   (A, B, C) -> (A, B, C) SET (__doris_shadow_B = B)
 * - if the origin column is absent from the load entirely, the shadow column falls
 *   back to its default value and no descriptor is produced for it.
 *
 * @param tbl table whose full schema may contain shadow columns
 * @param columnExprMap load column name -> mapping expression (null expr = plain column)
 * @return descriptors for the shadow columns that need explicit mappings
 */
public static List<ImportColumnDesc> getSchemaChangeShadowColumnDesc(Table tbl, Map<String, Expr> columnExprMap) {
    List<ImportColumnDesc> shadowColumnDescs = Lists.newArrayList();
    for (Column column : tbl.getFullSchema()) {
        // only shadow columns are of interest here
        if (!column.isNameWithPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX)) {
            continue;
        }
        String originCol = column.getNameWithoutPrefix(SchemaChangeHandler.SHADOW_NAME_PRFIX);
        if (!columnExprMap.containsKey(originCol)) {
            // Origin column not referenced by the load; the shadow column takes its
            // default value, so no mapping is added.
            continue;
        }
        Expr mappingExpr = columnExprMap.get(originCol);
        // Reuse the origin's mapping when present, otherwise substitute the origin column.
        Expr shadowExpr = (mappingExpr != null) ? mappingExpr : new SlotRef(null, originCol);
        shadowColumnDescs.add(new ImportColumnDesc(column.getName(), shadowExpr));
    }
    return shadowColumnDescs;
}
/*
* used for spark load job
* not init slot desc and analyze exprs
*/
/**
 * Convenience overload used for spark load jobs: validates and normalizes the
 * column expressions but does not init slot descriptors or analyze exprs
 * (delegates with needInitSlotAndAnalyzeExprs = false).
 */
public static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs,
        Map<String, Pair<String, List<String>>> columnToHadoopFunction) throws UserException {
    initColumns(tbl, columnExprs, columnToHadoopFunction, null, null, null, null, null, false);
}
/*
* This function should be used for broker load v2 and stream load.
* And it must be called in same db lock when planing.
*/
/**
 * Convenience overload for broker load v2 and stream load: performs the full
 * initialization including slot descriptors and expression analysis
 * (delegates with needInitSlotAndAnalyzeExprs = true).
 * Must be called under the same db lock used for planning.
 */
public static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs,
        Map<String, Pair<String, List<String>>> columnToHadoopFunction,
        Map<String, Expr> exprsByName, Analyzer analyzer, TupleDescriptor srcTupleDesc,
        Map<String, SlotDescriptor> slotDescByName, TBrokerScanRangeParams params) throws UserException {
    initColumns(tbl, columnExprs, columnToHadoopFunction, exprsByName, analyzer,
            srcTupleDesc, slotDescByName, params, true);
}
/*
* This function will do followings:
* 1. fill the column exprs if user does not specify any column or column mapping.
* 2. For not specified columns, check if they have default value.
* 3. Add any shadow columns if have.
* 4. validate hadoop functions
* 5. init slot descs and expr map for load plan
*/
/**
 * Normalizes and validates a load's column expressions. This will:
 * 1. fill the column exprs if user does not specify any column or column mapping;
 * 2. for unspecified columns, check that they have a default value (or are nullable);
 * 3. add shadow-column mappings for any in-flight schema change;
 * 4. validate hadoop mapping functions;
 * 5. init slot descs and the expr map for the load plan (only when
 *    {@code needInitSlotAndAnalyzeExprs} is true; the earlier steps are shared
 *    with the spark-load path which passes false and nulls for the plan args).
 *
 * @param exprsByName     out-param: resolved column name -> analyzed expression
 * @param slotDescByName  out-param: source column name -> slot descriptor
 * @throws UserException on invalid mappings, missing defaults, or analysis failure
 */
public static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs,
        Map<String, Pair<String, List<String>>> columnToHadoopFunction,
        Map<String, Expr> exprsByName, Analyzer analyzer, TupleDescriptor srcTupleDesc,
        Map<String, SlotDescriptor> slotDescByName, TBrokerScanRangeParams params,
        boolean needInitSlotAndAnalyzeExprs) throws UserException {
    // Every mapping (non-plain-column) expression must target an existing table column.
    for (ImportColumnDesc importColumnDesc : columnExprs) {
        if (importColumnDesc.isColumn()) {
            continue;
        }
        String mappingColumnName = importColumnDesc.getColumnName();
        if (tbl.getColumn(mappingColumnName) == null) {
            throw new DdlException("Mapping column is not in table. column: " + mappingColumnName);
        }
    }
    // Work on a copy so the caller's list is not mutated.
    List<ImportColumnDesc> copiedColumnExprs = Lists.newArrayList(columnExprs);
    // If no plain source columns were specified, default to the table's base schema order.
    boolean specifyFileFieldNames = copiedColumnExprs.stream().anyMatch(p -> p.isColumn());
    if (!specifyFileFieldNames) {
        List<Column> columns = tbl.getBaseSchema(false);
        for (Column column : columns) {
            ImportColumnDesc columnDesc = new ImportColumnDesc(column.getName());
            LOG.debug("add base column {} to stream load task", column.getName());
            copiedColumnExprs.add(columnDesc);
        }
    }
    // Case-insensitive name -> expr map (expr is null for plain columns).
    Map<String, Expr> columnExprMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    for (ImportColumnDesc importColumnDesc : copiedColumnExprs) {
        columnExprMap.put(importColumnDesc.getColumnName(), importColumnDesc.getExpr());
    }
    // Any base column not covered by the load must be able to fall back to a default.
    for (Column column : tbl.getBaseSchema()) {
        String columnName = column.getName();
        if (columnExprMap.containsKey(columnName)) {
            continue;
        }
        if (column.getDefaultValue() != null || column.isAllowNull()) {
            continue;
        }
        throw new DdlException("Column has no default value. column: " + columnName);
    }
    // Feed schema-change shadow columns the same data as their origin columns.
    copiedColumnExprs.addAll(getSchemaChangeShadowColumnDesc(tbl, columnExprMap));
    // Validate hadoop functions (spark-load path); null means no hadoop mappings.
    if (columnToHadoopFunction != null) {
        Map<String, String> columnNameMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (ImportColumnDesc importColumnDesc : copiedColumnExprs) {
            if (importColumnDesc.isColumn()) {
                columnNameMap.put(importColumnDesc.getColumnName(), importColumnDesc.getColumnName());
            }
        }
        for (Entry<String, Pair<String, List<String>>> entry : columnToHadoopFunction.entrySet()) {
            String mappingColumnName = entry.getKey();
            Column mappingColumn = tbl.getColumn(mappingColumnName);
            // FIX: reject mappings to non-existent columns explicitly. Previously a null
            // mappingColumn was passed straight into validateMappingFunction (NPE risk);
            // this mirrors the identical guard in checkAndCreateSource().
            if (mappingColumn == null) {
                throw new DdlException("Mapping column is not in table. column: " + mappingColumnName);
            }
            Pair<String, List<String>> function = entry.getValue();
            try {
                DataDescription.validateMappingFunction(function.first, function.second, columnNameMap,
                        mappingColumn, false);
            } catch (AnalysisException e) {
                throw new DdlException(e.getMessage());
            }
        }
    }
    if (!needInitSlotAndAnalyzeExprs) {
        return;
    }
    // Build one VARCHAR source slot per plain column; record mapping exprs for analysis.
    for (ImportColumnDesc importColumnDesc : copiedColumnExprs) {
        String columnName = importColumnDesc.getColumnName();
        // Use the table's canonical column-name casing when the column exists.
        String realColName = tbl.getColumn(columnName) == null ? columnName
                : tbl.getColumn(columnName).getName();
        if (importColumnDesc.getExpr() != null) {
            Expr expr = transformHadoopFunctionExpr(tbl, realColName, importColumnDesc.getExpr());
            exprsByName.put(realColName, expr);
        } else {
            SlotDescriptor slotDesc = analyzer.getDescTbl().addSlotDescriptor(srcTupleDesc);
            // Source data is always read as nullable VARCHAR and cast later.
            slotDesc.setType(ScalarType.createType(PrimitiveType.VARCHAR));
            slotDesc.setIsMaterialized(true);
            slotDesc.setIsNullable(true);
            slotDesc.setColumn(new Column(realColName, PrimitiveType.VARCHAR));
            params.addToSrcSlotIds(slotDesc.getId().asInt());
            slotDescByName.put(realColName, slotDesc);
        }
    }
    /*
     * The extension column of the materialized view is added to the expression evaluation of load
     * To avoid nested expressions. eg : column(a, tmp_c, c = expr(tmp_c)) ,
     * __doris_materialized_view_bitmap_union_c need be analyzed after exprsByName
     * So the columns of the materialized view are stored separately here
     */
    Map<String, Expr> mvDefineExpr = Maps.newHashMap();
    for (Column column : tbl.getFullSchema()) {
        if (column.getDefineExpr() != null) {
            mvDefineExpr.put(column.getName(), column.getDefineExpr());
        }
    }
    LOG.debug("slotDescByName: {}, exprsByName: {}, mvDefineExpr: {}", slotDescByName, exprsByName, mvDefineExpr);
    // Substitute source slots into each mapping expr, then analyze it.
    for (Map.Entry<String, Expr> entry : exprsByName.entrySet()) {
        ExprSubstitutionMap smap = new ExprSubstitutionMap();
        List<SlotRef> slots = Lists.newArrayList();
        entry.getValue().collect(SlotRef.class, slots);
        for (SlotRef slot : slots) {
            SlotDescriptor slotDesc = slotDescByName.get(slot.getColumnName());
            if (slotDesc == null) {
                throw new UserException("unknown reference column, column=" + entry.getKey()
                        + ", reference=" + slot.getColumnName());
            }
            smap.getLhs().add(slot);
            smap.getRhs().add(new SlotRef(slotDesc));
        }
        Expr expr = entry.getValue().clone(smap);
        expr.analyze(analyzer);
        // Aggregates cannot be evaluated per-row during load.
        List<FunctionCallExpr> funcs = Lists.newArrayList();
        expr.collect(FunctionCallExpr.class, funcs);
        for (FunctionCallExpr fn : funcs) {
            if (fn.isAggregateFunction()) {
                throw new AnalysisException("Don't support aggregation function in load expression");
            }
        }
        exprsByName.put(entry.getKey(), expr);
    }
    // Materialized-view define exprs may reference either source slots or the
    // already-analyzed mapping exprs; cast to the target column type either way.
    for (Map.Entry<String, Expr> entry : mvDefineExpr.entrySet()) {
        ExprSubstitutionMap smap = new ExprSubstitutionMap();
        List<SlotRef> slots = Lists.newArrayList();
        entry.getValue().collect(SlotRef.class, slots);
        for (SlotRef slot : slots) {
            if (slotDescByName.get(slot.getColumnName()) != null) {
                smap.getLhs().add(slot);
                smap.getRhs().add(new CastExpr(tbl.getColumn(slot.getColumnName()).getType(),
                        new SlotRef(slotDescByName.get(slot.getColumnName()))));
            } else if (exprsByName.get(slot.getColumnName()) != null) {
                smap.getLhs().add(slot);
                smap.getRhs().add(new CastExpr(tbl.getColumn(slot.getColumnName()).getType(),
                        exprsByName.get(slot.getColumnName())));
            } else {
                throw new UserException("unknown reference column, column=" + entry.getKey()
                        + ", reference=" + slot.getColumnName());
            }
        }
        Expr expr = entry.getValue().clone(smap);
        expr.analyze(analyzer);
        exprsByName.put(entry.getKey(), expr);
    }
    LOG.debug("after init column, exprMap: {}", exprsByName);
}
/**
* This method is used to transform hadoop function.
* The hadoop function includes: replace_value, strftime, time_format, alignment_timestamp, default_value, now.
* It rewrites those function with real function name and param.
* For the other function, the expr only go through this function and the origin expr is returned.
*
* @param columnName
* @param originExpr
* @return
* @throws UserException
*/
/**
 * This method is used to transform hadoop function.
 * The hadoop function includes: replace_value, strftime, time_format, alignment_timestamp, default_value, now.
 * It rewrites those function with real function name and param.
 * For the other function, the expr only go through this function and the origin expr is returned.
 *
 * @param tbl target table (used to resolve the column and its default value)
 * @param columnName name of the column the expression is mapped to
 * @param originExpr user-supplied mapping expression
 * @return the rewritten expression, or {@code originExpr} unchanged
 * @throws UserException if a required default value is missing or the precision
 *         argument of alignment_timestamp is unknown
 */
private static Expr transformHadoopFunctionExpr(Table tbl, String columnName, Expr originExpr)
        throws UserException {
    Column column = tbl.getColumn(columnName);
    if (column == null) {
        // Column not found in the table; leave the expression untouched.
        return originExpr;
    }
    if (originExpr instanceof FunctionCallExpr) {
        FunctionCallExpr funcExpr = (FunctionCallExpr) originExpr;
        String funcName = funcExpr.getFnName().getFunction();
        if (funcName.equalsIgnoreCase("replace_value")) {
            List<Expr> exprs = Lists.newArrayList();
            SlotRef slotRef = new SlotRef(null, columnName);
            /*
             * We will convert this based on different cases:
             * case 1: k1 = replace_value(null, anyval);
             *     to: k1 = if (k1 is not null, k1, anyval);
             *
             * case 2: k1 = replace_value(anyval1, anyval2);
             *     to: k1 = if (k1 is not null, if(k1 != anyval1, k1, anyval2), null);
             */
            if (funcExpr.getChild(0) instanceof NullLiteral) {
                // case 1
                exprs.add(new IsNullPredicate(slotRef, true));
                exprs.add(slotRef);
                if (funcExpr.hasChild(1)) {
                    exprs.add(funcExpr.getChild(1));
                } else {
                    exprs.add(replaceValueDefaultExpr(column, columnName));
                }
            } else {
                // case 2
                exprs.add(new IsNullPredicate(slotRef, true));
                List<Expr> innerIfExprs = Lists.newArrayList();
                innerIfExprs.add(new BinaryPredicate(BinaryPredicate.Operator.NE, slotRef, funcExpr.getChild(0)));
                innerIfExprs.add(slotRef);
                if (funcExpr.hasChild(1)) {
                    innerIfExprs.add(funcExpr.getChild(1));
                } else {
                    innerIfExprs.add(replaceValueDefaultExpr(column, columnName));
                }
                FunctionCallExpr innerIfFn = new FunctionCallExpr("if", innerIfExprs);
                exprs.add(innerIfFn);
                exprs.add(NullLiteral.create(Type.VARCHAR));
            }
            LOG.debug("replace_value expr: {}", exprs);
            FunctionCallExpr newFn = new FunctionCallExpr("if", exprs);
            return newFn;
        } else if (funcName.equalsIgnoreCase("strftime")) {
            // strftime(fmt, ts) -> FROM_UNIXTIME(ts)
            FunctionName fromUnixName = new FunctionName("FROM_UNIXTIME");
            List<Expr> fromUnixArgs = Lists.newArrayList(funcExpr.getChild(1));
            FunctionCallExpr fromUnixFunc = new FunctionCallExpr(
                    fromUnixName, new FunctionParams(false, fromUnixArgs));
            return fromUnixFunc;
        } else if (funcName.equalsIgnoreCase("time_format")) {
            // time_format(outFmt, inFmt, val) -> DATE_FORMAT(STR_TO_DATE(val, inFmt), outFmt)
            FunctionName strToDateName = new FunctionName("STR_TO_DATE");
            List<Expr> strToDateExprs = Lists.newArrayList(funcExpr.getChild(2), funcExpr.getChild(1));
            FunctionCallExpr strToDateFuncExpr = new FunctionCallExpr(
                    strToDateName, new FunctionParams(false, strToDateExprs));
            FunctionName dateFormatName = new FunctionName("DATE_FORMAT");
            List<Expr> dateFormatArgs = Lists.newArrayList(strToDateFuncExpr, funcExpr.getChild(0));
            FunctionCallExpr dateFormatFunc = new FunctionCallExpr(
                    dateFormatName, new FunctionParams(false, dateFormatArgs));
            return dateFormatFunc;
        } else if (funcName.equalsIgnoreCase("alignment_timestamp")) {
            /*
             * change to:
             * UNIX_TIMESTAMP(DATE_FORMAT(FROM_UNIXTIME(ts), "%Y-01-01 00:00:00"));
             *
             */
            FunctionName fromUnixName = new FunctionName("FROM_UNIXTIME");
            List<Expr> fromUnixArgs = Lists.newArrayList(funcExpr.getChild(1));
            FunctionCallExpr fromUnixFunc = new FunctionCallExpr(
                    fromUnixName, new FunctionParams(false, fromUnixArgs));
            // Truncate to the requested precision via a DATE_FORMAT pattern.
            StringLiteral precision = (StringLiteral) funcExpr.getChild(0);
            StringLiteral format;
            if (precision.getStringValue().equalsIgnoreCase("year")) {
                format = new StringLiteral("%Y-01-01 00:00:00");
            } else if (precision.getStringValue().equalsIgnoreCase("month")) {
                format = new StringLiteral("%Y-%m-01 00:00:00");
            } else if (precision.getStringValue().equalsIgnoreCase("day")) {
                format = new StringLiteral("%Y-%m-%d 00:00:00");
            } else if (precision.getStringValue().equalsIgnoreCase("hour")) {
                format = new StringLiteral("%Y-%m-%d %H:00:00");
            } else {
                throw new UserException("Unknown precision(" + precision.getStringValue() + ")");
            }
            FunctionName dateFormatName = new FunctionName("DATE_FORMAT");
            List<Expr> dateFormatArgs = Lists.newArrayList(fromUnixFunc, format);
            FunctionCallExpr dateFormatFunc = new FunctionCallExpr(
                    dateFormatName, new FunctionParams(false, dateFormatArgs));
            FunctionName unixTimeName = new FunctionName("UNIX_TIMESTAMP");
            List<Expr> unixTimeArgs = Lists.newArrayList();
            unixTimeArgs.add(dateFormatFunc);
            FunctionCallExpr unixTimeFunc = new FunctionCallExpr(
                    unixTimeName, new FunctionParams(false, unixTimeArgs));
            return unixTimeFunc;
        } else if (funcName.equalsIgnoreCase("default_value")) {
            return funcExpr.getChild(0);
        } else if (funcName.equalsIgnoreCase("now")) {
            FunctionName nowFunctionName = new FunctionName("NOW");
            FunctionCallExpr newFunc = new FunctionCallExpr(nowFunctionName, new FunctionParams(null));
            return newFunc;
        } else if (funcName.equalsIgnoreCase("substitute")) {
            return funcExpr.getChild(0);
        }
    }
    return originExpr;
}

/**
 * Fallback replacement expression for replace_value() when no explicit value is
 * given: the column's default value, or NULL for nullable columns.
 * (Extracted: this logic was previously duplicated in both replace_value branches.)
 *
 * @throws UserException if the column has neither a default nor allows NULL
 */
private static Expr replaceValueDefaultExpr(Column column, String columnName) throws UserException {
    if (column.getDefaultValue() != null) {
        return new StringLiteral(column.getDefaultValue());
    }
    if (column.isAllowNull()) {
        return NullLiteral.create(Type.VARCHAR);
    }
    throw new UserException("Column(" + columnName + ") has no default value.");
}
/**
 * Registers a load job in all in-memory indexes. Caller must hold the write lock
 * (hence "unprotect"). Used both for new jobs and for edit-log replay.
 *
 * @param job the job to register
 * @param isReplay true when called from edit-log replay; skips the unfinished-job quota
 * @throws DdlException if the quota is exceeded or the label is already in use
 */
public void unprotectAddLoadJob(LoadJob job, boolean isReplay) throws DdlException {
    long jobId = job.getId();
    long dbId = job.getDbId();
    String label = job.getLabel();
    // quota applies only to live submissions, never to replay
    if (!isReplay && getAllUnfinishedLoadJob() > Config.max_unfinished_load_job) {
        throw new DdlException(
                "Number of unfinished load jobs exceed the max number: " + Config.max_unfinished_load_job);
    }
    if (!job.isSyncDeleteJob()) {
        // mini load labels are pre-registered separately, so skip the mini-label check for them
        boolean checkMini = true;
        if (job.getEtlJobType() == EtlJobType.MINI) {
            checkMini = false;
        }
        unprotectIsLabelUsed(dbId, label, -1, checkMini);
        // get or create the label -> jobs index for this db
        Map<String, List<LoadJob>> labelToLoadJobs = null;
        if (dbLabelToLoadJobs.containsKey(dbId)) {
            labelToLoadJobs = dbLabelToLoadJobs.get(dbId);
        } else {
            labelToLoadJobs = Maps.newHashMap();
            dbLabelToLoadJobs.put(dbId, labelToLoadJobs);
        }
        List<LoadJob> labelLoadJobs = null;
        if (labelToLoadJobs.containsKey(label)) {
            labelLoadJobs = labelToLoadJobs.get(label);
        } else {
            labelLoadJobs = Lists.newArrayList();
            labelToLoadJobs.put(label, labelLoadJobs);
        }
        // get or create the per-db job list
        List<LoadJob> dbLoadJobs = null;
        if (dbToLoadJobs.containsKey(dbId)) {
            dbLoadJobs = dbToLoadJobs.get(dbId);
        } else {
            dbLoadJobs = Lists.newArrayList();
            dbToLoadJobs.put(dbId, dbLoadJobs);
        }
        idToLoadJob.put(jobId, job);
        dbLoadJobs.add(job);
        labelLoadJobs.add(job);
    } else {
        // sync delete jobs live in their own per-db index and have no label
        List<LoadJob> dbDeleteJobs = null;
        if (dbToDeleteJobs.containsKey(dbId)) {
            dbDeleteJobs = dbToDeleteJobs.get(dbId);
        } else {
            dbDeleteJobs = Lists.newArrayList();
            dbToDeleteJobs.put(dbId, dbDeleteJobs);
        }
        idToLoadJob.put(jobId, job);
        dbDeleteJobs.add(job);
    }
    // index the job by its current state (replayed jobs may arrive in any state)
    switch (job.getState()) {
        case PENDING:
            idToPendingLoadJob.put(jobId, job);
            break;
        case ETL:
            idToEtlLoadJob.put(jobId, job);
            break;
        case LOADING:
            idToLoadingLoadJob.put(jobId, job);
            // mark the target partitions as being loaded again
            recoverLoadingPartitions(job);
            break;
        case QUORUM_FINISHED:
            idToQuorumFinishedLoadJob.put(jobId, job);
            break;
        case FINISHED:
            break;
        case CANCELLED:
            break;
        default:
            // terminal/unknown states should never reach here
            Preconditions.checkNotNull(null, "Should not be here");
    }
}
/**
 * Counts jobs in every non-terminal state (pending, etl, loading, quorum-finished).
 * Caller is expected to hold the appropriate lock.
 */
private long getAllUnfinishedLoadJob() {
    long unfinished = idToPendingLoadJob.size();
    unfinished += idToEtlLoadJob.size();
    unfinished += idToLoadingLoadJob.size();
    unfinished += idToQuorumFinishedLoadJob.size();
    return unfinished;
}
/**
 * Edit-log replay entry point: registers a load job under the write lock.
 */
public void replayAddLoadJob(LoadJob job) throws DdlException {
    writeLock();
    try {
        unprotectAddLoadJob(job, true /* replay */);
    } finally {
        writeUnlock();
    }
}
/**
 * Transitions a job from the PENDING index to the ETL index and swaps in the
 * updated job instance. Caller must hold the write lock.
 */
public void unprotectEtlLoadJob(LoadJob job) {
    long id = job.getId();
    idToPendingLoadJob.remove(id);
    idToEtlLoadJob.put(id, job);
    replaceLoadJob(job);
}
/**
 * Edit-log replay entry point: moves a job to the ETL state under the write lock.
 */
public void replayEtlLoadJob(LoadJob job) throws DdlException {
    writeLock();
    try {
        unprotectEtlLoadJob(job);
    } finally {
        writeUnlock();
    }
}
/**
 * Transitions a job from the ETL index to the LOADING index, re-registers its
 * loading partitions, and swaps in the updated job instance.
 * Caller must hold the write lock.
 */
public void unprotectLoadingLoadJob(LoadJob job) {
    long id = job.getId();
    idToEtlLoadJob.remove(id);
    idToLoadingLoadJob.put(id, job);
    recoverLoadingPartitions(job);
    replaceLoadJob(job);
}
/**
 * Edit-log replay entry point: moves a job to the LOADING state under the write lock.
 */
public void replayLoadingLoadJob(LoadJob job) throws DdlException {
    writeLock();
    try {
        unprotectLoadingLoadJob(job);
    } finally {
        writeUnlock();
    }
}
/**
 * Pre-registers a mini-load label before the job itself exists, so concurrent
 * requests with the same label are rejected early.
 *
 * @return false if this is a retry of an identical request (same label and
 *         timestamp), true if the label was newly registered
 * @throws DdlException if the db does not exist or the label is taken by a
 *         different request
 */
public boolean registerMiniLabel(String fullDbName, String label, long timestamp) throws DdlException {
    Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + fullDbName);
    }
    long dbId = db.getId();
    writeLock();
    try {
        if (unprotectIsLabelUsed(dbId, label, timestamp, true)) {
            // Identical retry: already registered, nothing more to do.
            return false;
        }
        // Get-or-create the per-db pending-label map, then record the label.
        if (!dbToMiniLabels.containsKey(dbId)) {
            dbToMiniLabels.put(dbId, Maps.newHashMap());
        }
        dbToMiniLabels.get(dbId).put(label, timestamp);
        return true;
    } finally {
        writeUnlock();
    }
}
/**
 * Removes a previously registered mini-load label; the per-db map is dropped
 * once it becomes empty. No-op if nothing was registered for the db.
 *
 * @throws DdlException if the db does not exist
 */
public void deregisterMiniLabel(String fullDbName, String label) throws DdlException {
    Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + fullDbName);
    }
    long dbId = db.getId();
    writeLock();
    try {
        if (!dbToMiniLabels.containsKey(dbId)) {
            // Nothing registered for this db.
            return;
        }
        Map<String, Long> pendingLabels = dbToMiniLabels.get(dbId);
        pendingLabels.remove(label);
        // Drop the per-db entry entirely once its last label is gone.
        if (pendingLabels.isEmpty()) {
            dbToMiniLabels.remove(dbId);
        }
    } finally {
        writeUnlock();
    }
}
/**
 * Lock-taking wrapper around {@link #unprotectIsLabelUsed}; checks with no retry
 * timestamp (-1) and includes uncommitted mini-load labels.
 *
 * @throws DdlException (LabelAlreadyUsedException) if the label is already used
 */
public boolean isLabelUsed(long dbId, String label) throws DdlException {
    readLock();
    try {
        return unprotectIsLabelUsed(dbId, label, -1, true);
    } finally {
        readUnlock();
    }
}
/*
* 1. if label is already used, and this is not a retry request,
* throw exception ("Label already used")
* 2. if label is already used, but this is a retry request,
* return true
* 3. if label is not used, return false
* 4. throw exception if encounter error.
*/
/**
 * Checks whether a label is already in use within a db. Caller must hold a lock.
 *
 * 1. label used and this is not a retry request -> throw LabelAlreadyUsedException
 * 2. label used by a retry of the same request (matching timestamp) -> return true
 * 3. label not used -> return false
 *
 * @param timestamp request timestamp; -1 means "not a retry"
 * @param checkMini also check labels pre-registered by uncommitted mini loads
 */
private boolean unprotectIsLabelUsed(long dbId, String label, long timestamp, boolean checkMini)
        throws DdlException {
    // Pass 1: labels of already-submitted load jobs.
    if (dbLabelToLoadJobs.containsKey(dbId)) {
        Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(dbId);
        if (labelToLoadJobs.containsKey(label)) {
            for (LoadJob oldJob : labelToLoadJobs.get(label)) {
                // Cancelled jobs free their label.
                if (oldJob.getState() == JobState.CANCELLED) {
                    continue;
                }
                if (timestamp != -1 && timestamp == oldJob.getTimestamp()) {
                    LOG.info("get a retry request with label: {}, timestamp: {}. return ok",
                            label, timestamp);
                    return true;
                }
                throw new LabelAlreadyUsedException(label);
            }
        }
    }
    // Pass 2: labels pre-registered by uncommitted mini loads.
    if (checkMini && dbToMiniLabels.containsKey(dbId)) {
        Map<String, Long> uncommittedLabels = dbToMiniLabels.get(dbId);
        if (uncommittedLabels.containsKey(label)) {
            if (timestamp != -1 && timestamp == uncommittedLabels.get(label)) {
                LOG.info("get a retry mini load request with label: {}, timestamp: {}. return ok",
                        label, timestamp);
                return true;
            }
            throw new LabelAlreadyUsedException(label);
        }
    }
    return false;
}
/**
 * Returns true if at least one non-cancelled load job in the db matches the label.
 *
 * @param labelValue label to look up
 * @param isAccurateMatch true for exact label equality, false for substring match
 * @throws DdlException if the db does not exist
 */
public boolean isLabelExist(String dbName, String labelValue, boolean isAccurateMatch) throws DdlException {
    Database db = Catalog.getCurrentCatalog().getDb(dbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + dbName);
    }
    readLock();
    try {
        Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
        if (labelToLoadJobs == null) {
            return false;
        }
        // Collect candidate jobs by exact or substring label match.
        List<LoadJob> loadJobs = Lists.newArrayList();
        if (isAccurateMatch) {
            if (labelToLoadJobs.containsKey(labelValue)) {
                loadJobs.addAll(labelToLoadJobs.get(labelValue));
            }
        } else {
            for (Map.Entry<String, List<LoadJob>> entry : labelToLoadJobs.entrySet()) {
                if (entry.getKey().contains(labelValue)) {
                    loadJobs.addAll(entry.getValue());
                }
            }
        }
        if (loadJobs.isEmpty()) {
            return false;
        }
        // anyMatch short-circuits, unlike the previous filter(...).count() == 0 form.
        return loadJobs.stream().anyMatch(entity -> entity.getState() != JobState.CANCELLED);
    } finally {
        readUnlock();
    }
}
/**
 * Handles the CANCEL LOAD statement: locates the newest job for the label,
 * verifies the caller's privileges, and cancels it.
 *
 * @return true on success (otherwise a DdlException is thrown)
 * @throws DdlException if the db/job does not exist, the job already finished
 *         or was cancelled, privileges are missing, or the cancel itself fails
 */
public boolean cancelLoadJob(CancelLoadStmt stmt) throws DdlException {
    // get params
    String dbName = stmt.getDbName();
    String label = stmt.getLabel();
    // get load job and check state
    Database db = Catalog.getCurrentCatalog().getDb(dbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + dbName);
    }
    LoadJob job = null;
    readLock();
    try {
        Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
        if (labelToLoadJobs == null) {
            throw new DdlException("Load job does not exist");
        }
        List<LoadJob> loadJobs = labelToLoadJobs.get(label);
        if (loadJobs == null) {
            throw new DdlException("Load job does not exist");
        }
        // the last job in the list is the most recent one for this label
        job = loadJobs.get(loadJobs.size() - 1);
        JobState state = job.getState();
        if (state == JobState.CANCELLED) {
            throw new DdlException("Load job has been cancelled");
        } else if (state == JobState.QUORUM_FINISHED || state == JobState.FINISHED) {
            throw new DdlException("Load job has been finished");
        }
    } finally {
        readUnlock();
    }
    // check auth here, cannot check in frontend analysis because job's table names
    // are only known after the job is located. Db-level LOAD priv is required when
    // the job has no table names; otherwise table-level LOAD priv on each table.
    Set<String> tableNames = job.getTableNames();
    if (tableNames.isEmpty()) {
        if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
                PrivPredicate.LOAD)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "CANCEL LOAD");
        }
    } else {
        for (String tblName : tableNames) {
            if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, tblName,
                    PrivPredicate.LOAD)) {
                ErrorReport.reportDdlException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "CANCEL LOAD",
                        ConnectContext.get().getQualifiedUser(),
                        ConnectContext.get().getRemoteIP(), tblName);
            }
        }
    }
    // cancel job; surface the first failure reason if any was collected
    List<String> failedMsg = Lists.newArrayList();
    if (!cancelLoadJob(job, CancelType.USER_CANCEL, "user cancel", failedMsg)) {
        throw new DdlException("Cancel load job fail: " + (failedMsg.isEmpty() ? "Unknown reason" : failedMsg.get(0)));
    }
    return true;
}
/**
 * Cancels a job without collecting failure reasons (delegates with a null
 * failedMsg list).
 */
public boolean cancelLoadJob(LoadJob job, CancelType cancelType, String msg) {
    return cancelLoadJob(job, cancelType, msg, null);
}
/**
 * Cancels a load job: transitions it to CANCELLED and cleans up any external
 * state tied to its previous phase.
 *
 * @param failedMsg optional out-list receiving failure reasons; may be null
 * @return true if the state transition succeeded, false otherwise
 */
public boolean cancelLoadJob(LoadJob job, CancelType cancelType, String msg, List<String> failedMsg) {
    LOG.info("try to cancel load job: {}", job);
    // Remember the pre-cancel state; cleanup below depends on it.
    JobState srcState = job.getState();
    if (!updateLoadJobState(job, JobState.CANCELLED, cancelType, msg, failedMsg)) {
        LOG.warn("cancel load job failed. job: {}", job);
        return false;
    }
    // Hadoop jobs keep intermediate ETL output that must be cleared.
    if (job.getHadoopDppConfig() != null) {
        clearJob(job, srcState);
    }
    // A broker job cancelled while still in ETL has a pull-load task to abort.
    if (job.getBrokerDesc() != null && srcState == JobState.ETL) {
        Catalog.getCurrentCatalog().getPullLoadJobMgr().cancelJob(job.getId());
    }
    LOG.info("cancel load job success. job: {}", job);
    return true;
}
/**
 * Removes a cancelled job from the state index matching its previous state and
 * swaps in the cancelled instance. Caller must hold the write lock.
 * No-op (with a warning) if the job is unknown or in a non-cancellable state.
 */
public void unprotectCancelLoadJob(LoadJob job) {
    long jobId = job.getId();
    LoadJob existingJob = idToLoadJob.get(jobId);
    if (existingJob == null) {
        LOG.warn("cancel job does not exist. id: {}", jobId);
        return;
    }
    JobState previousState = existingJob.getState();
    if (previousState == JobState.PENDING) {
        idToPendingLoadJob.remove(jobId);
    } else if (previousState == JobState.ETL) {
        idToEtlLoadJob.remove(jobId);
    } else if (previousState == JobState.LOADING) {
        idToLoadingLoadJob.remove(jobId);
        // release the partitions that were marked as loading
        removeLoadingPartitions(existingJob);
    } else {
        // finished/cancelled jobs cannot be cancelled again
        LOG.warn("cancel job has wrong src state: {}", previousState.name());
        return;
    }
    replaceLoadJob(job);
}
/**
 * Edit-log replay entry point: applies a job cancellation under the write lock.
 */
public void replayCancelLoadJob(LoadJob job) {
    writeLock();
    try {
        unprotectCancelLoadJob(job);
    } finally {
        writeUnlock();
    }
}
/**
 * Removes a quorum-finished async delete job from tracking and marks the
 * matching DeleteInfo entries (by job id) as FINISHED.
 *
 * @param job the async delete job that has fully finished
 */
public void removeDeleteJobAndSetState(AsyncDeleteJob job) {
// Drop task references first; they are no longer needed.
job.clearTasks();
writeLock();
try {
idToQuorumFinishedDeleteJob.remove(job.getJobId());
List<DeleteInfo> deleteInfos = dbToDeleteInfos.get(job.getDbId());
Preconditions.checkNotNull(deleteInfos);
for (DeleteInfo deleteInfo : deleteInfos) {
if (deleteInfo.getJobId() == job.getJobId()) {
deleteInfo.getAsyncDeleteJob().setState(DeleteState.FINISHED);
LOG.info("replay set async delete job to finished: {}", job.getJobId());
}
}
} finally {
writeUnlock();
}
}
/**
 * Returns a snapshot copy of all quorum-finished async delete jobs.
 * The returned list is independent of the internal map.
 */
public List<AsyncDeleteJob> getQuorumFinishedDeleteJobs() {
    List<AsyncDeleteJob> snapshot = Lists.newArrayList();
    readLock();
    try {
        // values() of the backing map is never null, so copy it directly.
        snapshot.addAll(idToQuorumFinishedDeleteJob.values());
    } finally {
        readUnlock();
    }
    return snapshot;
}
/**
 * Counts tracked load jobs, excluding synchronous delete jobs
 * (which are stored in the same map but are not "load" jobs).
 *
 * @return number of non-delete load jobs, or 0 if the map is absent
 */
public int getLoadJobNumber() {
    readLock();
    try {
        if (idToLoadJob == null) {
            return 0;
        }
        int count = 0;
        for (LoadJob candidate : idToLoadJob.values()) {
            if (candidate.isSyncDeleteJob()) {
                continue;
            }
            count++;
        }
        return count;
    } finally {
        readUnlock();
    }
}
// NOTE(review): the following getters expose internal mutable maps directly;
// callers are expected to hold the appropriate lock — confirm against call sites.
/** @return live map of job id to load job (not a copy). */
public Map<Long, LoadJob> getIdToLoadJob() {
return idToLoadJob;
}
/** @return live map of db id to its load jobs (not a copy). */
public Map<Long, List<LoadJob>> getDbToLoadJobs() {
return dbToLoadJobs;
}
/** @return live map of db id to its delete jobs (not a copy). */
public Map<Long, List<LoadJob>> getDbToDeleteJobs() {
return dbToDeleteJobs;
}
/** @return live map of db id to its delete infos (not a copy). */
public Map<Long, List<DeleteInfo>> getDbToDeleteInfos() {
return dbToDeleteInfos;
}
/**
 * Collects the transaction ids of every load job belonging to a database.
 *
 * @param dbId database id
 * @return set of transaction ids; empty if the db has no jobs
 */
public Set<Long> getTxnIdsByDb(Long dbId) {
    Set<Long> result = Sets.newHashSet();
    readLock();
    try {
        List<LoadJob> dbJobs = dbToLoadJobs.get(dbId);
        if (dbJobs == null) {
            return result;
        }
        for (LoadJob dbJob : dbJobs) {
            result.add(dbJob.getTransactionId());
        }
    } finally {
        readUnlock();
    }
    return result;
}
/**
 * Returns the internal job list for a database.
 * NOTE(review): returns the live list, not a copy — callers mutating it
 * bypass this class's locking; verify against call sites.
 *
 * @return the db's job list, or null if none
 */
public List<LoadJob> getDbLoadJobs(long dbId) {
readLock();
try {
return dbToLoadJobs.get(dbId);
} finally {
readUnlock();
}
}
/**
 * Returns a snapshot of all load jobs currently in the given state.
 * Only PENDING, ETL, LOADING and QUORUM_FINISHED have dedicated maps;
 * any other state yields an empty list.
 *
 * @param jobState state to select
 * @return copied list of matching jobs (possibly empty)
 */
public List<LoadJob> getLoadJobs(JobState jobState) {
    List<LoadJob> result = new ArrayList<LoadJob>();
    readLock();
    try {
        Collection<LoadJob> selected = null;
        if (jobState == JobState.PENDING) {
            selected = idToPendingLoadJob.values();
        } else if (jobState == JobState.ETL) {
            selected = idToEtlLoadJob.values();
        } else if (jobState == JobState.LOADING) {
            selected = idToLoadingLoadJob.values();
        } else if (jobState == JobState.QUORUM_FINISHED) {
            selected = idToQuorumFinishedLoadJob.values();
        }
        if (selected != null) {
            result.addAll(selected);
        }
    } finally {
        readUnlock();
    }
    return result;
}
/**
 * Counts the load jobs of a database that are in the given state.
 *
 * @param jobState state to match
 * @param dbId database id
 * @return number of matching jobs; 0 if the db has no jobs
 */
public long getLoadJobNum(JobState jobState, long dbId) {
    readLock();
    try {
        List<LoadJob> dbJobs = this.dbToLoadJobs.get(dbId);
        if (dbJobs == null) {
            return 0;
        }
        int matched = 0;
        for (LoadJob dbJob : dbJobs) {
            if (dbJob.getState() != jobState) {
                continue;
            }
            matched++;
        }
        return matched;
    } finally {
        readUnlock();
    }
}
/**
 * Looks up a load job by id.
 *
 * @return the job, or null if unknown
 */
public LoadJob getLoadJob(long jobId) {
readLock();
try {
return idToLoadJob.get(jobId);
} finally {
readUnlock();
}
}
/**
 * Looks up a quorum-finished async delete job by id.
 *
 * @return the job, or null if unknown
 */
public AsyncDeleteJob getAsyncDeleteJob(long jobId) {
readLock();
try {
return idToQuorumFinishedDeleteJob.get(jobId);
} finally {
readUnlock();
}
}
/**
 * Returns a copied snapshot of all quorum-finished async delete jobs,
 * safe to iterate without holding this manager's lock.
 */
public List<AsyncDeleteJob> getCopiedAsyncDeleteJobs() {
readLock();
try {
return Lists.newArrayList(idToQuorumFinishedDeleteJob.values());
} finally {
readUnlock();
}
}
/**
 * Builds display rows ("SHOW LOAD"-style) for the load jobs of a database,
 * filtered by label and state, and restricted to jobs the current user has
 * LOAD privilege on.
 *
 * Fix: the ETL-status column already guarded against a null EtlStatus, but the
 * tracking-URL column dereferenced it unconditionally — now both are guarded.
 *
 * @param dbId database id
 * @param dbName qualified database name (for privilege checks)
 * @param labelValue label filter, or null for no filtering
 * @param accurateMatch true = exact label match, false = substring match
 * @param states state filter, or null for all states
 * @return one row (list of columns) per visible job
 */
public LinkedList<List<Comparable>> getLoadJobInfosByDb(long dbId, String dbName, String labelValue,
                                                        boolean accurateMatch, Set<JobState> states) {
    LinkedList<List<Comparable>> loadJobInfos = new LinkedList<List<Comparable>>();
    readLock();
    try {
        List<LoadJob> loadJobs = this.dbToLoadJobs.get(dbId);
        if (loadJobs == null) {
            return loadJobInfos;
        }
        long start = System.currentTimeMillis();
        LOG.debug("begin to get load job info, size: {}", loadJobs.size());
        for (LoadJob loadJob : loadJobs) {
            String label = loadJob.getLabel();
            JobState state = loadJob.getState();
            // Label filter: exact or substring, depending on accurateMatch.
            if (labelValue != null) {
                if (accurateMatch) {
                    if (!label.equals(labelValue)) {
                        continue;
                    }
                } else {
                    if (!label.contains(labelValue)) {
                        continue;
                    }
                }
            }
            // State filter.
            if (states != null) {
                if (!states.contains(state)) {
                    continue;
                }
            }
            // Privilege check: db-level when the job records no tables,
            // otherwise LOAD privilege is required on every involved table.
            Set<String> tableNames = loadJob.getTableNames();
            if (tableNames.isEmpty()) {
                if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
                        PrivPredicate.LOAD)) {
                    continue;
                }
            } else {
                boolean auth = true;
                for (String tblName : tableNames) {
                    if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName,
                            tblName, PrivPredicate.LOAD)) {
                        auth = false;
                        break;
                    }
                }
                if (!auth) {
                    continue;
                }
            }
            List<Comparable> jobInfo = new ArrayList<Comparable>();
            jobInfo.add(loadJob.getId());
            jobInfo.add(label);
            jobInfo.add(state.name());
            // Progress column, rendered per state.
            switch (loadJob.getState()) {
                case PENDING:
                    jobInfo.add("ETL:0%; LOAD:0%");
                    break;
                case ETL:
                    jobInfo.add("ETL:" + loadJob.getProgress() + "%; LOAD:0%");
                    break;
                case LOADING:
                    jobInfo.add("ETL:100%; LOAD:" + loadJob.getProgress() + "%");
                    break;
                case QUORUM_FINISHED:
                case FINISHED:
                    jobInfo.add("ETL:100%; LOAD:100%");
                    break;
                case CANCELLED:
                default:
                    jobInfo.add("ETL:N/A; LOAD:N/A");
                    break;
            }
            jobInfo.add(loadJob.getEtlJobType().name());
            // ETL counters column. status may be null or cancelled — show N/A then.
            EtlStatus status = loadJob.getEtlJobStatus();
            if (status == null || status.getState() == TEtlState.CANCELLED) {
                jobInfo.add(FeConstants.null_string);
            } else {
                Map<String, String> counters = status.getCounters();
                List<String> info = Lists.newArrayList();
                for (String key : counters.keySet()) {
                    // Only expose well-known counters (or all of them for MINI loads).
                    if (key.equalsIgnoreCase("HDFS bytes read")
                            || key.equalsIgnoreCase("Map input records")
                            || key.startsWith("dpp.")
                            || loadJob.getEtlJobType() == EtlJobType.MINI) {
                        info.add(key + "=" + counters.get(key));
                    }
                }
                if (info.isEmpty()) {
                    jobInfo.add(FeConstants.null_string);
                } else {
                    jobInfo.add(StringUtils.join(info, "; "));
                }
            }
            jobInfo.add("cluster:" + loadJob.getHadoopCluster()
                    + "; timeout(s):" + loadJob.getTimeoutSecond()
                    + "; max_filter_ratio:" + loadJob.getMaxFilterRatio());
            // Failure column, only meaningful for cancelled jobs.
            if (loadJob.getState() == JobState.CANCELLED) {
                FailMsg failMsg = loadJob.getFailMsg();
                jobInfo.add("type:" + failMsg.getCancelType() + "; msg:" + failMsg.getMsg());
            } else {
                jobInfo.add(FeConstants.null_string);
            }
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getCreateTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getEtlStartTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getEtlFinishTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getLoadStartTimeMs()));
            jobInfo.add(TimeUtils.longToTimeString(loadJob.getLoadFinishTimeMs()));
            // BUGFIX: guard against a null EtlStatus, matching the counters column above.
            jobInfo.add(status == null ? FeConstants.null_string : status.getTrackingUrl());
            loadJobInfos.add(jobInfo);
        }
        LOG.debug("finished to get load job info, cost: {}", (System.currentTimeMillis() - start));
    } finally {
        readUnlock();
    }
    return loadJobInfos;
}
/**
 * Returns the highest (i.e. most recently assigned) job id among a database's
 * load jobs whose label equals {@code labelValue} (or among all jobs when the
 * label is null).
 *
 * Fix: removed the unused local {@code LoadJob job} that was assigned but never read.
 *
 * @param dbId database id
 * @param labelValue exact label to match, or null to consider every job
 * @return the largest matching job id, or 0 if none match
 */
public long getLatestJobIdByLabel(long dbId, String labelValue) {
    long jobId = 0;
    readLock();
    try {
        List<LoadJob> loadJobs = this.dbToLoadJobs.get(dbId);
        if (loadJobs == null) {
            return 0;
        }
        for (LoadJob loadJob : loadJobs) {
            if (labelValue != null && !loadJob.getLabel().equals(labelValue)) {
                continue;
            }
            // Job ids are assumed monotonically increasing, so max id == latest job.
            long currJobId = loadJob.getId();
            if (currJobId > jobId) {
                jobId = currJobId;
            }
        }
    } finally {
        readUnlock();
    }
    return jobId;
}
/**
 * For a LOADING or QUORUM_FINISHED job, lists every replica that has not yet
 * caught up to the version the job is loading, one row per lagging replica:
 * [backendId, tabletId, replicaId, replicaVersion, replicaVersionHash,
 *  partitionId, targetVersion, targetVersionHash], sorted by partitionId
 * then backendId.
 *
 * Lock order: db read lock first, then this manager's read lock.
 *
 * @param jobId the load job id
 * @return sorted rows; empty if the job/db is missing or not in a loading state
 */
public List<List<Comparable>> getLoadJobUnfinishedInfo(long jobId) {
LinkedList<List<Comparable>> infos = new LinkedList<List<Comparable>>();
TabletInvertedIndex invertedIndex = Catalog.getCurrentInvertedIndex();
LoadJob loadJob = getLoadJob(jobId);
if (loadJob == null
|| (loadJob.getState() != JobState.LOADING && loadJob.getState() != JobState.QUORUM_FINISHED)) {
return infos;
}
long dbId = loadJob.getDbId();
Database db = Catalog.getCurrentCatalog().getDb(dbId);
if (db == null) {
return infos;
}
db.readLock();
try {
readLock();
try {
Map<Long, TabletLoadInfo> tabletMap = loadJob.getIdToTabletLoadInfo();
for (long tabletId : tabletMap.keySet()) {
// Resolve tablet -> table -> partition -> index -> tablet; any link may
// have been dropped concurrently, so each step is null-checked and skipped.
TabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);
if (tabletMeta == null) {
continue;
}
long tableId = tabletMeta.getTableId();
OlapTable table = (OlapTable) db.getTable(tableId);
if (table == null) {
continue;
}
long partitionId = tabletMeta.getPartitionId();
Partition partition = table.getPartition(partitionId);
if (partition == null) {
continue;
}
long indexId = tabletMeta.getIndexId();
MaterializedIndex index = partition.getIndex(indexId);
if (index == null) {
continue;
}
Tablet tablet = index.getTablet(tabletId);
if (tablet == null) {
continue;
}
PartitionLoadInfo partitionLoadInfo = loadJob.getPartitionLoadInfo(tableId, partitionId);
long version = partitionLoadInfo.getVersion();
long versionHash = partitionLoadInfo.getVersionHash();
for (Replica replica : tablet.getReplicas()) {
// Replicas already at (or past) the target version are not reported.
if (replica.checkVersionCatchUp(version, versionHash, false)) {
continue;
}
List<Comparable> info = Lists.newArrayList();
info.add(replica.getBackendId());
info.add(tabletId);
info.add(replica.getId());
info.add(replica.getVersion());
info.add(replica.getVersionHash());
info.add(partitionId);
info.add(version);
info.add(versionHash);
infos.add(info);
}
}
} finally {
readUnlock();
}
} finally {
db.readUnlock();
}
// Sort by partition id (column 5) then backend id (column 0).
ListComparator<List<Comparable>> comparator = new ListComparator<List<Comparable>>(3, 0);
Collections.sort(infos, comparator);
return infos;
}
/** @return the current load-error hub configuration (may be null if never set). */
public LoadErrorHub.Param getLoadErrorHubInfo() {
return loadErrorHubParam;
}
/** Directly sets the load-error hub configuration (used by replay; no validation). */
public void setLoadErrorHubInfo(LoadErrorHub.Param info) {
this.loadErrorHubParam = info;
}
/**
 * Validates and applies a load-error hub configuration from user-supplied
 * properties, then logs the change to the edit log.
 *
 * Supported types: MYSQL (host/port/user/password/database/table),
 * BROKER (name/path plus broker-specific properties), and "null" to disable.
 *
 * Fixes: a missing "type" property previously caused a NullPointerException,
 * and an unrecognized type silently re-logged the old configuration; both now
 * raise a DdlException.
 *
 * @param properties user-supplied hub properties; must contain "type"
 * @throws DdlException if required properties are missing or invalid
 */
public void setLoadErrorHubInfo(Map<String, String> properties) throws DdlException {
    String type = properties.get("type");
    // BUGFIX: a missing type used to NPE on the equalsIgnoreCase call below.
    if (Strings.isNullOrEmpty(type)) {
        throw new DdlException("load error hub type is missing");
    }
    if (type.equalsIgnoreCase("MYSQL")) {
        String host = properties.get("host");
        if (Strings.isNullOrEmpty(host)) {
            throw new DdlException("mysql host is missing");
        }
        int port = -1;
        try {
            port = Integer.valueOf(properties.get("port"));
        } catch (NumberFormatException e) {
            throw new DdlException("invalid mysql port: " + properties.get("port"));
        }
        String user = properties.get("user");
        if (Strings.isNullOrEmpty(user)) {
            throw new DdlException("mysql user name is missing");
        }
        String db = properties.get("database");
        if (Strings.isNullOrEmpty(db)) {
            throw new DdlException("mysql database is missing");
        }
        String tbl = properties.get("table");
        if (Strings.isNullOrEmpty(tbl)) {
            throw new DdlException("mysql table is missing");
        }
        // Password is optional; treat absence as empty.
        String pwd = Strings.nullToEmpty(properties.get("password"));
        MysqlLoadErrorHub.MysqlParam param = new MysqlLoadErrorHub.MysqlParam(host, port, user, pwd, db, tbl);
        loadErrorHubParam = LoadErrorHub.Param.createMysqlParam(param);
    } else if (type.equalsIgnoreCase("BROKER")) {
        String brokerName = properties.get("name");
        if (Strings.isNullOrEmpty(brokerName)) {
            throw new DdlException("broker name is missing");
        }
        properties.remove("name");
        if (!Catalog.getCurrentCatalog().getBrokerMgr().contaisnBroker(brokerName)) {
            throw new DdlException("broker does not exist: " + brokerName);
        }
        String path = properties.get("path");
        if (Strings.isNullOrEmpty(path)) {
            throw new DdlException("broker path is missing");
        }
        properties.remove("path");
        // Probe the path through the broker before accepting the config.
        BlobStorage blobStorage = new BlobStorage(brokerName, properties);
        Status st = blobStorage.checkPathExist(path);
        if (!st.ok()) {
            throw new DdlException("failed to visit path: " + path + ", err: " + st.getErrMsg());
        }
        BrokerLoadErrorHub.BrokerParam param = new BrokerLoadErrorHub.BrokerParam(brokerName, path, properties);
        loadErrorHubParam = LoadErrorHub.Param.createBrokerParam(param);
    } else if (type.equalsIgnoreCase("null")) {
        loadErrorHubParam = LoadErrorHub.Param.createNullParam();
    } else {
        // BUGFIX: previously fell through and logged the unchanged old param.
        throw new DdlException("unknown load error hub type: " + type);
    }
    Catalog.getCurrentCatalog().getEditLog().logSetLoadErrorHub(loadErrorHubParam);
    LOG.info("set load error hub info: {}", loadErrorHubParam);
}
/**
 * Mutable holder used to pass job lookup results back to callers of
 * {@code getJobInfo}: constructed with the lookup key (db/label/cluster),
 * then filled in with state, tables, fail message and tracking URL.
 */
public static class JobInfo {
// Rewritten to the fully-qualified name inside getJobInfo.
public String dbName;
// Tables touched by the job; empty if the job records none.
public Set<String> tblNames = Sets.newHashSet();
public String label;
public String clusterName;
public JobState state;
public String failMsg;
public String trackingUrl;
public JobInfo(String dbName, String label, String clusterName) {
this.dbName = dbName;
this.label = label;
this.clusterName = clusterName;
}
}
/**
 * Fills the given JobInfo with the state of the most recent load job that
 * matches its db/label. QUORUM_FINISHED is reported to callers as FINISHED.
 *
 * @param info in: clusterName, dbName, label; out: dbName (qualified),
 *             tblNames, state, failMsg, trackingUrl
 * @throws MetaNotFoundException if the database does not exist
 * @throws DdlException if the db has no jobs or the label is unknown
 */
public void getJobInfo(JobInfo info) throws DdlException, MetaNotFoundException {
String fullDbName = ClusterNamespace.getFullName(info.clusterName, info.dbName);
info.dbName = fullDbName;
Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
if (db == null) {
throw new MetaNotFoundException("Unknown database(" + info.dbName + ")");
}
readLock();
try {
Map<String, List<LoadJob>> labelToLoadJobs = dbLabelToLoadJobs.get(db.getId());
if (labelToLoadJobs == null) {
throw new DdlException("No jobs belong to database(" + info.dbName + ")");
}
List<LoadJob> loadJobs = labelToLoadJobs.get(info.label);
if (loadJobs == null) {
throw new DdlException("Unknown job(" + info.label + ")");
}
// The last entry is the most recent job with this label.
LoadJob job = loadJobs.get(loadJobs.size() - 1);
if (!job.getTableNames().isEmpty()) {
info.tblNames.addAll(job.getTableNames());
}
info.state = job.getState();
// External callers only see FINISHED once quorum is reached.
if (info.state == JobState.QUORUM_FINISHED) {
info.state = JobState.FINISHED;
}
info.failMsg = job.getFailMsg().getMsg();
info.trackingUrl = job.getEtlJobStatus().getTrackingUrl();
} finally {
readUnlock();
}
}
/**
 * Applies a quorum-finished job during replay without taking locks
 * (callers hold db write lock and this manager's write lock).
 *
 * For pre-transaction jobs (transactionId &lt; 0) this replays the full
 * effect: releases loading partitions, applies persisted replica version
 * info, bumps partition versions, recomputes index row counts, and moves
 * the job from the LOADING map to the QUORUM_FINISHED map. For
 * transactional jobs only the stored job instance is swapped.
 *
 * @param job replayed job
 * @param db the job's database (already write-locked by the caller)
 */
public void unprotectQuorumLoadJob(LoadJob job, Database db) {
// Old-style (non-transactional) jobs carry their replica/partition effects
// in the edit log and must be re-applied here.
if (job.getTransactionId() < 0) {
removeLoadingPartitions(job);
Map<Long, ReplicaPersistInfo> replicaInfos = job.getReplicaPersistInfos();
if (replicaInfos != null) {
for (ReplicaPersistInfo info : replicaInfos.values()) {
// Each level may have been dropped since the log was written; skip if so.
OlapTable table = (OlapTable) db.getTable(info.getTableId());
if (table == null) {
LOG.warn("the table[{}] is missing", info.getIndexId());
continue;
}
Partition partition = table.getPartition(info.getPartitionId());
if (partition == null) {
LOG.warn("the partition[{}] is missing", info.getIndexId());
continue;
}
MaterializedIndex index = partition.getIndex(info.getIndexId());
if (index == null) {
LOG.warn("the index[{}] is missing", info.getIndexId());
continue;
}
Tablet tablet = index.getTablet(info.getTabletId());
if (tablet == null) {
LOG.warn("the tablet[{}] is missing", info.getTabletId());
continue;
}
Replica replica = tablet.getReplicaById(info.getReplicaId());
if (replica == null) {
LOG.warn("the replica[{}] is missing", info.getReplicaId());
continue;
}
replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
info.getDataSize(), info.getRowCount());
}
}
long jobId = job.getId();
Map<Long, TableLoadInfo> idToTableLoadInfo = job.getIdToTableLoadInfo();
if (idToTableLoadInfo != null) {
for (Entry<Long, TableLoadInfo> tableEntry : idToTableLoadInfo.entrySet()) {
long tableId = tableEntry.getKey();
OlapTable table = (OlapTable) db.getTable(tableId);
TableLoadInfo tableLoadInfo = tableEntry.getValue();
for (Entry<Long, PartitionLoadInfo> entry : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
long partitionId = entry.getKey();
Partition partition = table.getPartition(partitionId);
PartitionLoadInfo partitionLoadInfo = entry.getValue();
if (!partitionLoadInfo.isNeedLoad()) {
continue;
}
updatePartitionVersion(partition, partitionLoadInfo.getVersion(),
partitionLoadInfo.getVersionHash(), jobId);
// Recompute each index's row count as the sum over tablets of the
// max replica row count (replicas may disagree during loading).
for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
long indexRowCount = 0L;
for (Tablet tablet : materializedIndex.getTablets()) {
long tabletRowCount = 0L;
for (Replica replica : tablet.getReplicas()) {
long replicaRowCount = replica.getRowCount();
if (replicaRowCount > tabletRowCount) {
tabletRowCount = replicaRowCount;
}
}
indexRowCount += tabletRowCount;
}
materializedIndex.setRowCount(indexRowCount);
}
}
}
}
idToLoadingLoadJob.remove(jobId);
idToQuorumFinishedLoadJob.put(jobId, job);
}
replaceLoadJob(job);
}
/**
 * Edit-log replay entry point for a quorum-finished load job.
 * Lock order matters: db write lock first, then this manager's write lock.
 */
public void replayQuorumLoadJob(LoadJob job, Catalog catalog) throws DdlException {
Database db = catalog.getDb(job.getDbId());
db.writeLock();
try {
writeLock();
try {
unprotectQuorumLoadJob(job, db);
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
}
/**
 * Applies a finished job during replay without taking locks
 * (callers hold db write lock and this manager's write lock).
 *
 * Pre-transaction jobs (transactionId &lt; 0) re-apply persisted replica
 * version info and leave the QUORUM_FINISHED map; transactional jobs are
 * removed from PENDING/LOADING and marked 100% complete.
 *
 * @param job replayed job
 * @param db the job's database (already write-locked by the caller)
 */
public void unprotectFinishLoadJob(LoadJob job, Database db) {
long jobId = job.getId();
if (job.getTransactionId() < 0) {
idToQuorumFinishedLoadJob.remove(jobId);
Map<Long, ReplicaPersistInfo> replicaInfos = job.getReplicaPersistInfos();
if (replicaInfos != null) {
for (ReplicaPersistInfo info : replicaInfos.values()) {
// Any level may have been dropped since the log entry; skip if so.
OlapTable table = (OlapTable) db.getTable(info.getTableId());
if (table == null) {
LOG.warn("the table[{}] is missing", info.getIndexId());
continue;
}
Partition partition = table.getPartition(info.getPartitionId());
if (partition == null) {
LOG.warn("the partition[{}] is missing", info.getIndexId());
continue;
}
MaterializedIndex index = partition.getIndex(info.getIndexId());
if (index == null) {
LOG.warn("the index[{}] is missing", info.getIndexId());
continue;
}
Tablet tablet = index.getTablet(info.getTabletId());
if (tablet == null) {
LOG.warn("the tablet[{}] is missing", info.getTabletId());
continue;
}
Replica replica = tablet.getReplicaById(info.getReplicaId());
if (replica == null) {
LOG.warn("the replica[{}] is missing", info.getReplicaId());
continue;
}
replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
info.getDataSize(), info.getRowCount());
}
}
} else {
idToPendingLoadJob.remove(jobId);
idToLoadingLoadJob.remove(jobId);
job.setProgress(100);
job.setLoadFinishTimeMs(System.currentTimeMillis());
}
replaceLoadJob(job);
}
/**
 * Edit-log replay entry point for a finished load job.
 * Lock order matters: db write lock first, then this manager's write lock.
 */
public void replayFinishLoadJob(LoadJob job, Catalog catalog) {
Database db = catalog.getDb(job.getDbId());
db.writeLock();
try {
writeLock();
try {
unprotectFinishLoadJob(job, db);
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
}
/**
 * Edit-log replay: clears the rollup-index info of the materialized index
 * identified by the persisted replica info.
 * NOTE(review): unlike the other replay paths, intermediate lookups here are
 * not null-checked — presumably the referenced objects are guaranteed to
 * exist at replay time; confirm against the logging side.
 */
public void replayClearRollupInfo(ReplicaPersistInfo info, Catalog catalog) {
Database db = catalog.getDb(info.getDbId());
db.writeLock();
try {
OlapTable olapTable = (OlapTable) db.getTable(info.getTableId());
Partition partition = olapTable.getPartition(info.getPartitionId());
MaterializedIndex index = partition.getIndex(info.getIndexId());
index.clearRollupIndexInfo();
} finally {
db.writeUnlock();
}
}
/**
 * Replaces the stored instance of a job (matched by id) in every index that
 * tracks it: idToLoadJob, plus either dbToLoadJobs + dbLabelToLoadJobs for
 * load jobs, or dbToDeleteJobs for synchronous delete jobs. Used by replay
 * paths to swap in the deserialized job object.
 *
 * Refactor: the triplicated linear-search-and-swap sequence is extracted into
 * {@link #indexOfJob}, and remove+add at the same position is collapsed to
 * {@code List.set}.
 *
 * @param job replacement job; its id selects the entry to replace
 */
private void replaceLoadJob(LoadJob job) {
    long jobId = job.getId();
    if (!idToLoadJob.containsKey(jobId)) {
        LOG.warn("Does not find load job in idToLoadJob. JobId : {}", jobId);
        return;
    }
    idToLoadJob.put(jobId, job);
    if (!job.isSyncDeleteJob()) {
        // Replace in the per-db list.
        List<LoadJob> jobs = dbToLoadJobs.get(job.getDbId());
        if (jobs == null) {
            LOG.warn("Does not find db in dbToLoadJobs. DbId : {}",
                    job.getDbId());
            return;
        }
        int pos = indexOfJob(jobs, jobId);
        if (pos < 0) {
            LOG.warn("Does not find load job for db. DbId : {}, jobId : {}",
                    job.getDbId(), jobId);
            return;
        }
        jobs.set(pos, job);
        // Replace in the per-db, per-label list.
        if (dbLabelToLoadJobs.get(job.getDbId()) == null) {
            LOG.warn("Does not find db in dbLabelToLoadJobs. DbId : {}",
                    job.getDbId());
            return;
        }
        jobs = dbLabelToLoadJobs.get(job.getDbId()).get(job.getLabel());
        if (jobs == null) {
            LOG.warn("Does not find label for db. label : {}, DbId : {}",
                    job.getLabel(), job.getDbId());
            return;
        }
        pos = indexOfJob(jobs, jobId);
        if (pos < 0) {
            LOG.warn("Does not find load job for label. label : {}, DbId : {}",
                    job.getLabel(), job.getDbId());
            return;
        }
        jobs.set(pos, job);
    } else {
        // Synchronous delete jobs are only indexed in dbToDeleteJobs.
        List<LoadJob> jobs = dbToDeleteJobs.get(job.getDbId());
        if (jobs == null) {
            LOG.warn("Does not find db in dbToDeleteJobs. DbId : {}",
                    job.getDbId());
            return;
        }
        int pos = indexOfJob(jobs, jobId);
        if (pos < 0) {
            LOG.warn("Does not find delete load job for db. DbId : {}, jobId : {}",
                    job.getDbId(), jobId);
            return;
        }
        jobs.set(pos, job);
    }
}

/** Returns the index of the job with the given id in the list, or -1 if absent. */
private static int indexOfJob(List<LoadJob> jobs, long jobId) {
    for (int i = 0; i < jobs.size(); i++) {
        if (jobs.get(i).getId() == jobId) {
            return i;
        }
    }
    return -1;
}
/**
 * Drops all load/delete job bookkeeping for a database (used when the db is
 * removed). Terminal jobs (CANCELLED/FINISHED) are also purged from the
 * global id map; non-terminal ones stay there.
 *
 * @param dbId id of the removed database
 */
public void removeDbLoadJob(long dbId) {
    writeLock();
    try {
        // Map.remove is a no-op for absent keys, so no containsKey guard is needed.
        List<LoadJob> removedJobs = dbToLoadJobs.remove(dbId);
        if (removedJobs != null) {
            for (LoadJob removedJob : removedJobs) {
                JobState state = removedJob.getState();
                boolean terminal = (state == JobState.CANCELLED || state == JobState.FINISHED);
                if (terminal) {
                    idToLoadJob.remove(removedJob.getId());
                }
            }
        }
        dbLabelToLoadJobs.remove(dbId);
        dbToDeleteJobs.remove(dbId);
    } finally {
        writeUnlock();
    }
}
/**
 * Garbage-collects terminal (FINISHED/CANCELLED) jobs older than
 * Config.label_keep_max_second from all indexes: idToLoadJob,
 * dbToLoadJobs, dbToDeleteJobs and dbLabelToLoadJobs, pruning
 * per-db/per-label entries that become empty.
 */
public void removeOldLoadJobs() {
long currentTimeMs = System.currentTimeMillis();
writeLock();
try {
Iterator<Map.Entry<Long, LoadJob>> iter = idToLoadJob.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<Long, LoadJob> entry = iter.next();
LoadJob job = entry.getValue();
// Only expire jobs that are both old enough and in a terminal state.
if ((currentTimeMs - job.getCreateTimeMs()) / 1000 > Config.label_keep_max_second
&& (job.getState() == JobState.FINISHED || job.getState() == JobState.CANCELLED)) {
long dbId = job.getDbId();
String label = job.getLabel();
// Iterator.remove keeps the idToLoadJob iteration valid.
iter.remove();
List<LoadJob> loadJobs = dbToLoadJobs.get(dbId);
if (loadJobs != null) {
loadJobs.remove(job);
if (loadJobs.size() == 0) {
dbToLoadJobs.remove(dbId);
}
}
List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
if (deleteJobs != null) {
deleteJobs.remove(job);
if (deleteJobs.size() == 0) {
dbToDeleteJobs.remove(dbId);
}
}
// Prune label -> jobs, then db -> labels, as they empty out.
Map<String, List<LoadJob>> mapLabelToJobs = dbLabelToLoadJobs.get(dbId);
if (mapLabelToJobs != null) {
loadJobs = mapLabelToJobs.get(label);
if (loadJobs != null) {
loadJobs.remove(job);
if (loadJobs.size() == 0) {
mapLabelToJobs.remove(label);
if (mapLabelToJobs.size() == 0) {
dbLabelToLoadJobs.remove(dbId);
}
}
}
}
}
}
} finally {
writeUnlock();
}
}
/**
 * Cleans up external artifacts of a terminal (CANCELLED/FINISHED) job,
 * dispatching on the ETL type:
 *  - HADOOP: kill the ETL job if it was cancelled mid-ETL, then delete the
 *    ETL output path (both best-effort);
 *  - MINI: ask each involved backend to delete its ETL files;
 *  - INSERT/BROKER/DELETE: nothing to clean up.
 *
 * @param job the terminal job
 * @param srcState the state the job was in before reaching the terminal state
 */
public void clearJob(LoadJob job, JobState srcState) {
JobState state = job.getState();
if (state != JobState.CANCELLED && state != JobState.FINISHED) {
LOG.warn("job state error. state: {}", state);
return;
}
EtlJobType etlJobType = job.getEtlJobType();
switch (etlJobType) {
case HADOOP:
DppScheduler dppScheduler = new DppScheduler(job.getHadoopDppConfig());
// Only a cancel during the ETL phase leaves a running Hadoop job to kill.
if (state == JobState.CANCELLED && srcState == JobState.ETL) {
try {
dppScheduler.killEtlJob(job.getHadoopEtlJobId());
} catch (Exception e) {
LOG.warn("kill etl job error", e);
}
}
DppConfig dppConfig = job.getHadoopDppConfig();
String outputPath = DppScheduler.getEtlOutputPath(dppConfig.getFsDefaultName(),
dppConfig.getOutputPath(), job.getDbId(), job.getLabel(), "");
try {
dppScheduler.deleteEtlOutputPath(outputPath);
} catch (Exception e) {
// Best-effort cleanup; leftover output is only wasted space.
LOG.warn("delete etl output path error", e);
}
break;
case MINI:
for (MiniEtlTaskInfo taskInfo : job.getMiniEtlTasks().values()) {
long backendId = taskInfo.getBackendId();
Backend backend = Catalog.getCurrentSystemInfo().getBackend(backendId);
if (backend == null) {
LOG.warn("backend does not exist. id: {}", backendId);
break;
}
long dbId = job.getDbId();
Database db = Catalog.getCurrentCatalog().getDb(dbId);
if (db == null) {
LOG.warn("db does not exist. id: {}", dbId);
break;
}
AgentClient client = new AgentClient(backend.getHost(), backend.getBePort());
client.deleteEtlFiles(dbId, job.getId(), db.getFullName(), job.getLabel());
}
break;
case INSERT:
break;
case BROKER:
break;
case DELETE:
break;
default:
LOG.warn("unknown etl job type. type: {}, job id: {}", etlJobType.name(), job.getId());
break;
}
}
/**
 * Transitions a job to destState with no cancel reason.
 * Convenience overload of the five-argument updateLoadJobState.
 */
public boolean updateLoadJobState(LoadJob job, JobState destState) {
return updateLoadJobState(job, destState, CancelType.UNKNOWN, null, null);
}
/**
 * Core state machine of the load pipeline. Validates the transition against
 * STATE_CHANGE_MAP, moves the job between the per-state maps, updates
 * progress/timestamps, and writes the corresponding edit-log record.
 * If the database is gone, the job is force-cancelled instead.
 *
 * Lock order: db write lock first, then this manager's write lock.
 *
 * @param job the job to transition
 * @param destState requested destination state
 * @param cancelType cancel reason category (used when destState is CANCELLED
 *                   or when an internal step falls back to cancelling)
 * @param msg cancel reason text
 * @param failedMsg optional out-list of failure reasons; may be null
 * @return true if the job ended up in destState, false otherwise
 */
public boolean updateLoadJobState(LoadJob job, JobState destState, CancelType cancelType, String msg,
List<String> failedMsg) {
boolean result = true;
JobState srcState = null;
long jobId = job.getId();
long dbId = job.getDbId();
Database db = Catalog.getCurrentCatalog().getDb(dbId);
String errMsg = msg;
if (db == null) {
// The database was dropped out from under the job: cancel regardless
// of the requested destination state.
errMsg = "db does not exist. id: " + dbId;
LOG.warn(errMsg);
writeLock();
try {
processCancelled(job, cancelType, errMsg, failedMsg);
} finally {
writeUnlock();
}
} else {
db.writeLock();
try {
writeLock();
try {
// Reject transitions not allowed by the state-change table.
srcState = job.getState();
if (!STATE_CHANGE_MAP.containsKey(srcState)) {
LOG.warn("src state error. src state: {}", srcState.name());
return false;
}
Set<JobState> destStates = STATE_CHANGE_MAP.get(srcState);
if (!destStates.contains(destState)) {
LOG.warn("state change error. src state: {}, dest state: {}",
srcState.name(), destState.name());
return false;
}
switch (destState) {
case ETL:
idToPendingLoadJob.remove(jobId);
idToEtlLoadJob.put(jobId, job);
job.setProgress(0);
job.setEtlStartTimeMs(System.currentTimeMillis());
job.setState(destState);
Catalog.getCurrentCatalog().getEditLog().logLoadEtl(job);
break;
case LOADING:
idToEtlLoadJob.remove(jobId);
idToLoadingLoadJob.put(jobId, job);
job.setProgress(0);
job.setLoadStartTimeMs(System.currentTimeMillis());
job.setState(destState);
Catalog.getCurrentCatalog().getEditLog().logLoadLoading(job);
break;
case QUORUM_FINISHED:
// processQuorumFinished sets the state itself; on failure the
// job is cancelled instead of left dangling.
if (processQuorumFinished(job, db)) {
Catalog.getCurrentCatalog().getEditLog().logLoadQuorum(job);
} else {
errMsg = "process loading finished fail";
processCancelled(job, cancelType, errMsg, failedMsg);
}
break;
case FINISHED:
// Transactional jobs skip QUORUM_FINISHED and finish directly.
if (job.getTransactionId() > 0) {
idToPendingLoadJob.remove(jobId);
idToLoadingLoadJob.remove(jobId);
job.setProgress(100);
job.setLoadFinishTimeMs(System.currentTimeMillis());
// For sync delete jobs, pull the final partition version out of
// the committed transaction so the DeleteInfo records it.
if (job.isSyncDeleteJob()) {
TransactionState transactionState = Catalog.getCurrentGlobalTransactionMgr()
.getTransactionState(job.getDbId(), job.getTransactionId());
DeleteInfo deleteInfo = job.getDeleteInfo();
TableCommitInfo tableCommitInfo = transactionState.getTableCommitInfo(deleteInfo.getTableId());
PartitionCommitInfo partitionCommitInfo = tableCommitInfo.getPartitionCommitInfo(deleteInfo.getPartitionId());
deleteInfo.updatePartitionVersionInfo(partitionCommitInfo.getVersion(),
partitionCommitInfo.getVersionHash());
}
}
MetricRepo.COUNTER_LOAD_FINISHED.increase(1L);
idToLoadingLoadJob.remove(jobId);
idToQuorumFinishedLoadJob.remove(jobId);
job.setState(destState);
// Outstanding push tasks are obsolete once the job is finished.
for (PushTask pushTask : job.getPushTasks()) {
AgentTaskQueue.removePushTask(pushTask.getBackendId(), pushTask.getSignature(),
pushTask.getVersion(), pushTask.getVersionHash(),
pushTask.getPushType(), pushTask.getTaskType());
}
if (!job.isSyncDeleteJob()) {
job.clearRedundantInfoForHistoryJob();
}
Catalog.getCurrentCatalog().getEditLog().logLoadDone(job);
break;
case CANCELLED:
processCancelled(job, cancelType, errMsg, failedMsg);
break;
default:
Preconditions.checkState(false, "wrong job state: " + destState.name());
break;
}
} finally {
writeUnlock();
}
} finally {
db.writeUnlock();
}
}
// Success is judged by the state the job actually reached, since some
// branches may have cancelled it instead.
if (destState != job.getState()) {
result = false;
}
return result;
}
/**
 * Finalizes a (non-transactional) job whose replicas reached quorum:
 * releases loading partitions, bumps the visible version of every loaded
 * partition, recomputes index row counts, and moves the job into the
 * QUORUM_FINISHED map at 100% progress.
 *
 * The first pass only validates that every referenced table/partition still
 * exists, so no versions are bumped if anything is missing; the second pass
 * applies the changes.
 *
 * @return true on success; false if a referenced table or partition is gone
 */
private boolean processQuorumFinished(LoadJob job, Database db) {
long jobId = job.getId();
removeLoadingPartitions(job);
// Pass 1: validate all tables and to-be-loaded partitions still exist.
Map<Long, TableLoadInfo> idToTableLoadInfo = job.getIdToTableLoadInfo();
for (Entry<Long, TableLoadInfo> tableEntry : idToTableLoadInfo.entrySet()) {
long tableId = tableEntry.getKey();
OlapTable table = (OlapTable) db.getTable(tableId);
if (table == null) {
LOG.warn("table does not exist, id: {}", tableId);
return false;
}
TableLoadInfo tableLoadInfo = tableEntry.getValue();
for (Entry<Long, PartitionLoadInfo> partitionEntry : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
long partitionId = partitionEntry.getKey();
PartitionLoadInfo partitionLoadInfo = partitionEntry.getValue();
if (!partitionLoadInfo.isNeedLoad()) {
continue;
}
Partition partition = table.getPartition(partitionId);
if (partition == null) {
LOG.warn("partition does not exist, id: {}", partitionId);
return false;
}
}
}
// Pass 2: bump partition versions and refresh index row counts.
for (Entry<Long, TableLoadInfo> tableEntry : idToTableLoadInfo.entrySet()) {
long tableId = tableEntry.getKey();
OlapTable table = (OlapTable) db.getTable(tableId);
TableLoadInfo tableLoadInfo = tableEntry.getValue();
for (Entry<Long, PartitionLoadInfo> entry : tableLoadInfo.getIdToPartitionLoadInfo().entrySet()) {
long partitionId = entry.getKey();
Partition partition = table.getPartition(partitionId);
PartitionLoadInfo partitionLoadInfo = entry.getValue();
if (!partitionLoadInfo.isNeedLoad()) {
continue;
}
updatePartitionVersion(partition, partitionLoadInfo.getVersion(),
partitionLoadInfo.getVersionHash(), jobId);
// Index row count = sum over tablets of the max replica row count
// (replicas may disagree until all have caught up).
for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(IndexExtState.ALL)) {
long tableRowCount = 0L;
for (Tablet tablet : materializedIndex.getTablets()) {
long tabletRowCount = 0L;
for (Replica replica : tablet.getReplicas()) {
long replicaRowCount = replica.getRowCount();
if (replicaRowCount > tabletRowCount) {
tabletRowCount = replicaRowCount;
}
}
tableRowCount += tabletRowCount;
}
materializedIndex.setRowCount(tableRowCount);
}
}
}
idToPendingLoadJob.remove(jobId);
idToLoadingLoadJob.remove(jobId);
idToQuorumFinishedLoadJob.put(jobId, job);
job.setProgress(100);
job.setLoadFinishTimeMs(System.currentTimeMillis());
job.setState(JobState.QUORUM_FINISHED);
return true;
}
/**
 * Makes the loaded version visible on a partition.
 *
 * @param jobId load job id for logging; -1 when invoked from delete replay
 */
private void updatePartitionVersion(Partition partition, long version, long versionHash, long jobId) {
long partitionId = partition.getId();
partition.updateVisibleVersionAndVersionHash(version, versionHash);
LOG.info("update partition version success. version: {}, version hash: {}, job id: {}, partition id: {}",
version, versionHash, jobId, partitionId);
}
/**
 * Moves a job to CANCELLED: aborts its transaction, removes it from the
 * per-state map of its source state, records the fail message, clears
 * obsolete push tasks, and writes the cancel edit-log record.
 *
 * When cancelType is UNKNOWN, a default reason is derived from the source
 * state (e.g. PENDING -> ETL_SUBMIT_FAIL).
 *
 * @param failedMsg optional out-list; receives the abort error if the
 *                  transaction cannot be aborted
 * @return true on success; false if aborting the transaction failed
 */
private boolean processCancelled(LoadJob job, CancelType cancelType, String msg, List<String> failedMsg) {
long jobId = job.getId();
JobState srcState = job.getState();
CancelType tmpCancelType = CancelType.UNKNOWN;
// Abort the backing transaction first; an already-gone transaction is fine,
// but any other abort failure blocks the cancel.
try {
Catalog.getCurrentGlobalTransactionMgr().abortTransaction(
job.getDbId(),
job.getTransactionId(),
job.getFailMsg().toString());
} catch (TransactionNotFoundException e) {
LOG.info("transaction not found when try to abort it: {}", e.getTransactionId());
} catch (Exception e) {
LOG.info("errors while abort transaction", e);
if (failedMsg != null) {
failedMsg.add("Abort tranaction failed: " + e.getMessage());
}
return false;
}
// Remove from the source state's map and pick a default cancel reason.
switch (srcState) {
case PENDING:
idToPendingLoadJob.remove(jobId);
tmpCancelType = CancelType.ETL_SUBMIT_FAIL;
break;
case ETL:
idToEtlLoadJob.remove(jobId);
tmpCancelType = CancelType.ETL_RUN_FAIL;
break;
case LOADING:
removeLoadingPartitions(job);
idToLoadingLoadJob.remove(jobId);
tmpCancelType = CancelType.LOAD_RUN_FAIL;
break;
case QUORUM_FINISHED:
idToQuorumFinishedLoadJob.remove(jobId);
tmpCancelType = CancelType.LOAD_RUN_FAIL;
break;
default:
Preconditions.checkState(false, "wrong job state: " + srcState.name());
break;
}
CancelType newCancelType = cancelType;
if (newCancelType == CancelType.UNKNOWN) {
newCancelType = tmpCancelType;
}
FailMsg failMsg = new FailMsg(newCancelType, msg);
job.setFailMsg(failMsg);
job.setLoadFinishTimeMs(System.currentTimeMillis());
job.setState(JobState.CANCELLED);
// Jobs that reached LOADING may have queued push tasks; drop them.
if (srcState == JobState.LOADING || srcState == JobState.QUORUM_FINISHED) {
for (PushTask pushTask : job.getPushTasks()) {
AgentTaskQueue.removePushTask(pushTask.getBackendId(), pushTask.getSignature(),
pushTask.getVersion(), pushTask.getVersionHash(),
pushTask.getPushType(), pushTask.getTaskType());
}
}
job.clearRedundantInfoForHistoryJob();
Catalog.getCurrentCatalog().getEditLog().logLoadCancel(job);
return true;
}
/**
 * Atomically reserves a set of partitions for loading. Either every id is
 * added to the loading set, or — if any of them is already being loaded —
 * none are and false is returned.
 *
 * @param partitionIds partitions the caller wants to load into
 * @return true if all partitions were reserved, false on any conflict
 */
public boolean addLoadingPartitions(Set<Long> partitionIds) {
    writeLock();
    try {
        // Reject the whole batch on the first conflict.
        for (Long candidate : partitionIds) {
            boolean alreadyLoading = loadingPartitionIds.contains(candidate);
            if (!alreadyLoading) {
                continue;
            }
            LOG.info("partition {} is loading", candidate);
            return false;
        }
        loadingPartitionIds.addAll(partitionIds);
        return true;
    } finally {
        writeUnlock();
    }
}
/**
 * Re-registers a (non-transactional) job's to-be-loaded partitions in the
 * loading set, e.g. after a recovery. Transactional jobs (transactionId > 0)
 * do not use the loading-partition reservation and are skipped.
 */
private void recoverLoadingPartitions(LoadJob job) {
if (job.getTransactionId() > 0) {
return;
}
for (TableLoadInfo tableLoadInfo : job.getIdToTableLoadInfo().values()) {
Map<Long, PartitionLoadInfo> idToPartitionLoadInfo = tableLoadInfo.getIdToPartitionLoadInfo();
for (Entry<Long, PartitionLoadInfo> entry : idToPartitionLoadInfo.entrySet()) {
PartitionLoadInfo partitionLoadInfo = entry.getValue();
if (partitionLoadInfo.isNeedLoad()) {
loadingPartitionIds.add(entry.getKey());
}
}
}
}
/**
 * Releases a set of partition reservations under the write lock.
 */
public void removeLoadingPartitions(Set<Long> partitionIds) {
writeLock();
try {
loadingPartitionIds.removeAll(partitionIds);
} finally {
writeUnlock();
}
}
/**
 * Releases the partition reservations held by a job (every partition the job
 * marked as needing load). Callers must hold the write lock — this private
 * variant does not lock itself.
 */
private void removeLoadingPartitions(LoadJob job) {
for (TableLoadInfo tableLoadInfo : job.getIdToTableLoadInfo().values()) {
Map<Long, PartitionLoadInfo> idToPartitionLoadInfo = tableLoadInfo.getIdToPartitionLoadInfo();
for (Entry<Long, PartitionLoadInfo> entry : idToPartitionLoadInfo.entrySet()) {
PartitionLoadInfo partitionLoadInfo = entry.getValue();
if (partitionLoadInfo.isNeedLoad()) {
loadingPartitionIds.remove(entry.getKey());
}
}
}
}
/**
 * Checks whether no unfinished load job still touches the given partition.
 *
 * @param partitionId partition to check
 * @param quorumFinishedLoadJobs if non-null, jobs in QUORUM_FINISHED state that touch the
 *        partition are collected here and tolerated; if null, a QUORUM_FINISHED job counts
 *        as "not finished" and makes the check fail
 * @return true if no pending/etl/loading job (and, when collecting, no other unfinished job)
 *         references the partition
 */
public boolean checkPartitionLoadFinished(long partitionId, List<LoadJob> quorumFinishedLoadJobs) {
    readLock();
    try {
        for (JobState state : JobState.values()) {
            // Terminal states can never block the partition.
            if (state == JobState.FINISHED || state == JobState.CANCELLED) {
                continue;
            }
            List<LoadJob> loadJobs = this.getLoadJobs(state);
            for (LoadJob loadJob : loadJobs) {
                Preconditions.checkNotNull(loadJob.getIdToTableLoadInfo());
                for (TableLoadInfo tableLoadInfo : loadJob.getIdToTableLoadInfo().values()) {
                    if (tableLoadInfo.getIdToPartitionLoadInfo().containsKey(partitionId)) {
                        if (state == JobState.QUORUM_FINISHED) {
                            if (quorumFinishedLoadJobs != null) {
                                // Caller opted to collect quorum-finished jobs instead of failing.
                                quorumFinishedLoadJobs.add(loadJob);
                            } else {
                                return false;
                            }
                        } else {
                            // PENDING / ETL / LOADING job still owns the partition.
                            return false;
                        }
                    }
                }
            }
        }
        return true;
    } finally {
        readUnlock();
    }
}
/**
 * Registers a DeleteInfo under its database without taking the manager lock
 * ("unprotect" = the caller must already hold the write lock, e.g. during image load).
 * If the delete is QUORUM_FINISHED and carries an async job, that job is re-registered
 * so it can be driven to completion later.
 */
public void unprotectAddDeleteInfo(DeleteInfo deleteInfo) {
    long dbId = deleteInfo.getDbId();
    // computeIfAbsent replaces the previous get / null-check / put sequence.
    List<DeleteInfo> deleteInfos = dbToDeleteInfos.computeIfAbsent(dbId, k -> Lists.newArrayList());
    deleteInfos.add(deleteInfo);
    if (deleteInfo.getAsyncDeleteJob() != null && deleteInfo.getState() == DeleteState.QUORUM_FINISHED) {
        AsyncDeleteJob asyncDeleteJob = deleteInfo.getAsyncDeleteJob();
        idToQuorumFinishedDeleteJob.put(asyncDeleteJob.getJobId(), asyncDeleteJob);
        LOG.info("unprotected add asyncDeleteJob when load image: {}", asyncDeleteJob.getJobId());
    }
}
/**
 * Applies a replayed delete to catalog state without taking the manager lock
 * ("unprotect" = caller must hold both the db lock and this manager's write lock).
 * Bumps the partition version, updates replica versions recorded in the edit log,
 * and re-registers delete metadata / pending async jobs.
 */
public void unprotectDelete(DeleteInfo deleteInfo, Database db) {
    OlapTable table = (OlapTable) db.getTable(deleteInfo.getTableId());
    Partition partition = table.getPartition(deleteInfo.getPartitionId());
    // -1: no new version hash component supplied for this update path.
    updatePartitionVersion(partition, deleteInfo.getPartitionVersion(), deleteInfo.getPartitionVersionHash(), -1);
    List<ReplicaPersistInfo> replicaInfos = deleteInfo.getReplicaPersistInfos();
    if (replicaInfos != null) {
        for (ReplicaPersistInfo info : replicaInfos) {
            MaterializedIndex index = partition.getIndex(info.getIndexId());
            Tablet tablet = index.getTablet(info.getTabletId());
            Replica replica = tablet.getReplicaById(info.getReplicaId());
            replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
                    info.getDataSize(), info.getRowCount());
        }
    }
    // Older images (< VERSION_11) did not persist per-db delete info, so only record it
    // for new-enough journal versions to keep replay deterministic.
    if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_11) {
        long dbId = deleteInfo.getDbId();
        List<DeleteInfo> deleteInfos = dbToDeleteInfos.get(dbId);
        if (deleteInfos == null) {
            deleteInfos = Lists.newArrayList();
            dbToDeleteInfos.put(dbId, deleteInfos);
        }
        deleteInfos.add(deleteInfo);
    }
    if (deleteInfo.getAsyncDeleteJob() != null) {
        AsyncDeleteJob asyncDeleteJob = deleteInfo.getAsyncDeleteJob();
        idToQuorumFinishedDeleteJob.put(asyncDeleteJob.getJobId(), asyncDeleteJob);
        LOG.info("unprotected add asyncDeleteJob: {}", asyncDeleteJob.getJobId());
    }
}
/**
 * Replays the completion of an async delete job: applies all recorded replica version
 * updates (tolerating metadata that has since disappeared), then unregisters the job.
 * Lock order: db write lock, then this manager's write lock.
 */
public void replayFinishAsyncDeleteJob(AsyncDeleteJob deleteJob, Catalog catalog) {
    Database db = catalog.getDb(deleteJob.getDbId());
    db.writeLock();
    try {
        writeLock();
        try {
            Map<Long, ReplicaPersistInfo> replicaInfos = deleteJob.getReplicaPersistInfos();
            if (replicaInfos != null) {
                for (ReplicaPersistInfo info : replicaInfos.values()) {
                    OlapTable table = (OlapTable) db.getTable(info.getTableId());
                    if (table == null) {
                        // BUGFIX: previously logged info.getIndexId() as the table id.
                        LOG.warn("the table[{}] is missing", info.getTableId());
                        continue;
                    }
                    Partition partition = table.getPartition(info.getPartitionId());
                    if (partition == null) {
                        // BUGFIX: previously logged info.getIndexId() as the partition id.
                        LOG.warn("the partition[{}] is missing", info.getPartitionId());
                        continue;
                    }
                    MaterializedIndex index = partition.getIndex(info.getIndexId());
                    if (index == null) {
                        LOG.warn("the index[{}] is missing", info.getIndexId());
                        continue;
                    }
                    Tablet tablet = index.getTablet(info.getTabletId());
                    if (tablet == null) {
                        LOG.warn("the tablet[{}] is missing", info.getTabletId());
                        continue;
                    }
                    Replica replica = tablet.getReplicaById(info.getReplicaId());
                    if (replica == null) {
                        LOG.warn("the replica[{}] is missing", info.getReplicaId());
                        continue;
                    }
                    replica.updateVersionInfo(info.getVersion(), info.getVersionHash(),
                            info.getDataSize(), info.getRowCount());
                }
            }
        } finally {
            writeUnlock();
        }
    } finally {
        db.writeUnlock();
    }
    removeDeleteJobAndSetState(deleteJob);
    LOG.info("unprotected finish asyncDeleteJob: {}", deleteJob.getJobId());
}
/**
 * Edit-log replay entry point for a delete: takes the db write lock and the manager
 * write lock (in that order, matching replayFinishAsyncDeleteJob) and delegates to
 * {@link #unprotectDelete}.
 */
public void replayDelete(DeleteInfo deleteInfo, Catalog catalog) {
    Database db = catalog.getDb(deleteInfo.getDbId());
    db.writeLock();
    try {
        writeLock();
        try {
            unprotectDelete(deleteInfo, db);
        } finally {
            writeUnlock();
        }
    } finally {
        db.writeUnlock();
    }
}
/**
 * Validates the predicates of a DELETE statement against the target table/partition and,
 * if {@code deleteConditions} is non-null, renders each predicate into a display string.
 *
 * Checks performed: partition is NORMAL; every referenced column exists and is a key
 * column; binary-predicate values parse as the column type; every visible materialized
 * index contains the column (key column required for duplicate-key tables).
 *
 * @param deleteConditions output list of human-readable condition strings; may be null
 * @param preCheck currently unused — kept for interface compatibility
 *                 (NOTE(review): confirm whether callers still need this flag)
 * @throws DdlException if any validation fails
 */
private void checkDeleteV2(OlapTable table, Partition partition, List<Predicate> conditions, List<String> deleteConditions, boolean preCheck)
        throws DdlException {
    // Delete is not allowed while the partition is under schema change / rollup.
    PartitionState state = partition.getState();
    if (state != PartitionState.NORMAL) {
        throw new DdlException("Partition[" + partition.getName() + "]' state is not NORMAL: " + state.name());
    }
    // Case-insensitive lookup of base schema columns.
    Map<String, Column> nameToColumn = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    for (Column column : table.getBaseSchema()) {
        nameToColumn.put(column.getName(), column);
    }
    for (Predicate condition : conditions) {
        SlotRef slotRef = null;
        if (condition instanceof BinaryPredicate) {
            BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
            slotRef = (SlotRef) binaryPredicate.getChild(0);
        } else if (condition instanceof IsNullPredicate) {
            IsNullPredicate isNullPredicate = (IsNullPredicate) condition;
            slotRef = (SlotRef) isNullPredicate.getChild(0);
        }
        // BUGFIX: an unsupported predicate type previously left slotRef == null and
        // crashed with a NullPointerException below; report a proper error instead.
        if (slotRef == null) {
            throw new DdlException("Delete condition must be a binary predicate or an IS [NOT] NULL predicate: "
                    + condition);
        }
        String columnName = slotRef.getColumnName();
        if (!nameToColumn.containsKey(columnName)) {
            ErrorReport.reportDdlException(ErrorCode.ERR_BAD_FIELD_ERROR, columnName, table.getName());
        }
        Column column = nameToColumn.get(columnName);
        if (!column.isKey()) {
            throw new DdlException("Column[" + columnName + "] is not key column");
        }
        // Verify the literal value parses as the column's type.
        if (condition instanceof BinaryPredicate) {
            String value = null;
            try {
                BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
                value = ((LiteralExpr) binaryPredicate.getChild(1)).getStringValue();
                LiteralExpr.create(value, Type.fromPrimitiveType(column.getDataType()));
            } catch (AnalysisException e) {
                throw new DdlException("Invalid column value[" + value + "]");
            }
        }
        // Normalize the slot to the canonical column name casing.
        slotRef.setCol(column.getName());
    }
    // Every visible materialized index must also be able to evaluate the conditions.
    Map<Long, List<Column>> indexIdToSchema = table.getIndexIdToSchema();
    for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.VISIBLE)) {
        Map<String, Column> indexColNameToColumn = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
        for (Column column : indexIdToSchema.get(index.getId())) {
            indexColNameToColumn.put(column.getName(), column);
        }
        String indexName = table.getIndexNameById(index.getId());
        for (Predicate condition : conditions) {
            String columnName = null;
            if (condition instanceof BinaryPredicate) {
                BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
                columnName = ((SlotRef) binaryPredicate.getChild(0)).getColumnName();
            } else if (condition instanceof IsNullPredicate) {
                IsNullPredicate isNullPredicate = (IsNullPredicate) condition;
                columnName = ((SlotRef) isNullPredicate.getChild(0)).getColumnName();
            }
            Column column = indexColNameToColumn.get(columnName);
            if (column == null) {
                ErrorReport.reportDdlException(ErrorCode.ERR_BAD_FIELD_ERROR, columnName, indexName);
            }
            if (table.getKeysType() == KeysType.DUP_KEYS && !column.isKey()) {
                throw new DdlException("Column[" + columnName + "] is not key column in index[" + indexName + "]");
            }
        }
    }
    // Rendering is optional; validation above still ran.
    if (deleteConditions == null) {
        return;
    }
    for (Predicate condition : conditions) {
        if (condition instanceof BinaryPredicate) {
            BinaryPredicate binaryPredicate = (BinaryPredicate) condition;
            SlotRef slotRef = (SlotRef) binaryPredicate.getChild(0);
            String columnName = slotRef.getColumnName();
            StringBuilder sb = new StringBuilder();
            sb.append(columnName).append(" ").append(binaryPredicate.getOp().name()).append(" \"")
                    .append(((LiteralExpr) binaryPredicate.getChild(1)).getStringValue()).append("\"");
            deleteConditions.add(sb.toString());
        } else if (condition instanceof IsNullPredicate) {
            IsNullPredicate isNullPredicate = (IsNullPredicate) condition;
            SlotRef slotRef = (SlotRef) isNullPredicate.getChild(0);
            String columnName = slotRef.getColumnName();
            StringBuilder sb = new StringBuilder();
            sb.append(columnName);
            if (isNullPredicate.isNotNull()) {
                sb.append(" IS NOT NULL");
            } else {
                sb.append(" IS NULL");
            }
            deleteConditions.add(sb.toString());
        }
    }
}
/**
 * Atomically verifies no sync delete is running on the partition and marks it as
 * under-delete.
 *
 * @return true if this call added the marker (partition was not already marked)
 * @throws DdlException if another sync delete job is already running on the partition
 */
// NOTE(review): checkHasRunningSyncDeleteJob acquires the read lock while we hold the
// write lock here — this relies on the lock implementation permitting read acquisition
// by the write-lock holder (true for ReentrantReadWriteLock); confirm the lock type.
private boolean checkAndAddRunningSyncDeleteJob(long partitionId, String partitionName) throws DdlException {
    writeLock();
    try {
        checkHasRunningSyncDeleteJob(partitionId, partitionName);
        return partitionUnderDelete.add(partitionId);
    } finally {
        writeUnlock();
    }
}
/**
 * Fails if a synchronous delete job is currently marked as running on the partition.
 *
 * @throws DdlException if the partition is in {@code partitionUnderDelete}
 */
private void checkHasRunningSyncDeleteJob(long partitionId, String partitionName) throws DdlException {
    readLock();
    try {
        boolean underDelete = partitionUnderDelete.contains(partitionId);
        if (underDelete) {
            throw new DdlException("Partition[" + partitionName + "] has running delete job. See 'SHOW DELETE'");
        }
    } finally {
        readUnlock();
    }
}
/**
 * Fails if an asynchronous delete job is still active on the partition, either as a
 * quorum-finished async job or as a delete-backed load job still in LOADING state.
 *
 * @throws DdlException if such a job exists
 */
private void checkHasRunningAsyncDeleteJob(long partitionId, String partitionName) throws DdlException {
    readLock();
    try {
        for (AsyncDeleteJob job : idToQuorumFinishedDeleteJob.values()) {
            if (job.getPartitionId() == partitionId) {
                throw new DdlException("Partition[" + partitionName + "] has running async delete job. "
                        + "See 'SHOW DELETE'");
            }
        }
        // Iterate values directly instead of keySet + get.
        for (List<LoadJob> deleteJobs : dbToDeleteJobs.values()) {
            for (LoadJob loadJob : deleteJobs) {
                if (loadJob.getState() == JobState.LOADING
                        && loadJob.getDeleteInfo().getPartitionId() == partitionId) {
                    throw new DdlException("Partition[" + partitionName + "] has running async delete job. "
                            + "See 'SHOW DELETE'");
                }
            }
        }
    } finally {
        readUnlock();
    }
}
/**
 * Fails if any delete job — synchronous or asynchronous — is running on the partition.
 * NOTE(review): method name has a typo ("Hash" should be "Has"); kept as-is because it
 * is public and callers outside this file may reference it.
 *
 * @throws DdlException if a delete job is running on the partition
 */
public void checkHashRunningDeleteJob(long partitionId, String partitionName) throws DdlException {
    checkHasRunningSyncDeleteJob(partitionId, partitionName);
    checkHasRunningAsyncDeleteJob(partitionId, partitionName);
}
/**
 * Executes a DELETE statement synchronously: validates the target, creates a
 * delete-backed LoadJob with its own transaction, then polls until the job reaches
 * FINISHED/CANCELLED or times out.
 *
 * @throws DdlException on validation failure, timeout, or any error during execution
 */
public void delete(DeleteStmt stmt) throws DdlException {
    String dbName = stmt.getDbName();
    String tableName = stmt.getTableName();
    String partitionName = stmt.getPartitionName();
    List<Predicate> conditions = stmt.getDeleteConditions();
    Database db = Catalog.getCurrentCatalog().getDb(dbName);
    if (db == null) {
        throw new DdlException("Db does not exist. name: " + dbName);
    }
    long tableId = -1;
    long partitionId = -1;
    LoadJob loadDeleteJob = null;
    boolean addRunningPartition = false;
    db.readLock();
    try {
        Table table = db.getTable(tableName);
        if (table == null) {
            throw new DdlException("Table does not exist. name: " + tableName);
        }
        // Delete only applies to OLAP tables in NORMAL state.
        if (table.getType() != TableType.OLAP) {
            throw new DdlException("Not olap type table. type: " + table.getType().name());
        }
        OlapTable olapTable = (OlapTable) table;
        if (olapTable.getState() != OlapTableState.NORMAL) {
            throw new DdlException("Table's state is not normal: " + tableName);
        }
        tableId = olapTable.getId();
        if (partitionName == null) {
            // Range-partitioned tables require an explicit partition; single-partition
            // tables use the partition named after the table itself.
            if (olapTable.getPartitionInfo().getType() == PartitionType.RANGE) {
                throw new DdlException("This is a range partitioned table."
                        + " You should specify partition in delete stmt");
            } else {
                partitionName = olapTable.getName();
            }
        }
        Partition partition = olapTable.getPartition(partitionName);
        if (partition == null) {
            throw new DdlException("Partition does not exist. name: " + partitionName);
        }
        partitionId = partition.getId();
        // Validate predicates and render their display strings in one pass.
        List<String> deleteConditions = Lists.newArrayList();
        checkDeleteV2(olapTable, partition, conditions,
                deleteConditions, true);
        // Mark the partition as under-delete to block concurrent sync deletes.
        addRunningPartition = checkAndAddRunningSyncDeleteJob(partitionId, partitionName);
        long jobId = Catalog.getCurrentCatalog().getNextId();
        String jobLabel = "delete_" + UUID.randomUUID();
        // Partition version/hash (-1, 0) are filled in when the job finishes.
        DeleteInfo deleteInfo = new DeleteInfo(db.getId(), tableId, tableName,
                partition.getId(), partitionName,
                -1, 0, deleteConditions);
        loadDeleteJob = new LoadJob(jobId, db.getId(), tableId,
                partitionId, jobLabel, olapTable.getIndexIdToSchemaHash(), conditions, deleteInfo);
        Map<Long, TabletLoadInfo> idToTabletLoadInfo = Maps.newHashMap();
        // Every tablet of every visible index participates with an empty payload.
        for (MaterializedIndex materializedIndex : partition.getMaterializedIndices(IndexExtState.VISIBLE)) {
            for (Tablet tablet : materializedIndex.getTablets()) {
                long tabletId = tablet.getId();
                TabletLoadInfo tabletLoadInfo = new TabletLoadInfo("", -1);
                idToTabletLoadInfo.put(tabletId, tabletLoadInfo);
            }
        }
        loadDeleteJob.setIdToTabletLoadInfo(idToTabletLoadInfo);
        loadDeleteJob.setState(JobState.LOADING);
        long transactionId = Catalog.getCurrentGlobalTransactionMgr().beginTransaction(db.getId(),
                Lists.newArrayList(table.getId()), jobLabel,
                new TxnCoordinator(TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
                LoadJobSourceType.FRONTEND,
                Config.stream_load_default_timeout_second);
        loadDeleteJob.setTransactionId(transactionId);
        addLoadJob(loadDeleteJob, db);
    } catch (Throwable t) {
        LOG.warn("error occurred during prepare delete", t);
        throw new DdlException(t.getMessage(), t);
    } finally {
        // NOTE(review): this removes the under-delete marker even on the SUCCESS path,
        // before the wait loop below — the marker only guards the preparation phase.
        // Confirm this is intended; otherwise concurrent deletes could slip in while
        // the job is still LOADING.
        if (addRunningPartition) {
            writeLock();
            try {
                partitionUnderDelete.remove(partitionId);
            } finally {
                writeUnlock();
            }
        }
        db.readUnlock();
    }
    try {
        long startDeleteTime = System.currentTimeMillis();
        long timeout = loadDeleteJob.getDeleteJobTimeout();
        // Poll once per second until the job reaches a terminal state or times out.
        while (true) {
            db.writeLock();
            try {
                if (loadDeleteJob.getState() == JobState.FINISHED
                        || loadDeleteJob.getState() == JobState.CANCELLED) {
                    break;
                }
                if (System.currentTimeMillis() - startDeleteTime > timeout) {
                    TransactionState transactionState = Catalog.getCurrentGlobalTransactionMgr().getTransactionState(loadDeleteJob.getDbId(),
                            loadDeleteJob.getTransactionId());
                    // Only cancel if the transaction has not started committing yet;
                    // if cancel fails (job progressed meanwhile) keep waiting.
                    if (transactionState.getTransactionStatus() == TransactionStatus.PREPARE) {
                        boolean isSuccess = cancelLoadJob(loadDeleteJob, CancelType.TIMEOUT, "load delete job timeout");
                        if (isSuccess) {
                            throw new DdlException("timeout when waiting delete");
                        }
                    }
                }
            } finally {
                db.writeUnlock();
            }
            // NOTE(review): InterruptedException lands in the generic catch below and is
            // converted to a DdlException without re-interrupting the thread.
            Thread.sleep(1000);
        }
    } catch (Exception e) {
        String failMsg = "delete unknown, " + e.getMessage();
        LOG.warn(failMsg, e);
        throw new DdlException(failMsg);
    } finally {
        writeLock();
        try {
            partitionUnderDelete.remove(partitionId);
        } finally {
            writeUnlock();
        }
    }
}
/**
 * Returns one single-element row (the tablet id) per tablet involved in the given
 * async delete job, or an empty list if the job is unknown.
 */
public List<List<Comparable>> getAsyncDeleteJobInfo(long jobId) {
    LinkedList<List<Comparable>> infos = new LinkedList<List<Comparable>>();
    readLock();
    try {
        LoadJob job = null;
        // Stop scanning as soon as the job is found (previously the remaining
        // databases were still scanned after a hit), and iterate values directly
        // instead of keySet + get.
        SEARCH:
        for (List<LoadJob> loadJobs : dbToDeleteJobs.values()) {
            for (LoadJob loadJob : loadJobs) {
                if (loadJob.getId() == jobId) {
                    job = loadJob;
                    break SEARCH;
                }
            }
        }
        if (job == null) {
            return infos;
        }
        for (Long tabletId : job.getIdToTabletLoadInfo().keySet()) {
            List<Comparable> info = Lists.newArrayList();
            info.add(tabletId);
            infos.add(info);
        }
    } finally {
        readUnlock();
    }
    return infos;
}
/**
 * Counts the delete jobs of the given database that are in the given state.
 * Returns 0 if the database has no delete jobs.
 */
public long getDeleteJobNumByState(long dbId, JobState state) {
    readLock();
    try {
        List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
        if (deleteJobs == null) {
            return 0;
        }
        long matched = 0;
        for (LoadJob candidate : deleteJobs) {
            if (candidate.getState() == state) {
                matched++;
            }
        }
        return matched;
    } finally {
        readUnlock();
    }
}
/**
 * Returns the number of delete jobs recorded for the given database (0 if none).
 */
public int getDeleteInfoNum(long dbId) {
    readLock();
    try {
        List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
        return (deleteJobs == null) ? 0 : deleteJobs.size();
    } finally {
        readUnlock();
    }
}
/**
 * Builds the SHOW DELETE result rows for a database, filtered by the caller's LOAD
 * privilege on each table.
 *
 * @param forUser if true, internal columns (job id, table id, partition id,
 *        partition version/hash) are omitted and the sort key shifts accordingly
 * @return rows sorted by create time (column 4 internally, column 2 for users)
 */
public List<List<Comparable>> getDeleteInfosByDb(long dbId, boolean forUser) {
    LinkedList<List<Comparable>> infos = new LinkedList<List<Comparable>>();
    Database db = Catalog.getCurrentCatalog().getDb(dbId);
    if (db == null) {
        return infos;
    }
    String dbName = db.getFullName();
    readLock();
    try {
        List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
        if (deleteJobs == null) {
            return infos;
        }
        for (LoadJob loadJob : deleteJobs) {
            DeleteInfo deleteInfo = loadJob.getDeleteInfo();
            // Skip rows the current user may not see.
            if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName,
                    deleteInfo.getTableName(),
                    PrivPredicate.LOAD)) {
                continue;
            }
            List<Comparable> info = Lists.newArrayList();
            if (!forUser) {
                info.add(loadJob.getId());
                info.add(deleteInfo.getTableId());
            }
            info.add(deleteInfo.getTableName());
            if (!forUser) {
                info.add(deleteInfo.getPartitionId());
            }
            info.add(deleteInfo.getPartitionName());
            info.add(TimeUtils.longToTimeString(deleteInfo.getCreateTimeMs()));
            String conds = Joiner.on(", ").join(deleteInfo.getDeleteConditions());
            info.add(conds);
            if (!forUser) {
                info.add(deleteInfo.getPartitionVersion());
                info.add(deleteInfo.getPartitionVersionHash());
            }
            // A delete job in LOADING state is surfaced as "DELETING" to users.
            if (loadJob.getState() == JobState.LOADING) {
                info.add("DELETING");
            } else {
                info.add(loadJob.getState().name());
            }
            infos.add(info);
        }
    } finally {
        readUnlock();
    }
    // Sort by create time; its column index depends on whether internal columns exist.
    int sortIndex;
    if (!forUser) {
        sortIndex = 4;
    } else {
        sortIndex = 2;
    }
    ListComparator<List<Comparable>> comparator = new ListComparator<List<Comparable>>(sortIndex);
    Collections.sort(infos, comparator);
    return infos;
}
/**
 * Drops delete-info records older than {@code Config.label_keep_max_second}, and
 * removes database entries whose lists become empty.
 */
public void removeOldDeleteJobs() {
    long nowMs = System.currentTimeMillis();
    writeLock();
    try {
        Iterator<Map.Entry<Long, List<DeleteInfo>>> dbIter = dbToDeleteInfos.entrySet().iterator();
        while (dbIter.hasNext()) {
            List<DeleteInfo> deleteInfos = dbIter.next().getValue();
            // Expire individual records past the retention window.
            deleteInfos.removeIf(deleteInfo ->
                    (nowMs - deleteInfo.getCreateTimeMs()) / 1000 > Config.label_keep_max_second);
            // Drop the db entry entirely once its list is empty.
            if (deleteInfos.isEmpty()) {
                dbIter.remove();
            }
        }
    } finally {
        writeUnlock();
    }
}
/**
 * Removes all delete-info records of a database (called when the database is dropped).
 */
public void removeDbDeleteJob(long dbId) {
    writeLock();
    try {
        dbToDeleteInfos.remove(dbId);
    } finally {
        writeUnlock();
    }
}
/**
 * Returns the most recently finished (FINISHED or QUORUM_FINISHED) load job of the
 * database, judged by load-finish time, or null if there is none.
 * NOTE: method name typo ("Lastest") is kept — it is public API.
 */
public LoadJob getLastestFinishedLoadJob(long dbId) {
    LoadJob latest = null;
    readLock();
    try {
        List<LoadJob> jobs = dbToLoadJobs.get(dbId);
        if (jobs != null) {
            long latestFinishMs = Long.MIN_VALUE;
            for (LoadJob candidate : jobs) {
                boolean finished = candidate.getState() == JobState.QUORUM_FINISHED
                        || candidate.getState() == JobState.FINISHED;
                if (finished && candidate.getLoadFinishTimeMs() > latestFinishMs) {
                    latestFinishMs = candidate.getLoadFinishTimeMs();
                    latest = candidate;
                }
            }
        }
    } finally {
        readUnlock();
    }
    return latest;
}
/**
 * Returns the DeleteInfo of the most recently created FINISHED delete job of the
 * database, or null if there is none.
 * NOTE: method name typo ("Lastest") is kept — it is public API.
 */
public DeleteInfo getLastestFinishedDeleteInfo(long dbId) {
    DeleteInfo latest = null;
    readLock();
    try {
        List<LoadJob> deleteJobs = dbToDeleteJobs.get(dbId);
        if (deleteJobs != null) {
            long latestCreateMs = Long.MIN_VALUE;
            for (LoadJob candidate : deleteJobs) {
                boolean newerAndFinished = candidate.getState() == JobState.FINISHED
                        && candidate.getDeleteInfo().getCreateTimeMs() > latestCreateMs;
                if (newerAndFinished) {
                    latestCreateMs = candidate.getDeleteInfo().getCreateTimeMs();
                    latest = candidate.getDeleteInfo();
                }
            }
        }
    } finally {
        readUnlock();
    }
    return latest;
}
/**
 * Counts load jobs of the given ETL type, optionally restricted to one state.
 *
 * @param state null means "any state"; CANCELLED/FINISHED fall back to scanning the
 *        full job map because terminal jobs are only kept there
 * @return the number of matching jobs
 */
public Integer getLoadJobNumByTypeAndState(EtlJobType type, JobState state) {
    int num = 0;
    readLock();
    try {
        // Pick the smallest map that is guaranteed to contain all candidates.
        Map<Long, LoadJob> jobMap = null;
        if (state == null || state == JobState.CANCELLED || state == JobState.FINISHED) {
            jobMap = idToLoadJob;
        } else {
            switch (state) {
                case PENDING:
                    jobMap = idToPendingLoadJob;
                    break;
                case ETL:
                    jobMap = idToEtlLoadJob;
                    break;
                case LOADING:
                    jobMap = idToLoadingLoadJob;
                    break;
                case QUORUM_FINISHED:
                    jobMap = idToQuorumFinishedLoadJob;
                    break;
                default:
                    break;
            }
        }
        Preconditions.checkNotNull(jobMap);
        for (LoadJob job : jobMap.values()) {
            if (job.getEtlJobType() == type) {
                // Re-check the state: the full map mixes all states, and a job may have
                // migrated between maps since selection.
                if (state != null && job.getState() != state) {
                    continue;
                }
                ++num;
            }
        }
    } finally {
        readUnlock();
    }
    return num;
}
} | |
Do we need to new up `DataLakeFileInputStreamOptions` here? If it's null shouldn't `options.isUpn()` be null? Could we just make the if check below `if (options != null && options.isUpn() != null) {` | public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options, Context context) {
Context newContext;
// NOTE(review): a default options object is created solely so isUpn() can be read;
// a null check (options != null && options.isUpn() != null) would avoid the allocation.
options = options == null ? new DataLakeFileInputStreamOptions() : options;
if (options.isUpn() != null) {
    // Propagate the caller's UPN preference as the x-ms-upn request header via the
    // pipeline's add-headers-from-context mechanism.
    HttpHeaders headers = new HttpHeaders();
    headers.set("x-ms-upn", options.isUpn() ? "true" : "false");
    if (context == null) {
        newContext = new Context(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers);
    } else {
        newContext = context.addData(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers);
    }
} else {
    // No UPN preference: pass no context at all.
    newContext = null;
}
// Delegate to the underlying blob client and re-wrap the stream with path properties.
BlobInputStreamOptions convertedOptions = Transforms.toBlobInputStreamOptions(options);
BlobInputStream inputStream = blockBlobClient.openInputStream(convertedOptions, newContext);
return new InternalDataLakeFileOpenInputStreamResult(inputStream,
    Transforms.toPathProperties(inputStream.getProperties()));
} | options = options == null ? new DataLakeFileInputStreamOptions() : options; | public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options, Context context) {
// Attach the x-ms-upn header (if the caller expressed a UPN preference) without
// allocating a default options object; the supplier handles options == null.
context = BuilderHelper.addUpnHeader(() -> (options == null) ? null : options.isUpn(), context);
// Delegate to the underlying blob client and re-wrap the stream with path properties.
BlobInputStreamOptions convertedOptions = Transforms.toBlobInputStreamOptions(options);
BlobInputStream inputStream = blockBlobClient.openInputStream(convertedOptions, context);
return new InternalDataLakeFileOpenInputStreamResult(inputStream,
    Transforms.toPathProperties(inputStream.getProperties()));
} | class DataLakeFileClient extends DataLakePathClient {
/**
* Indicates the maximum number of bytes that can be sent in a call to upload.
*/
private static final long MAX_APPEND_FILE_BYTES = DataLakeFileAsyncClient.MAX_APPEND_FILE_BYTES;
private static final ClientLogger LOGGER = new ClientLogger(DataLakeFileClient.class);
private final DataLakeFileAsyncClient dataLakeFileAsyncClient;
DataLakeFileClient(DataLakeFileAsyncClient pathAsyncClient, BlockBlobClient blockBlobClient) {
super(pathAsyncClient, blockBlobClient);
this.dataLakeFileAsyncClient = pathAsyncClient;
}
private DataLakeFileClient(DataLakePathClient dataLakePathClient) {
super(dataLakePathClient.dataLakePathAsyncClient, dataLakePathClient.blockBlobClient);
this.dataLakeFileAsyncClient = new DataLakeFileAsyncClient(dataLakePathClient.dataLakePathAsyncClient);
}
/**
* Gets the URL of the file represented by this client on the Data Lake service.
*
* @return the URL.
*/
public String getFileUrl() {
return getPathUrl();
}
/**
* Gets the path of this file, not including the name of the resource itself.
*
* @return The path of the file.
*/
public String getFilePath() {
return getObjectPath();
}
/**
* Gets the name of this file, not including its full path.
*
* @return The name of the file.
*/
public String getFileName() {
return getObjectName();
}
/**
* Creates a new {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
*
* @param customerProvidedKey the {@link CustomerProvidedKey} for the blob,
* pass {@code null} to use no customer provided key.
* @return a {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
*/
public DataLakeFileClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
return new DataLakeFileClient(dataLakeFileAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey),
blockBlobClient.getCustomerProvidedKeyClient(Transforms.toBlobCustomerProvidedKey(customerProvidedKey)));
}
/**
 * Deletes this file.
 *
 * <p>Equivalent to {@code deleteWithResponse(null, null, Context.NONE)}; see the
 * Azure Docs "Delete Path" REST reference for service-side semantics.</p>
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void delete() {
    deleteWithResponse(null, null, Context.NONE).getValue();
}
/**
 * Deletes this file, returning the raw service response.
 *
 * @param requestConditions {@link DataLakeRequestConditions} to apply (lease id,
 *        conditional headers); may be null.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response containing status code and HTTP headers.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout,
    Context context) {
    // Blocks on the async client, honoring the optional timeout.
    Mono<Response<Void>> response = dataLakePathAsyncClient.deleteWithResponse(null, requestConditions, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
 * Deletes this file if it exists.
 *
 * @return {@code true} if file is successfully deleted, {@code false} if the file does not exist.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public boolean deleteIfExists() {
    return deleteIfExistsWithResponse(new DataLakePathDeleteOptions(), null, Context.NONE).getValue();
}
/**
 * Deletes this file if it exists, returning the raw service response.
 *
 * @param options {@link DataLakePathDeleteOptions} (request conditions, recursion flag).
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response containing status code and HTTP headers. If {@link Response}'s status code is 200, the file
 * was successfully deleted. If status code is 404, the file does not exist.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Boolean> deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout,
    Context context) {
    return StorageImplUtils.blockWithOptionalTimeout(dataLakeFileAsyncClient
        .deleteIfExistsWithResponse(options, context), timeout);
}
/**
 * Creates a new file from the given stream. By default, this method will not overwrite
 * an existing file.
 *
 * @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
 * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark
 * support.
 * @param length The exact length of the data. It is important that this value match precisely the length of the
 * data provided in the {@link InputStream}.
 * @return Information about the uploaded path.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(InputStream data, long length) {
    return upload(data, length, false);
}
/**
 * Creates a new file from the given {@link BinaryData}. By default, this method will
 * not overwrite an existing file.
 *
 * @param data The data to write to the file.
 * @return Information about the uploaded path.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(BinaryData data) {
    return upload(data, false);
}
/**
 * Creates a new file from the given stream, or updates the content of an existing file
 * when {@code overwrite} is true.
 *
 * @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
 * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark
 * support.
 * @param length The exact length of the data. It is important that this value match precisely the length of the
 * data provided in the {@link InputStream}.
 * @param overwrite Whether to overwrite, should data exist on the file.
 * @return Information about the uploaded path.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(InputStream data, long length, boolean overwrite) {
    DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
    if (!overwrite) {
        // If-None-Match: * makes the service reject the upload when the file already exists.
        requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    return uploadWithResponse(new FileParallelUploadOptions(data, length).setRequestConditions(requestConditions),
        null, Context.NONE).getValue();
}
/**
 * Creates a new file from the given {@link BinaryData}, or updates the content of an
 * existing file when {@code overwrite} is true.
 *
 * @param data The data to write to the file.
 * @param overwrite Whether to overwrite, should data exist on the file.
 * @return Information about the uploaded path.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(BinaryData data, boolean overwrite) {
    DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
    if (!overwrite) {
        // If-None-Match: * makes the service reject the upload when the file already exists.
        requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    return uploadWithResponse(new FileParallelUploadOptions(data).setRequestConditions(requestConditions),
        null, Context.NONE).getValue();
}
/**
 * Creates a new file with full control over headers, metadata, permissions, transfer
 * options and request conditions. To avoid overwriting an existing file, set
 * If-None-Match to "*" on the request conditions.
 *
 * @param options {@link FileParallelUploadOptions}; must not be null.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return Information about the uploaded path.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> uploadWithResponse(FileParallelUploadOptions options, Duration timeout,
    Context context) {
    Objects.requireNonNull(options);
    Mono<Response<PathInfo>> upload = this.dataLakeFileAsyncClient.uploadWithResponse(options)
        .contextWrite(FluxUtil.toReactorContext(context));
    try {
        return StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
    } catch (UncheckedIOException e) {
        // Surface stream I/O failures through the client logger for diagnosability.
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
* Creates a file, with the content of the specified file. By default, this method will not overwrite an
* existing file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
* <pre>
* try &
* client.uploadFromFile&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void uploadFromFile(String filePath) {
uploadFromFile(filePath, false);
}
/**
* Creates a file, with the content of the specified file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
* <pre>
* try &
* boolean overwrite = false;
* client.uploadFromFile&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether to overwrite, should the file already exist
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void uploadFromFile(String filePath, boolean overwrite) {
DataLakeRequestConditions requestConditions = null;
if (!overwrite) {
if (UploadUtils.shouldUploadInChunks(filePath, ModelHelper.FILE_DEFAULT_MAX_SINGLE_UPLOAD_SIZE, LOGGER)
&& exists()) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new DataLakeRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
uploadFromFile(filePath, null, null, null, requestConditions, null);
}
/**
* Creates a file, with the content of the specified file.
* <p>
* To avoid overwriting, pass "*" to {@link DataLakeRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
* <pre>
* PathHttpHeaders headers = new PathHttpHeaders&
* .setContentMd5&
* .setContentLanguage&
* .setContentType&
*
* Map<String, String> metadata = Collections.singletonMap&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* .setIfUnmodifiedSince&
* Long blockSize = 100L * 1024L * 1024L; &
* ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions&
*
* try &
* client.uploadFromFile&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link PathHttpHeaders}
* @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any
* metadata key or value, it must be removed or encoded.
* @param requestConditions {@link DataLakeRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
Duration timeout) {
Mono<Void> upload = this.dataLakeFileAsyncClient.uploadFromFile(
filePath, parallelTransferOptions, headers, metadata, requestConditions);
try {
StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
throw LOGGER.logExceptionAsError(e);
}
}
/**
* Creates a file, with the content of the specified file.
* <p>
* To avoid overwriting, pass "*" to {@link DataLakeRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse
* <pre>
* PathHttpHeaders headers = new PathHttpHeaders&
* .setContentMd5&
* .setContentLanguage&
* .setContentType&
*
* Map<String, String> metadata = Collections.singletonMap&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* .setIfUnmodifiedSince&
* Long blockSize = 100L * 1024L * 1024L; &
* ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions&
*
* try &
* Response<PathInfo> response = client.uploadFromFileWithResponse&
* metadata, requestConditions, timeout, new Context&
* System.out.printf&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link PathHttpHeaders}
* @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any
* metadata key or value, it must be removed or encoded.
* @param requestConditions {@link DataLakeRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return Response containing information about the uploaded path.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> uploadFromFileWithResponse(String filePath, ParallelTransferOptions parallelTransferOptions,
PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
Duration timeout, Context context) {
Mono<Response<PathInfo>> upload = this.dataLakeFileAsyncClient.uploadFromFileWithResponse(
filePath, parallelTransferOptions, headers, metadata, requestConditions)
.contextWrite(FluxUtil.toReactorContext(context));
try {
return StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
throw LOGGER.logExceptionAsError(e);
}
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.append
* <pre>
* client.append&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.append
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param length The exact length of the data.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void append(InputStream data, long fileOffset, long length) {
appendWithResponse(data, fileOffset, length, null, null, Context.NONE);
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.append
* <pre>
* client.append&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.append
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void append(BinaryData data, long fileOffset) {
appendWithResponse(data, fileOffset, null, null, null, Context.NONE);
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
*
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param length The exact length of the data.
* @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of the
* received data and fail the request if it does not match the provided MD5.
* @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
* the file.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length,
byte[] contentMd5, String leaseId, Duration timeout, Context context) {
DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()
.setLeaseId(leaseId)
.setContentHash(contentMd5)
.setFlush(null);
return appendWithResponse(data, fileOffset, length, appendOptions, timeout, context);
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* FileRange range = new FileRange&
* byte[] contentMd5 = new byte[0]; &
* DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions&
* .setLeaseId&
* .setContentHash&
* .setFlush&
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param length The exact length of the data.
* @param appendOptions {@link DataLakeFileAppendOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length,
DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
Objects.requireNonNull(data);
Flux<ByteBuffer> fbb = Utility.convertStreamToByteBuffer(data, length,
BlobAsyncClient.BLOB_DEFAULT_UPLOAD_BLOCK_SIZE, true);
Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fbb, fileOffset, length,
appendOptions, context);
try {
return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
} catch (UncheckedIOException e) {
throw LOGGER.logExceptionAsError(e);
}
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
*
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of the
* received data and fail the request if it does not match the provided MD5.
* @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
* the file.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId,
Duration timeout, Context context) {
Objects.requireNonNull(data);
Flux<ByteBuffer> fluxData = data.toFluxByteBuffer();
DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()
.setLeaseId(leaseId)
.setContentHash(contentMd5)
.setFlush(null);
Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fluxData, fileOffset,
data.getLength(), appendOptions, context);
try {
return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
} catch (UncheckedIOException e) {
throw LOGGER.logExceptionAsError(e);
}
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* BinaryData binaryData = BinaryData.fromStream&
* FileRange range = new FileRange&
* byte[] contentMd5 = new byte[0]; &
* DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions&
* .setLeaseId&
* .setContentHash&
* .setFlush&
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param appendOptions {@link DataLakeFileAppendOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(BinaryData data, long fileOffset,
DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
Objects.requireNonNull(data);
Flux<ByteBuffer> fluxData = data.toFluxByteBuffer();
Mono<Response<Void>> response = dataLakeFileAsyncClient.appendWithResponse(fluxData, fileOffset,
data.getLength(), appendOptions, context);
try {
return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
} catch (UncheckedIOException e) {
throw LOGGER.logExceptionAsError(e);
}
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
* <p>By default this method will not overwrite existing data.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flush
* <pre>
* client.flush&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flush
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @return Information about the created resource.
* @deprecated See {@link
*/
@ServiceMethod(returns = ReturnType.SINGLE)
@Deprecated
public PathInfo flush(long position) {
return flush(position, false);
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flush
* <pre>
* boolean overwrite = true;
* client.flush&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flush
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @param overwrite Whether to overwrite, should data exist on the file.
*
* @return Information about the created resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo flush(long position, boolean overwrite) {
DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
if (!overwrite) {
requestConditions = new DataLakeRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return flushWithResponse(position, false, false, null, requestConditions, null, Context.NONE).getValue();
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
* boolean retainUncommittedData = false;
* boolean close = false;
* PathHttpHeaders httpHeaders = new PathHttpHeaders&
* .setContentLanguage&
* .setContentType&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
*
* Response<PathInfo> response = client.flushWithResponse&
* requestConditions, timeout, new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @param retainUncommittedData Whether uncommitted data is to be retained after the operation.
* @param close Whether a file changed event raised indicates completion (true) or modification (false).
* @param httpHeaders {@link PathHttpHeaders httpHeaders}
* @param requestConditions {@link DataLakeRequestConditions requestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing the information of the created resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> flushWithResponse(long position, boolean retainUncommittedData, boolean close,
PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context) {
DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions()
.setUncommittedDataRetained(retainUncommittedData)
.setClose(close)
.setPathHttpHeaders(httpHeaders)
.setRequestConditions(requestConditions);
return flushWithResponse(position, flushOptions, timeout, context);
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
* boolean retainUncommittedData = false;
* boolean close = false;
* PathHttpHeaders httpHeaders = new PathHttpHeaders&
* .setContentLanguage&
* .setContentType&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
*
* Integer leaseDuration = 15;
*
* DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions&
* .setUncommittedDataRetained&
* .setClose&
* .setPathHttpHeaders&
* .setRequestConditions&
* .setLeaseAction&
* .setLeaseDuration&
* .setProposedLeaseId&
*
* Response<PathInfo> response = client.flushWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @param flushOptions {@link DataLakeFileFlushOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing the information of the created resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout,
Context context) {
Mono<Response<PathInfo>> response = dataLakeFileAsyncClient.flushWithResponse(position, flushOptions, context);
return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
* Reads the entire file into an output stream.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.read
* <pre>
* client.read&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.read
*
* <p>For more information, see the
* <a href="https:
*
* @param stream A non-null {@link OutputStream} instance where the downloaded data will be written.
* @throws UncheckedIOException If an I/O error occurs.
* @throws NullPointerException if {@code stream} is null
*/
public void read(OutputStream stream) {
readWithResponse(stream, null, null, null, false, null, Context.NONE);
}
/**
* Reads a range of bytes from a file into an output stream.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
*
* System.out.printf&
* client.readWithResponse&
* timeout, new Context&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param stream A non-null {@link OutputStream} instance where the downloaded data will be written.
* @param range {@link FileRange}
* @param options {@link DownloadRetryOptions}
* @param requestConditions {@link DataLakeRequestConditions}
* @param getRangeContentMd5 Whether the contentMD5 for the specified file range should be returned.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing status code and HTTP headers.
* @throws UncheckedIOException If an I/O error occurs.
* @throws NullPointerException if {@code stream} is null
*/
public FileReadResponse readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options,
DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context) {
return DataLakeImplUtils.returnOrConvertException(() -> {
BlobDownloadResponse response = blockBlobClient.downloadWithResponse(stream, Transforms.toBlobRange(range),
Transforms.toBlobDownloadRetryOptions(options), Transforms.toBlobRequestConditions(requestConditions),
getRangeContentMd5, timeout, context);
return Transforms.toFileReadResponse(response);
}, LOGGER);
}
/**
* Opens a file input stream to download the file. Locks on ETags.
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream -->
* <pre>
* DataLakeFileOpenInputStreamResult inputStream = client.openInputStream&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream -->
*
* @return An {@link InputStream} object that represents the stream to use for reading from the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public DataLakeFileOpenInputStreamResult openInputStream() {
return openInputStream(null);
}
/**
* Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option
* is not specified.
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
* <pre>
* DataLakeFileInputStreamOptions options = new DataLakeFileInputStreamOptions&
* .setRequestConditions&
* DataLakeFileOpenInputStreamResult streamResult = client.openInputStream&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
*
* @param options {@link DataLakeFileInputStreamOptions}
* @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options) {
return openInputStream(options, Context.NONE);
}
/**
* Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option
* is not specified.
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
* <pre>
* options = new DataLakeFileInputStreamOptions&
* .setRequestConditions&
* DataLakeFileOpenInputStreamResult stream = client.openInputStream&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
*
* @param options {@link DataLakeFileInputStreamOptions}
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the file. If the file already exists on the service, it
* will be overwritten.
*
* @return The {@link OutputStream} that can be used to write to the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public OutputStream getOutputStream() {
return getOutputStream(null);
}
/**
* Creates and opens an output stream to write data to the file. If the file already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link DataLakeRequestConditions
* </p>
*
* @param options {@link DataLakeFileOutputStreamOptions}
* @return The {@link OutputStream} that can be used to write to the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options) {
return getOutputStream(options, null);
}
/**
* Creates and opens an output stream to write data to the file. If the file already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link DataLakeRequestConditions
* </p>
*
* @param options {@link DataLakeFileOutputStreamOptions}
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return The {@link OutputStream} that can be used to write to the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options, Context context) {
BlockBlobOutputStreamOptions convertedOptions = Transforms.toBlockBlobOutputStreamOptions(options);
return blockBlobClient.getBlobOutputStream(convertedOptions, context);
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException}
* will be thrown.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param filePath A {@link String} representing the filePath where the downloaded data will be written.
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(String filePath) {
return readToFile(filePath, false);
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException}
* will be thrown.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param options {@link ReadToFileOptions}
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(ReadToFileOptions options) {
return readToFile(options, false);
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a
* {@link FileAlreadyExistsException} will be thrown.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* boolean overwrite = false; &
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param filePath A {@link String} representing the filePath where the downloaded data will be written.
* @param overwrite Whether to overwrite the file, should the file exist.
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(String filePath, boolean overwrite) {
Set<OpenOption> openOptions = null;
if (overwrite) {
openOptions = new HashSet<>();
openOptions.add(StandardOpenOption.CREATE);
openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
openOptions.add(StandardOpenOption.READ);
openOptions.add(StandardOpenOption.WRITE);
}
return readToFileWithResponse(filePath, null, null, null, null, false, openOptions, null, Context.NONE)
.getValue();
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a
* {@link FileAlreadyExistsException} will be thrown.</p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* boolean overwrite1 = false; &
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param options {@link ReadToFileOptions}
* @param overwrite Whether to overwrite the file, should the file exist.
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(ReadToFileOptions options, boolean overwrite) {
    if (overwrite) {
        // Previously a bare NPE was thrown here when options was null; fail with a clear
        // message instead. (When overwrite is false a null options is tolerated downstream.)
        Objects.requireNonNull(options, "'options' cannot be null when 'overwrite' is requested.");
        // Overwrite requested: open (or create) the destination and truncate existing content.
        Set<OpenOption> openOptions = new HashSet<>();
        openOptions.add(StandardOpenOption.CREATE);
        openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
        openOptions.add(StandardOpenOption.READ);
        openOptions.add(StandardOpenOption.WRITE);
        options.setOpenOptions(openOptions);
    }
    return readToFileWithResponse(options, null, Context.NONE)
        .getValue();
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>By default the file will be created and must not exist, if the file already exists a
* {@link FileAlreadyExistsException} will be thrown. To override this behavior, provide appropriate
* {@link OpenOption OpenOptions} </p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse
* <pre>
* FileRange fileRange = new FileRange&
* DownloadRetryOptions downloadRetryOptions = new DownloadRetryOptions&
* Set<OpenOption> openOptions = new HashSet<>&
* StandardOpenOption.WRITE, StandardOpenOption.READ&
*
* client.readToFileWithResponse&
* downloadRetryOptions, null, false, openOptions, timeout, new Context&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param filePath A {@link String} representing the filePath where the downloaded data will be written.
* @param range {@link FileRange}
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to download to file. Number of parallel
* transfers parameter is ignored.
* @param downloadRetryOptions {@link DownloadRetryOptions}
* @param requestConditions {@link DataLakeRequestConditions}
* @param rangeGetContentMd5 Whether the contentMD5 for the specified file range should be returned.
* @param openOptions {@link OpenOption OpenOptions} to use to configure how to open or create the file.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A response containing the file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathProperties> readToFileWithResponse(String filePath, FileRange range,
    ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions,
    DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions,
    Duration timeout, Context context) {
    // Blob-layer exceptions raised inside the lambda are mapped to DataLake equivalents.
    return DataLakeImplUtils.returnOrConvertException(() -> {
        // Translate the DataLake parameters onto the blob options bag.
        BlobDownloadToFileOptions blobOptions = new BlobDownloadToFileOptions(filePath)
            .setRange(Transforms.toBlobRange(range))
            .setParallelTransferOptions(parallelTransferOptions)
            .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(downloadRetryOptions))
            .setRequestConditions(Transforms.toBlobRequestConditions(requestConditions))
            .setRetrieveContentRangeMd5(rangeGetContentMd5)
            .setOpenOptions(openOptions);
        Response<BlobProperties> blobResponse =
            blockBlobClient.downloadToFileWithResponse(blobOptions, timeout, context);
        return new SimpleResponse<>(blobResponse,
            Transforms.toPathProperties(blobResponse.getValue(), blobResponse));
    }, LOGGER);
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>By default the file will be created and must not exist, if the file already exists a
* {@link FileAlreadyExistsException} will be thrown. To override this behavior, provide appropriate
* {@link OpenOption OpenOptions} </p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse
* <pre>
* ReadToFileOptions options = new ReadToFileOptions&
* options.setRange&
* options.setDownloadRetryOptions&
* options.setOpenOptions&
* StandardOpenOption.WRITE, StandardOpenOption.READ&
* options.setParallelTransferOptions&
* options.setDataLakeRequestConditions&
* options.setRangeGetContentMd5&
*
* client.readToFileWithResponse&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse
*
* @param options {@link ReadToFileOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A response containing the file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathProperties> readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context) {
    options = options == null ? new ReadToFileOptions() : options;
    // BUG FIX: the previous implementation set newContext to null when isUpn() was unset,
    // silently discarding the caller-supplied context. Preserve it instead.
    Context newContext = context;
    if (options.isUpn() != null) {
        // Propagate the user-principal-name preference to the service via a request header
        // injected through the context (picked up by AddHeadersFromContextPolicy).
        HttpHeaders headers = new HttpHeaders();
        headers.set("x-ms-upn", options.isUpn() ? "true" : "false");
        if (context == null) {
            newContext = new Context(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers);
        } else {
            newContext = context.addData(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers);
        }
    }
    final ReadToFileOptions finalOptions = options;
    final Context finalContext = newContext;
    // Blob-layer exceptions raised inside the lambda are mapped to DataLake equivalents.
    return DataLakeImplUtils.returnOrConvertException(() -> {
        Response<BlobProperties> response = blockBlobClient.downloadToFileWithResponse(
            new BlobDownloadToFileOptions(finalOptions.getFilePath())
                .setRange(Transforms.toBlobRange(finalOptions.getRange()))
                .setParallelTransferOptions(finalOptions.getParallelTransferOptions())
                .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(finalOptions.getDownloadRetryOptions()))
                .setRequestConditions(Transforms.toBlobRequestConditions(finalOptions.getDataLakeRequestConditions()))
                .setRetrieveContentRangeMd5(finalOptions.isRangeGetContentMd5())
                .setOpenOptions(finalOptions.getOpenOptions()), timeout, finalContext);
        return new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue(), response));
    }, LOGGER);
}
/**
* Moves the file to another location within the file system.
* For more information see the
* <a href="https:
* Docs</a>.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient.rename
* <pre>
* DataLakeDirectoryAsyncClient renamedClient = client.rename&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient.rename
*
* @param destinationFileSystem The file system of the destination within the account.
* {@code null} for the current file system.
* @param destinationPath Relative path from the file system to rename the file to, excludes the file system name.
* For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path
* in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt"
* @return A {@link DataLakeFileClient} used to interact with the new file created.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DataLakeFileClient rename(String destinationFileSystem, String destinationPath) {
    // Delegate with no request conditions, timeout, or context.
    Response<DataLakeFileClient> renameResponse =
        renameWithResponse(destinationFileSystem, destinationPath, null, null, null, null);
    return renameResponse.getValue();
}
/**
* Moves the file to another location within the file system.
* For more information, see the
* <a href="https:
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.renameWithResponse
* <pre>
* DataLakeRequestConditions sourceRequestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* DataLakeRequestConditions destinationRequestConditions = new DataLakeRequestConditions&
*
* DataLakeFileClient newRenamedClient = client.renameWithResponse&
* sourceRequestConditions, destinationRequestConditions, timeout, new Context&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.renameWithResponse
*
* @param destinationFileSystem The file system of the destination within the account.
* {@code null} for the current file system.
* @param destinationPath Relative path from the file system to rename the file to, excludes the file system name.
* For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path
* in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt"
* @param sourceRequestConditions {@link DataLakeRequestConditions} against the source.
* @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} whose {@link Response
* used to interact with the file created.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<DataLakeFileClient> renameWithResponse(String destinationFileSystem, String destinationPath,
DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions,
Duration timeout, Context context) {
// Delegate to the async client, mapping the renamed path into a synchronous DataLakeFileClient
// backed by a freshly built BlockBlobClient for the destination location.
Mono<Response<DataLakeFileClient>> response =
dataLakeFileAsyncClient.renameWithResponse(destinationFileSystem, destinationPath,
sourceRequestConditions, destinationRequestConditions, context)
.map(asyncResponse ->
new SimpleResponse<>(asyncResponse.getRequest(), asyncResponse.getStatusCode(),
asyncResponse.getHeaders(),
new DataLakeFileClient(new DataLakeFileAsyncClient(asyncResponse.getValue()),
new SpecializedBlobClientBuilder()
.blobAsyncClient(asyncResponse.getValue().blockBlobAsyncClient)
.buildBlockBlobClient())));
// Block for the result (with optional timeout), then re-wrap the value through the
// path-client constructor. NOTE(review): the value is already a DataLakeFileClient, so this
// second wrap looks redundant — confirm whether it can be returned directly.
Response<DataLakeFileClient> resp = StorageImplUtils.blockWithOptionalTimeout(response, timeout);
return new SimpleResponse<>(resp, new DataLakeFileClient(resp.getValue()));
}
/**
* Opens an input stream to query the file.
*
* <p>For more information, see the
* <a href="https:
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream
* <pre>
* String expression = "SELECT * from BlobStorage";
* InputStream inputStream = client.openQueryInputStream&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream
*
* @param expression The query expression.
* @return An <code>InputStream</code> object that represents the stream to use for reading the query response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public InputStream openQueryInputStream(String expression) {
    // Consistency fix: annotate with @ServiceMethod like every other service call on this client.
    // Delegate to the options-based overload with only the query expression set.
    return openQueryInputStreamWithResponse(new FileQueryOptions(expression)).getValue();
}
/**
* Opens an input stream to query the file.
*
* <p>For more information, see the
* <a href="https:
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream
* <pre>
* String expression = "SELECT * from BlobStorage";
* FileQuerySerialization input = new FileQueryDelimitedSerialization&
* .setColumnSeparator&
* .setEscapeChar&
* .setRecordSeparator&
* .setHeadersPresent&
* .setFieldQuote&
* FileQuerySerialization output = new FileQueryJsonSerialization&
* .setRecordSeparator&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* Consumer<FileQueryError> errorConsumer = System.out::println;
* Consumer<FileQueryProgress> progressConsumer = progress -> System.out.println&
* + progress.getBytesScanned&
* FileQueryOptions queryOptions = new FileQueryOptions&
* .setInputSerialization&
* .setOutputSerialization&
* .setRequestConditions&
* .setErrorConsumer&
* .setProgressConsumer&
*
* InputStream inputStream = client.openQueryInputStreamWithResponse&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream
*
* @param queryOptions {@link FileQueryOptions The query options}.
* @return A response containing status code and HTTP headers including an <code>InputStream</code> object
* that represents the stream to use for reading the query response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<InputStream> openQueryInputStreamWithResponse(FileQueryOptions queryOptions) {
    // Consistency fix: annotate with @ServiceMethod like every other service call on this client.
    // Block on the async query; the returned FluxInputStream pulls the body lazily.
    FileQueryAsyncResponse response = dataLakeFileAsyncClient.queryWithResponse(queryOptions)
        .block();
    if (response == null) {
        // block() returning null means the pipeline completed without emitting a response.
        throw LOGGER.logExceptionAsError(new IllegalStateException("Query response cannot be null"));
    }
    return new ResponseBase<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
        new FluxInputStream(response.getValue()), response.getDeserializedHeaders());
}
/**
* Queries an entire file into an output stream.
*
* <p>For more information, see the
* <a href="https:
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.query
* <pre>
* ByteArrayOutputStream queryData = new ByteArrayOutputStream&
* String expression = "SELECT * from BlobStorage";
* client.query&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.query
*
* @param stream A non-null {@link OutputStream} instance where the downloaded data will be written.
* @param expression The query expression.
* @throws UncheckedIOException If an I/O error occurs.
* @throws NullPointerException if {@code stream} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void query(OutputStream stream, String expression) {
    // Consistency fix: annotate with @ServiceMethod like every other service call on this client.
    // Delegate to the response overload; the query result is written into the provided stream.
    queryWithResponse(new FileQueryOptions(expression, stream), null, Context.NONE);
}
/**
* Queries an entire file into an output stream.
*
* <p>For more information, see the
* <a href="https:
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.queryWithResponse
* <pre>
* ByteArrayOutputStream queryData = new ByteArrayOutputStream&
* String expression = "SELECT * from BlobStorage";
* FileQueryJsonSerialization input = new FileQueryJsonSerialization&
* .setRecordSeparator&
* FileQueryDelimitedSerialization output = new FileQueryDelimitedSerialization&
* .setEscapeChar&
* .setColumnSeparator&
* .setRecordSeparator&
* .setFieldQuote&
* .setHeadersPresent&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* Consumer<FileQueryError> errorConsumer = System.out::println;
* Consumer<FileQueryProgress> progressConsumer = progress -> System.out.println&
* + progress.getBytesScanned&
* FileQueryOptions queryOptions = new FileQueryOptions&
* .setInputSerialization&
* .setOutputSerialization&
* .setRequestConditions&
* .setErrorConsumer&
* .setProgressConsumer&
* System.out.printf&
* client.queryWithResponse&
* .getStatusCode&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.queryWithResponse
*
* @param queryOptions {@link FileQueryOptions The query options}.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A response containing status code and HTTP headers.
* @throws UncheckedIOException If an I/O error occurs.
* @throws NullPointerException if {@code stream} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public FileQueryResponse queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context) {
    // Consistency fix: annotate with @ServiceMethod like every other service call on this client.
    // Blob-layer exceptions raised inside the lambda are mapped to DataLake equivalents.
    return DataLakeImplUtils.returnOrConvertException(() -> {
        BlobQueryResponse response = blockBlobClient.queryWithResponse(
            Transforms.toBlobQueryOptions(queryOptions), timeout, context);
        return Transforms.toFileQueryResponse(response);
    }, LOGGER);
}
/**
* Schedules the file for deletion.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletion
* <pre>
* FileScheduleDeletionOptions options = new FileScheduleDeletionOptions&
* client.scheduleDeletion&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletion
*
* @param options Schedule deletion parameters.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void scheduleDeletion(FileScheduleDeletionOptions options) {
    // Fire the request with no timeout; the response payload is discarded.
    scheduleDeletionWithResponse(options, null, Context.NONE);
}
/**
* Schedules the file for deletion.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletionWithResponse
* <pre>
* FileScheduleDeletionOptions options = new FileScheduleDeletionOptions&
* Context context = new Context&
*
* client.scheduleDeletionWithResponse&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletionWithResponse
*
* @param options Schedule deletion parameters.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A response containing status code and HTTP headers.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> scheduleDeletionWithResponse(FileScheduleDeletionOptions options,
    Duration timeout, Context context) {
    // Delegate to the async client and block, honoring the optional timeout.
    Mono<Response<Void>> scheduleMono = dataLakeFileAsyncClient.scheduleDeletionWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(scheduleMono, timeout);
}
} | class DataLakeFileClient extends DataLakePathClient {
/**
 * Indicates the maximum number of bytes that can be sent in a call to upload.
 */
private static final long MAX_APPEND_FILE_BYTES = DataLakeFileAsyncClient.MAX_APPEND_FILE_BYTES;
// Logger used throughout this client for surfacing and recording thrown exceptions.
private static final ClientLogger LOGGER = new ClientLogger(DataLakeFileClient.class);
// Async counterpart that this synchronous client delegates its service calls to.
private final DataLakeFileAsyncClient dataLakeFileAsyncClient;
// Package-private: constructed by the builder / sibling clients with a matching blob client.
DataLakeFileClient(DataLakeFileAsyncClient pathAsyncClient, BlockBlobClient blockBlobClient) {
super(pathAsyncClient, blockBlobClient);
this.dataLakeFileAsyncClient = pathAsyncClient;
}
// Converts a generic path client into a file client, reusing its underlying async/blob clients.
private DataLakeFileClient(DataLakePathClient dataLakePathClient) {
super(dataLakePathClient.dataLakePathAsyncClient, dataLakePathClient.blockBlobClient);
this.dataLakeFileAsyncClient = new DataLakeFileAsyncClient(dataLakePathClient.dataLakePathAsyncClient);
}
/**
* Gets the URL of the file represented by this client on the Data Lake service.
*
* @return the URL.
*/
public String getFileUrl() {
    // A file's URL is exactly its path URL on the Data Lake endpoint.
    return this.getPathUrl();
}
/**
* Gets the path of this file, not including the name of the resource itself.
*
* @return The path of the file.
*/
public String getFilePath() {
    // The file's path is the generic object path inherited from the base client.
    return this.getObjectPath();
}
/**
* Gets the name of this file, not including its full path.
*
* @return The name of the file.
*/
public String getFileName() {
    // The file's name is the generic object name inherited from the base client.
    return this.getObjectName();
}
/**
* Creates a new {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
*
* @param customerProvidedKey the {@link CustomerProvidedKey} for the blob,
* pass {@code null} to use no customer provided key.
* @return a {@link DataLakeFileClient} with the specified {@code customerProvidedKey}.
*/
public DataLakeFileClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
    // Re-key both underlying clients, then pair them in a new file client instance.
    DataLakeFileAsyncClient asyncWithCpk =
        dataLakeFileAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey);
    BlockBlobClient blobWithCpk =
        blockBlobClient.getCustomerProvidedKeyClient(Transforms.toBlobCustomerProvidedKey(customerProvidedKey));
    return new DataLakeFileClient(asyncWithCpk, blobWithCpk);
}
/**
* Deletes a file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.delete -->
* <pre>
* client.delete&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.delete -->
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void delete() {
    // Unconditional delete: no request conditions, no timeout; the Void payload is ignored.
    this.deleteWithResponse(null, null, Context.NONE).getValue();
}
/**
* Deletes a file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.deleteWithResponse
* <pre>
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
*
* client.deleteWithResponse&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.deleteWithResponse
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @param requestConditions {@link DataLakeRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing status code and HTTP headers.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout,
    Context context) {
    // Delegate to the path-level async delete (no recursive flag applies to files) and block.
    Mono<Response<Void>> deleteMono = dataLakePathAsyncClient.deleteWithResponse(null, requestConditions, context);
    return StorageImplUtils.blockWithOptionalTimeout(deleteMono, timeout);
}
/**
* Deletes a file if it exists.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExists -->
* <pre>
* client.deleteIfExists&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExists -->
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
* @return {@code true} if file is successfully deleted, {@code false} if the file does not exist.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public boolean deleteIfExists() {
    // Default options, no timeout; the Boolean payload reports whether the file existed.
    return this.deleteIfExistsWithResponse(new DataLakePathDeleteOptions(), null, Context.NONE).getValue();
}
/**
* Deletes a file if it exists.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExistsWithResponse
* <pre>
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* DataLakePathDeleteOptions options = new DataLakePathDeleteOptions&
* .setRequestConditions&
*
* Response<Boolean> response = client.deleteIfExistsWithResponse&
* if &
* System.out.println&
* &
* System.out.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExistsWithResponse
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @param options {@link DataLakePathDeleteOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing status code and HTTP headers. If {@link Response}'s status code is 200, the file
* was successfully deleted. If status code is 404, the file does not exist.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Boolean> deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout,
    Context context) {
    // Delegate to the async client and block, honoring the optional timeout.
    Mono<Response<Boolean>> deleteMono = dataLakeFileAsyncClient.deleteIfExistsWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(deleteMono, timeout);
}
/**
* Creates a new file. By default, this method will not overwrite an existing file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.upload
* <pre>
* try &
* client.upload&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.upload
*
* @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
* the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark
* support.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data provided in the {@link InputStream}.
* @return Information about the uploaded path.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(InputStream data, long length) {
    // Convenience overload: no-overwrite is the default.
    return this.upload(data, length, false);
}
/**
* Creates a new file. By default, this method will not overwrite an existing file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.upload
* <pre>
* try &
* client.upload&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.upload
*
* @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
* the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark
* support.
* @return Information about the uploaded path.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(BinaryData data) {
    // Convenience overload: no-overwrite is the default.
    return this.upload(data, false);
}
/**
* Creates a new file, or updates the content of an existing file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.upload
* <pre>
* try &
* boolean overwrite = false;
* client.upload&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.upload
*
* @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
* the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark
* support.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data provided in the {@link InputStream}.
* @param overwrite Whether to overwrite, should data exist on the file.
* @return Information about the uploaded path.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(InputStream data, long length, boolean overwrite) {
    // An If-None-Match: * condition makes the create fail when the file already exists.
    DataLakeRequestConditions conditions = new DataLakeRequestConditions();
    if (!overwrite) {
        conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    FileParallelUploadOptions uploadOptions =
        new FileParallelUploadOptions(data, length).setRequestConditions(conditions);
    return uploadWithResponse(uploadOptions, null, Context.NONE).getValue();
}
/**
* Creates a new file, or updates the content of an existing file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.upload
* <pre>
* try &
* boolean overwrite = false;
* client.upload&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.upload
*
* @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
* the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark
* support.
* @param overwrite Whether to overwrite, should data exist on the file.
* @return Information about the uploaded path.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo upload(BinaryData data, boolean overwrite) {
    // An If-None-Match: * condition makes the create fail when the file already exists.
    DataLakeRequestConditions conditions = new DataLakeRequestConditions();
    if (!overwrite) {
        conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    FileParallelUploadOptions uploadOptions =
        new FileParallelUploadOptions(data).setRequestConditions(conditions);
    return uploadWithResponse(uploadOptions, null, Context.NONE).getValue();
}
/**
* Creates a new file.
* To avoid overwriting, pass "*" to {@link DataLakeRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadWithResponse
* <pre>
* PathHttpHeaders headers = new PathHttpHeaders&
* .setContentMd5&
* .setContentLanguage&
* .setContentType&
*
* Map<String, String> metadata = Collections.singletonMap&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* .setIfUnmodifiedSince&
* Long blockSize = 100L * 1024L * 1024L; &
* ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions&
*
* try &
* client.uploadWithResponse&
* .setParallelTransferOptions&
* .setMetadata&
* .setPermissions&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadWithResponse
*
* @param options {@link FileParallelUploadOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return Information about the uploaded path.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> uploadWithResponse(FileParallelUploadOptions options, Duration timeout,
    Context context) {
    Objects.requireNonNull(options);
    // Delegate to the async upload, threading the caller's context through the Reactor pipeline.
    Mono<Response<PathInfo>> uploadMono = this.dataLakeFileAsyncClient.uploadWithResponse(options)
        .contextWrite(FluxUtil.toReactorContext(context));
    try {
        return StorageImplUtils.blockWithOptionalTimeout(uploadMono, timeout);
    } catch (UncheckedIOException e) {
        // Record I/O failures through the client logger before rethrowing, preserving the cause.
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
* Creates a file, with the content of the specified file. By default, this method will not overwrite an
* existing file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
* <pre>
* try &
* client.uploadFromFile&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void uploadFromFile(String filePath) {
    // Convenience overload: no-overwrite is the default.
    this.uploadFromFile(filePath, false);
}
/**
* Creates a file, with the content of the specified file.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
* <pre>
* try &
* boolean overwrite = false;
* client.uploadFromFile&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether to overwrite, should the file already exist
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void uploadFromFile(String filePath, boolean overwrite) {
    DataLakeRequestConditions requestConditions = null;
    if (!overwrite) {
        // Large files upload in multiple chunks, which cannot rely on a single conditional
        // create — probe for an existing file up front and fail fast before transferring data.
        boolean chunked =
            UploadUtils.shouldUploadInChunks(filePath, ModelHelper.FILE_DEFAULT_MAX_SINGLE_UPLOAD_SIZE, LOGGER);
        if (chunked && exists()) {
            throw LOGGER.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
        }
        // Small files use an If-None-Match: * condition so the service rejects overwrites.
        requestConditions = new DataLakeRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    uploadFromFile(filePath, null, null, null, requestConditions, null);
}
/**
* Creates a file, with the content of the specified file.
* <p>
* To avoid overwriting, pass "*" to {@link DataLakeRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
* <pre>
* PathHttpHeaders headers = new PathHttpHeaders&
* .setContentMd5&
* .setContentLanguage&
* .setContentType&
*
* Map<String, String> metadata = Collections.singletonMap&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* .setIfUnmodifiedSince&
* Long blockSize = 100L * 1024L * 1024L; &
* ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions&
*
* try &
* client.uploadFromFile&
* System.out.println&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link PathHttpHeaders}
* @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any
* metadata key or value, it must be removed or encoded.
* @param requestConditions {@link DataLakeRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
    PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
    Duration timeout) {
    // Delegate the transfer to the async client and block, honoring the optional timeout.
    Mono<Void> uploadMono = this.dataLakeFileAsyncClient.uploadFromFile(
        filePath, parallelTransferOptions, headers, metadata, requestConditions);
    try {
        StorageImplUtils.blockWithOptionalTimeout(uploadMono, timeout);
    } catch (UncheckedIOException e) {
        // Record I/O failures through the client logger before rethrowing, preserving the cause.
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
* Creates a file, with the content of the specified file.
* <p>
 * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse
* <pre>
* PathHttpHeaders headers = new PathHttpHeaders&
* .setContentMd5&
* .setContentLanguage&
* .setContentType&
*
* Map<String, String> metadata = Collections.singletonMap&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
* .setIfUnmodifiedSince&
* Long blockSize = 100L * 1024L * 1024L; &
* ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions&
*
* try &
* Response<PathInfo> response = client.uploadFromFileWithResponse&
* metadata, requestConditions, timeout, new Context&
* System.out.printf&
* &
* System.err.printf&
* &
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link PathHttpHeaders}
* @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any
* metadata key or value, it must be removed or encoded.
* @param requestConditions {@link DataLakeRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return Response containing information about the uploaded path.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> uploadFromFileWithResponse(String filePath, ParallelTransferOptions parallelTransferOptions,
    PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
    Duration timeout, Context context) {
    // Thread the caller's context through the reactive pipeline, then block
    // on the async upload (bounded by 'timeout' when one is supplied).
    Mono<Response<PathInfo>> operation = this.dataLakeFileAsyncClient
        .uploadFromFileWithResponse(filePath, parallelTransferOptions, headers, metadata, requestConditions)
        .contextWrite(FluxUtil.toReactorContext(context));
    try {
        return StorageImplUtils.blockWithOptionalTimeout(operation, timeout);
    } catch (UncheckedIOException e) {
        // Log before propagating so the failure is captured by client logging.
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.append
* <pre>
* client.append&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.append
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param length The exact length of the data.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void append(InputStream data, long fileOffset, long length) {
    // Convenience overload: no MD5 validation, no lease, no timeout, and the
    // default (empty) pipeline context.
    appendWithResponse(data, fileOffset, length, null, null, Context.NONE);
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.append
* <pre>
* client.append&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.append
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void append(BinaryData data, long fileOffset) {
    // Convenience overload: length is derived from the BinaryData downstream;
    // no MD5 validation, no lease, no timeout, default pipeline context.
    appendWithResponse(data, fileOffset, null, null, null, Context.NONE);
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
*
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param length The exact length of the data.
* @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of the
* received data and fail the request if it does not match the provided MD5.
* @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
* the file.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length,
    byte[] contentMd5, String leaseId, Duration timeout, Context context) {
    // Translate the legacy flat parameter list into the options-bag overload.
    DataLakeFileAppendOptions options = new DataLakeFileAppendOptions();
    options.setLeaseId(leaseId);
    options.setContentHash(contentMd5);
    options.setFlush(null); // flush-on-append is not exposed by this overload
    return appendWithResponse(data, fileOffset, length, options, timeout, context);
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* FileRange range = new FileRange&
* byte[] contentMd5 = new byte[0]; &
* DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions&
* .setLeaseId&
* .setContentHash&
* .setFlush&
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param length The exact length of the data.
* @param appendOptions {@link DataLakeFileAppendOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length,
    DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
    Objects.requireNonNull(data);
    // Convert the stream into replayable buffers for the async client
    // (markAndReset=true so chunks can be re-read on retry).
    Flux<ByteBuffer> buffers = Utility.convertStreamToByteBuffer(data, length,
        BlobAsyncClient.BLOB_DEFAULT_UPLOAD_BLOCK_SIZE, true);
    try {
        return StorageImplUtils.blockWithOptionalTimeout(
            dataLakeFileAsyncClient.appendWithResponse(buffers, fileOffset, length, appendOptions, context),
            timeout);
    } catch (UncheckedIOException e) {
        // Log before propagating so the failure is captured by client logging.
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
*
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param contentMd5 An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of the
* received data and fail the request if it does not match the provided MD5.
* @param leaseId By setting lease id, requests will fail if the provided lease does not match the active lease on
* the file.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId,
    Duration timeout, Context context) {
    // Mirror the InputStream counterpart of this legacy overload: translate
    // the flat parameters into an options bag and delegate to the
    // options-based overload instead of duplicating the append/block logic
    // here. The delegate performs the same null check, flux conversion,
    // async call and optional-timeout block the previous inline code did.
    DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()
        .setLeaseId(leaseId)
        .setContentHash(contentMd5)
        .setFlush(null); // flush-on-append is not exposed by this overload
    return appendWithResponse(data, fileOffset, appendOptions, timeout, context);
}
/**
* Appends data to the specified resource to later be flushed (written) by a call to flush
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
* <pre>
* BinaryData binaryData = BinaryData.fromStream&
* FileRange range = new FileRange&
* byte[] contentMd5 = new byte[0]; &
* DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions&
* .setLeaseId&
* .setContentHash&
* .setFlush&
* Response<Void> response = client.appendWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param data The data to write to the file.
* @param fileOffset The position where the data is to be appended.
* @param appendOptions {@link DataLakeFileAppendOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> appendWithResponse(BinaryData data, long fileOffset,
    DataLakeFileAppendOptions appendOptions, Duration timeout, Context context) {
    Objects.requireNonNull(data);
    // Length comes from the BinaryData itself; the payload is handed to the
    // async client as a Flux of buffers and the call is blocked on, bounded
    // by 'timeout' when one is supplied.
    Mono<Response<Void>> operation = dataLakeFileAsyncClient.appendWithResponse(
        data.toFluxByteBuffer(), fileOffset, data.getLength(), appendOptions, context);
    try {
        return StorageImplUtils.blockWithOptionalTimeout(operation, timeout);
    } catch (UncheckedIOException e) {
        // Log before propagating so the failure is captured by client logging.
        throw LOGGER.logExceptionAsError(e);
    }
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
* <p>By default this method will not overwrite existing data.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flush
* <pre>
* client.flush&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flush
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @return Information about the created resource.
 * @deprecated See {@link #flush(long, boolean)} instead.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
@Deprecated
public PathInfo flush(long position) {
    // Deprecated: delegates with overwrite=false, so the flush fails if data
    // already exists at the destination.
    return flush(position, false);
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flush
* <pre>
* boolean overwrite = true;
* client.flush&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flush
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @param overwrite Whether to overwrite, should data exist on the file.
*
* @return Information about the created resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathInfo flush(long position, boolean overwrite) {
    // Construct the conditions once (previously a second instance was
    // allocated in the !overwrite branch, discarding the first). When
    // overwriting is disallowed, guard with If-None-Match: * so the flush
    // fails if data already exists.
    DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
    if (!overwrite) {
        requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    return flushWithResponse(position, false, false, null, requestConditions, null, Context.NONE).getValue();
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
* boolean retainUncommittedData = false;
* boolean close = false;
* PathHttpHeaders httpHeaders = new PathHttpHeaders&
* .setContentLanguage&
* .setContentType&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
*
* Response<PathInfo> response = client.flushWithResponse&
* requestConditions, timeout, new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @param retainUncommittedData Whether uncommitted data is to be retained after the operation.
* @param close Whether a file changed event raised indicates completion (true) or modification (false).
* @param httpHeaders {@link PathHttpHeaders httpHeaders}
* @param requestConditions {@link DataLakeRequestConditions requestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing the information of the created resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> flushWithResponse(long position, boolean retainUncommittedData, boolean close,
    PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context) {
    // Pack the legacy flat parameters into the options bag and delegate.
    DataLakeFileFlushOptions options = new DataLakeFileFlushOptions();
    options.setUncommittedDataRetained(retainUncommittedData);
    options.setClose(close);
    options.setPathHttpHeaders(httpHeaders);
    options.setRequestConditions(requestConditions);
    return flushWithResponse(position, options, timeout, context);
}
/**
* Flushes (writes) data previously appended to the file through a call to append.
* The previously uploaded data must be contiguous.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
* byte[] contentMd5 = new byte[0]; &
* boolean retainUncommittedData = false;
* boolean close = false;
* PathHttpHeaders httpHeaders = new PathHttpHeaders&
* .setContentLanguage&
* .setContentType&
* DataLakeRequestConditions requestConditions = new DataLakeRequestConditions&
* .setLeaseId&
*
* Integer leaseDuration = 15;
*
* DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions&
* .setUncommittedDataRetained&
* .setClose&
* .setPathHttpHeaders&
* .setRequestConditions&
* .setLeaseAction&
* .setLeaseDuration&
* .setProposedLeaseId&
*
* Response<PathInfo> response = client.flushWithResponse&
* new Context&
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse
*
* <p>For more information, see the
* <a href="https:
* Docs</a></p>
*
* @param position The length of the file after all data has been written.
* @param flushOptions {@link DataLakeFileFlushOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing the information of the created resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathInfo> flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout,
    Context context) {
    // Block on the async flush, bounding the wait when a timeout is supplied.
    return StorageImplUtils.blockWithOptionalTimeout(
        dataLakeFileAsyncClient.flushWithResponse(position, flushOptions, context), timeout);
}
/**
* Reads the entire file into an output stream.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.read
* <pre>
* client.read&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.read
*
* <p>For more information, see the
* <a href="https:
*
* @param stream A non-null {@link OutputStream} instance where the downloaded data will be written.
* @throws UncheckedIOException If an I/O error occurs.
* @throws NullPointerException if {@code stream} is null
*/
public void read(OutputStream stream) {
    // Full-file read: no range, no retry options, no request conditions,
    // no range MD5, no timeout, default pipeline context.
    readWithResponse(stream, null, null, null, false, null, Context.NONE);
}
/**
* Reads a range of bytes from a file into an output stream.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse
* <pre>
* FileRange range = new FileRange&
* DownloadRetryOptions options = new DownloadRetryOptions&
*
* System.out.printf&
* client.readWithResponse&
* timeout, new Context&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param stream A non-null {@link OutputStream} instance where the downloaded data will be written.
* @param range {@link FileRange}
* @param options {@link DownloadRetryOptions}
* @param requestConditions {@link DataLakeRequestConditions}
* @param getRangeContentMd5 Whether the contentMD5 for the specified file range should be returned.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A response containing status code and HTTP headers.
* @throws UncheckedIOException If an I/O error occurs.
* @throws NullPointerException if {@code stream} is null
*/
public FileReadResponse readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options,
    DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context) {
    // Translate DataLake-model arguments to their blob equivalents, delegate
    // to the underlying block blob client, and map the response (and any
    // blob exception) back into DataLake types.
    return DataLakeImplUtils.returnOrConvertException(() ->
        Transforms.toFileReadResponse(blockBlobClient.downloadWithResponse(stream, Transforms.toBlobRange(range),
            Transforms.toBlobDownloadRetryOptions(options), Transforms.toBlobRequestConditions(requestConditions),
            getRangeContentMd5, timeout, context)), LOGGER);
}
/**
* Opens a file input stream to download the file. Locks on ETags.
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream -->
* <pre>
* DataLakeFileOpenInputStreamResult inputStream = client.openInputStream&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream -->
*
* @return An {@link InputStream} object that represents the stream to use for reading from the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public DataLakeFileOpenInputStreamResult openInputStream() {
    // Null options: downstream defaults apply (whole file, ETag locking).
    return openInputStream(null);
}
/**
* Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option
* is not specified.
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
* <pre>
* DataLakeFileInputStreamOptions options = new DataLakeFileInputStreamOptions&
* .setRequestConditions&
* DataLakeFileOpenInputStreamResult streamResult = client.openInputStream&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
*
* @param options {@link DataLakeFileInputStreamOptions}
* @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options) {
    // Delegate with an empty pipeline context.
    return openInputStream(options, Context.NONE);
}
/**
* Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option
* is not specified.
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
* <pre>
* options = new DataLakeFileInputStreamOptions&
* .setRequestConditions&
* DataLakeFileOpenInputStreamResult stream = client.openInputStream&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.openInputStream
*
* @param options {@link DataLakeFileInputStreamOptions}
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link DataLakeFileOpenInputStreamResult} object that contains the stream to use for reading from the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the file. If the file already exists on the service, it
* will be overwritten.
*
* @return The {@link OutputStream} that can be used to write to the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public OutputStream getOutputStream() {
    // Null options: downstream defaults apply (existing data is overwritten).
    return getOutputStream(null);
}
/**
* Creates and opens an output stream to write data to the file. If the file already exists on the service, it
* will be overwritten.
* <p>
 * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
* </p>
*
* @param options {@link DataLakeFileOutputStreamOptions}
* @return The {@link OutputStream} that can be used to write to the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options) {
    // Delegate with a null context.
    return getOutputStream(options, null);
}
/**
* Creates and opens an output stream to write data to the file. If the file already exists on the service, it
* will be overwritten.
* <p>
 * To avoid overwriting, pass "*" to {@link DataLakeRequestConditions#setIfNoneMatch(String)}.
* </p>
*
* @param options {@link DataLakeFileOutputStreamOptions}
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return The {@link OutputStream} that can be used to write to the file.
* @throws DataLakeStorageException If a storage service error occurred.
*/
public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options, Context context) {
    // Translate the DataLake options into the blob options bag and open the
    // stream on the underlying block blob client.
    return blockBlobClient.getBlobOutputStream(Transforms.toBlockBlobOutputStreamOptions(options), context);
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException}
* will be thrown.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param filePath A {@link String} representing the filePath where the downloaded data will be written.
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(String filePath) {
return readToFile(filePath, false);
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>The file will be created and must not exist, if the file already exists a {@link FileAlreadyExistsException}
* will be thrown.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param options {@link ReadToFileOptions}
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(ReadToFileOptions options) {
    // overwrite=false: the destination file must not already exist.
    return readToFile(options, false);
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a
* {@link FileAlreadyExistsException} will be thrown.</p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* boolean overwrite = false; &
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param filePath A {@link String} representing the filePath where the downloaded data will be written.
* @param overwrite Whether to overwrite the file, should the file exist.
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(String filePath, boolean overwrite) {
Set<OpenOption> openOptions = null;
if (overwrite) {
openOptions = new HashSet<>();
openOptions.add(StandardOpenOption.CREATE);
openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
openOptions.add(StandardOpenOption.READ);
openOptions.add(StandardOpenOption.WRITE);
}
return readToFileWithResponse(filePath, null, null, null, null, false, openOptions, null, Context.NONE)
.getValue();
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>If overwrite is set to false, the file will be created and must not exist, if the file already exists a
* {@link FileAlreadyExistsException} will be thrown.</p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFile
* <pre>
* boolean overwrite1 = false; &
* client.readToFile&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFile
*
* <p>For more information, see the
* <a href="https:
*
* @param options {@link ReadToFileOptions}
* @param overwrite Whether to overwrite the file, should the file exist.
* @return The file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PathProperties readToFile(ReadToFileOptions options, boolean overwrite) {
    // Fail fast: 'options' is dereferenced below and again inside
    // readToFileWithResponse; a null previously surfaced as an unhelpful
    // NullPointerException deep in the call chain.
    Objects.requireNonNull(options, "'options' cannot be null.");
    if (overwrite) {
        // Overwriting requires open options that allow truncating an
        // existing destination file.
        Set<OpenOption> openOptions = new HashSet<>();
        openOptions.add(StandardOpenOption.CREATE);
        openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
        openOptions.add(StandardOpenOption.READ);
        openOptions.add(StandardOpenOption.WRITE);
        options.setOpenOptions(openOptions);
    }
    return readToFileWithResponse(options, null, Context.NONE).getValue();
}
/**
* Reads the entire file into a file specified by the path.
*
* <p>By default the file will be created and must not exist, if the file already exists a
* {@link FileAlreadyExistsException} will be thrown. To override this behavior, provide appropriate
* {@link OpenOption OpenOptions} </p>
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse
* <pre>
* FileRange fileRange = new FileRange&
* DownloadRetryOptions downloadRetryOptions = new DownloadRetryOptions&
* Set<OpenOption> openOptions = new HashSet<>&
* StandardOpenOption.WRITE, StandardOpenOption.READ&
*
* client.readToFileWithResponse&
* downloadRetryOptions, null, false, openOptions, timeout, new Context&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param filePath A {@link String} representing the filePath where the downloaded data will be written.
* @param range {@link FileRange}
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to download to file. Number of parallel
* transfers parameter is ignored.
* @param downloadRetryOptions {@link DownloadRetryOptions}
* @param requestConditions {@link DataLakeRequestConditions}
* @param rangeGetContentMd5 Whether the contentMD5 for the specified file range should be returned.
* @param openOptions {@link OpenOption OpenOptions} to use to configure how to open or create the file.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A response containing the file properties and metadata.
* @throws UncheckedIOException If an I/O error occurs.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathProperties> readToFileWithResponse(String filePath, FileRange range,
    ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions,
    DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions,
    Duration timeout, Context context) {
    return DataLakeImplUtils.returnOrConvertException(() -> {
        // Translate the DataLake-model arguments into the blob options bag
        // and delegate the download to the underlying block blob client.
        BlobDownloadToFileOptions blobOptions = new BlobDownloadToFileOptions(filePath)
            .setRange(Transforms.toBlobRange(range))
            .setParallelTransferOptions(parallelTransferOptions)
            .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(downloadRetryOptions))
            .setRequestConditions(Transforms.toBlobRequestConditions(requestConditions))
            .setRetrieveContentRangeMd5(rangeGetContentMd5)
            .setOpenOptions(openOptions);
        Response<BlobProperties> blobResponse =
            blockBlobClient.downloadToFileWithResponse(blobOptions, timeout, context);
        // Map the blob response/properties back into DataLake types.
        return new SimpleResponse<>(blobResponse, Transforms.toPathProperties(blobResponse.getValue(), blobResponse));
    }, LOGGER);
}
/**
 * Reads the entire file into a file specified by the path.
 *
 * <p>By default the destination file is created and must not already exist; if it does, a
 * {@link FileAlreadyExistsException} is thrown. To override this behavior, provide appropriate
 * {@link OpenOption OpenOptions} on the options object.</p>
 *
 * @param options {@link ReadToFileOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response containing the file properties and metadata.
 * @throws UncheckedIOException If an I/O error occurs.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<PathProperties> readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context) {
    // Resolve the UPN header preference up front; the lambda below needs an effectively-final context.
    Context finalContext = BuilderHelper.addUpnHeader(() -> (options == null) ? null : options.isUpn(), context);
    return DataLakeImplUtils.returnOrConvertException(() -> {
        // Translate the Data Lake read options onto the underlying blob download-to-file call.
        BlobDownloadToFileOptions blobOptions = new BlobDownloadToFileOptions(options.getFilePath())
            .setRange(Transforms.toBlobRange(options.getRange()))
            .setParallelTransferOptions(options.getParallelTransferOptions())
            .setDownloadRetryOptions(Transforms.toBlobDownloadRetryOptions(options.getDownloadRetryOptions()))
            .setRequestConditions(Transforms.toBlobRequestConditions(options.getDataLakeRequestConditions()))
            .setRetrieveContentRangeMd5(options.isRangeGetContentMd5())
            .setOpenOptions(options.getOpenOptions());
        Response<BlobProperties> blobResponse =
            blockBlobClient.downloadToFileWithResponse(blobOptions, timeout, finalContext);
        // Re-wrap the blob response so callers see Data Lake path properties.
        return new SimpleResponse<>(blobResponse, Transforms.toPathProperties(blobResponse.getValue(), blobResponse));
    }, LOGGER);
}
/**
 * Moves the file to another location within the file system.
 * For more information see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create">Azure Docs</a>.
 *
 * @param destinationFileSystem The file system of the destination within the account.
 * {@code null} for the current file system.
 * @param destinationPath Relative path from the file system to rename the file to, excludes the file system name.
 * For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path
 * in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt"
 * @return A {@link DataLakeFileClient} used to interact with the new file created.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DataLakeFileClient rename(String destinationFileSystem, String destinationPath) {
    // Delegate to the full overload with no request conditions, no timeout and no context.
    Response<DataLakeFileClient> response =
        renameWithResponse(destinationFileSystem, destinationPath, null, null, null, null);
    return response.getValue();
}
/**
 * Moves the file to another location within the file system.
 * For more information, see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create">Azure Docs</a>.
 *
 * @param destinationFileSystem The file system of the destination within the account.
 * {@code null} for the current file system.
 * @param destinationPath Relative path from the file system to rename the file to, excludes the file system name.
 * For example if you want to move a file with fileSystem = "myfilesystem", path = "mydir/hello.txt" to another path
 * in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = "newdir/hi.txt"
 * @param sourceRequestConditions {@link DataLakeRequestConditions} against the source.
 * @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} whose value is a {@link DataLakeFileClient} used to interact with the renamed file.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<DataLakeFileClient> renameWithResponse(String destinationFileSystem, String destinationPath,
DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions,
Duration timeout, Context context) {
// Rename through the async client; map the async result onto a synchronous DataLakeFileClient
// backed by a freshly built blocking block-blob client.
Mono<Response<DataLakeFileClient>> response =
dataLakeFileAsyncClient.renameWithResponse(destinationFileSystem, destinationPath,
sourceRequestConditions, destinationRequestConditions, context)
.map(asyncResponse ->
new SimpleResponse<>(asyncResponse.getRequest(), asyncResponse.getStatusCode(),
asyncResponse.getHeaders(),
new DataLakeFileClient(new DataLakeFileAsyncClient(asyncResponse.getValue()),
new SpecializedBlobClientBuilder()
.blobAsyncClient(asyncResponse.getValue().blockBlobAsyncClient)
.buildBlockBlobClient())));
// Block on the mono (bounded by timeout when one is given).
Response<DataLakeFileClient> resp = StorageImplUtils.blockWithOptionalTimeout(response, timeout);
// NOTE(review): the value is wrapped in a second DataLakeFileClient here — presumably a copy
// constructor; confirm the double wrap is intentional rather than returning resp directly.
return new SimpleResponse<>(resp, new DataLakeFileClient(resp.getValue()));
}
/**
 * Opens an input stream to query the file.
 *
 * <p>For more information, see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/query-blob-contents">Azure Docs</a></p>
 *
 * @param expression The query expression.
 * @return An <code>InputStream</code> object that represents the stream to use for reading the query response.
 */
public InputStream openQueryInputStream(String expression) {
    // Wrap the expression in default query options and delegate to the response overload.
    FileQueryOptions options = new FileQueryOptions(expression);
    return openQueryInputStreamWithResponse(options).getValue();
}
/**
 * Opens an input stream to query the file.
 *
 * <p>For more information, see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/query-blob-contents">Azure Docs</a></p>
 *
 * @param queryOptions {@link FileQueryOptions The query options}.
 * @return A response containing status code and HTTP headers including an <code>InputStream</code> object
 * that represents the stream to use for reading the query response.
 */
public Response<InputStream> openQueryInputStreamWithResponse(FileQueryOptions queryOptions) {
    // Block on the async query; this call performs network I/O.
    FileQueryAsyncResponse asyncResponse = dataLakeFileAsyncClient.queryWithResponse(queryOptions).block();
    if (asyncResponse == null) {
        throw LOGGER.logExceptionAsError(new IllegalStateException("Query response cannot be null"));
    }
    // Expose the response body flux to the caller as a blocking InputStream.
    InputStream bodyStream = new FluxInputStream(asyncResponse.getValue());
    return new ResponseBase<>(asyncResponse.getRequest(), asyncResponse.getStatusCode(),
        asyncResponse.getHeaders(), bodyStream, asyncResponse.getDeserializedHeaders());
}
/**
 * Queries an entire file into an output stream.
 *
 * <p>For more information, see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/query-blob-contents">Azure Docs</a></p>
 *
 * @param stream A non-null {@link OutputStream} instance where the downloaded data will be written.
 * @param expression The query expression.
 * @throws UncheckedIOException If an I/O error occurs.
 * @throws NullPointerException if {@code stream} is null.
 */
public void query(OutputStream stream, String expression) {
    // Delegate to the response overload with default timeout/context, discarding the response.
    FileQueryOptions options = new FileQueryOptions(expression, stream);
    queryWithResponse(options, null, Context.NONE);
}
/**
 * Queries an entire file into an output stream.
 *
 * <p>For more information, see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/query-blob-contents">Azure Docs</a></p>
 *
 * @param queryOptions {@link FileQueryOptions The query options}.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response containing status code and HTTP headers.
 * @throws UncheckedIOException If an I/O error occurs.
 * @throws NullPointerException if {@code stream} is null.
 */
public FileQueryResponse queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context) {
    return DataLakeImplUtils.returnOrConvertException(() -> {
        // Translate onto the blob query API, then map the response back to the Data Lake shape.
        BlobQueryOptions blobOptions = Transforms.toBlobQueryOptions(queryOptions);
        BlobQueryResponse blobResponse = blockBlobClient.queryWithResponse(blobOptions, timeout, context);
        return Transforms.toFileQueryResponse(blobResponse);
    }, LOGGER);
}
/**
 * Schedules the file for deletion.
 *
 * @param options Schedule deletion parameters.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void scheduleDeletion(FileScheduleDeletionOptions options) {
    // Delegate to the response overload, discarding the returned status/headers.
    scheduleDeletionWithResponse(options, null, Context.NONE);
}
/**
 * Schedules the file for deletion.
 *
 * @param options Schedule deletion parameters.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response containing status code and HTTP headers.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> scheduleDeletionWithResponse(FileScheduleDeletionOptions options,
    Duration timeout, Context context) {
    // Block on the async operation, bounded by the timeout when one is supplied.
    Mono<Response<Void>> asyncResponse =
        dataLakeFileAsyncClient.scheduleDeletionWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(asyncResponse, timeout);
}
} |
```suggestion service = new ScheduledThreadPoolExecutor(1, r -> new Thread(r, name() + "-worker")); ``` | public Maintainer(Controller controller, Duration interval, JobControl jobControl, String name, Set<SystemName> activeSystems) {
if (interval.isNegative() || interval.isZero())
throw new IllegalArgumentException("Interval must be positive, but was " + interval);
this.controller = controller;
this.maintenanceInterval = interval;
this.jobControl = jobControl;
this.name = name;
this.activeSystems = Set.copyOf(activeSystems);
service = new ScheduledThreadPoolExecutor(1, r -> new Thread(r, getClass().getSimpleName() + "-worker"));
long delay = staggeredDelay(controller.curator().cluster(), controller.hostname(), controller.clock().instant(), interval);
service.scheduleAtFixedRate(this, delay, interval.toMillis(), TimeUnit.MILLISECONDS);
jobControl.started(name());
} | service = new ScheduledThreadPoolExecutor(1, r -> new Thread(r, getClass().getSimpleName() + "-worker")); | public Maintainer(Controller controller, Duration interval, JobControl jobControl, String name, Set<SystemName> activeSystems) {
if (interval.isNegative() || interval.isZero())
throw new IllegalArgumentException("Interval must be positive, but was " + interval);
this.controller = controller;
this.maintenanceInterval = interval;
this.jobControl = jobControl;
this.name = name;
this.activeSystems = Set.copyOf(activeSystems);
service = new ScheduledThreadPoolExecutor(1, r -> new Thread(r, name() + "-worker"));
long delay = staggeredDelay(controller.curator().cluster(), controller.hostname(), controller.clock().instant(), interval);
service.scheduleAtFixedRate(this, delay, interval.toMillis(), TimeUnit.MILLISECONDS);
jobControl.started(name());
} | class Maintainer extends AbstractComponent implements Runnable {
protected static final Logger log = Logger.getLogger(Maintainer.class.getName());
private final Controller controller;
private final Duration maintenanceInterval;
private final JobControl jobControl;
private final ScheduledExecutorService service;
private final String name;
/** The systems in which this maintainer should run */
private final Set<SystemName> activeSystems;
public Maintainer(Controller controller, Duration interval, JobControl jobControl) {
this(controller, interval, jobControl, null, EnumSet.allOf(SystemName.class));
}
protected Controller controller() { return controller; }
@Override
public void run() {
    try {
        // Do nothing when this maintainer is not configured to run in the current system.
        if ( ! activeSystems.contains(controller.system())) {
            return;
        }
        if (jobControl.isActive(name())) {
            // Cluster-wide lock so only one controller executes this job at a time.
            try (Lock lock = jobControl.curator().lockMaintenanceJob(name())) {
                maintain();
            }
        }
    }
    catch (TimeoutException e) {
        // Lock acquisition timed out — presumably another controller is running the job;
        // intentionally skip this cycle without logging. TODO confirm this is the intent.
    }
    catch (Throwable t) {
        // Catch everything so one failed run never kills the scheduled executor thread.
        log.log(Level.WARNING, "Maintainer " + name() + " failed. Will retry in " +
                maintenanceInterval + ": " + Exceptions.toMessageString(t));
    }
}
@Override
public void deconstruct() {
    // Bounded, graceful shutdown of the scheduler thread driving this maintainer.
    var timeout = Duration.ofSeconds(30);
    service.shutdown();
    try {
        if (!service.awaitTermination(timeout.toMillis(), TimeUnit.MILLISECONDS)) {
            log.log(Level.WARNING, "Maintainer " + name() + " failed to shutdown " +
                    "within " + timeout);
        }
    } catch (InterruptedException e) {
        // Restore the interrupt status before rethrowing so callers (and the container)
        // can still observe that this thread was interrupted.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
/** Called once each time this maintenance job should run */
protected abstract void maintain();

/** Returns the interval at which this job runs. */
public Duration maintenanceInterval() { return maintenanceInterval; }

/** Returns the explicit name given at construction, or the simple class name when none was given. */
public final String name() {
    return name == null ? this.getClass().getSimpleName() : name;
}

/** Returns the name of this */
@Override
public final String toString() {
    return name();
}
/**
 * Returns the delay in milliseconds until this host should first run the job, staggering
 * the cluster members evenly across one interval. Hosts not in the cluster list wait a
 * full interval.
 */
static long staggeredDelay(List<HostName> cluster, HostName host, Instant now, Duration interval) {
    if ( ! cluster.contains(host))
        return interval.toMillis();
    // Each host gets an evenly spaced offset within the interval, anchored to wall-clock time.
    long offset = cluster.indexOf(host) * interval.toMillis() / cluster.size();
    return Math.floorMod(offset - now.toEpochMilli(), interval.toMillis());
}
} | class Maintainer extends AbstractComponent implements Runnable {
protected static final Logger log = Logger.getLogger(Maintainer.class.getName());
private final Controller controller;
private final Duration maintenanceInterval;
private final JobControl jobControl;
private final ScheduledExecutorService service;
private final String name;
/** The systems in which this maintainer should run */
private final Set<SystemName> activeSystems;
public Maintainer(Controller controller, Duration interval, JobControl jobControl) {
this(controller, interval, jobControl, null, EnumSet.allOf(SystemName.class));
}
protected Controller controller() { return controller; }
@Override
public void run() {
try {
if ( ! activeSystems.contains(controller.system())) {
return;
}
if (jobControl.isActive(name())) {
try (Lock lock = jobControl.curator().lockMaintenanceJob(name())) {
maintain();
}
}
}
catch (TimeoutException e) {
}
catch (Throwable t) {
log.log(Level.WARNING, "Maintainer " + name() + " failed. Will retry in " +
maintenanceInterval + ": " + Exceptions.toMessageString(t));
}
}
@Override
public void deconstruct() {
var timeout = Duration.ofSeconds(30);
service.shutdown();
try {
if (!service.awaitTermination(timeout.toMillis(), TimeUnit.MILLISECONDS)) {
log.log(Level.WARNING, "Maintainer " + name() + " failed to shutdown " +
"within " + timeout);
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
/** Called once each time this maintenance job should run */
protected abstract void maintain();
public Duration maintenanceInterval() { return maintenanceInterval; }
public final String name() {
return name == null ? this.getClass().getSimpleName() : name;
}
/** Returns the name of this */
@Override
public final String toString() {
return name();
}
static long staggeredDelay(List<HostName> cluster, HostName host, Instant now, Duration interval) {
if ( ! cluster.contains(host))
return interval.toMillis();
long offset = cluster.indexOf(host) * interval.toMillis() / cluster.size();
return Math.floorMod(offset - now.toEpochMilli(), interval.toMillis());
}
} |
Does it make sense to have this.retryPolicy set to new Retrypolicy() in the constructor of the class. And in the method `public ChatClientBuilder retryPolicy(RetryPolicy retryPolicy)`, it just reset the retryPolicy attribute This way, we can avoid the null checks in the code. | private void applyRequiredPolicies(List<HttpPipelinePolicy> policies) {
policies.add(getUserAgentPolicy());
policies.add(this.retryPolicy == null ? new RetryPolicy() : this.retryPolicy);
policies.add(new CookiePolicy());
policies.add(new HttpLoggingPolicy(logOptions));
} | policies.add(this.retryPolicy == null ? new RetryPolicy() : this.retryPolicy); | private void applyRequiredPolicies(List<HttpPipelinePolicy> policies) {
policies.add(getUserAgentPolicy());
policies.add(this.retryPolicy == null ? new RetryPolicy() : this.retryPolicy);
policies.add(new CookiePolicy());
policies.add(new HttpLoggingPolicy(logOptions));
} | class ChatClientBuilder {
private String endpoint;
private HttpClient httpClient;
private CommunicationTokenCredential communicationTokenCredential;
private final List<HttpPipelinePolicy> customPolicies = new ArrayList<HttpPipelinePolicy>();
private HttpLogOptions logOptions = new HttpLogOptions();
private HttpPipeline httpPipeline;
private Configuration configuration;
private RetryPolicy retryPolicy;
private static final String APP_CONFIG_PROPERTIES = "azure-communication-chat.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
/**
* Set endpoint of the service
*
* @param endpoint url of the service
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder endpoint(String endpoint) {
this.endpoint = Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
return this;
}
/**
* Set HttpClient to use
*
* @param httpClient HttpClient to use
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder httpClient(HttpClient httpClient) {
this.httpClient = Objects.requireNonNull(httpClient, "'httpClient' cannot be null.");
return this;
}
/**
* Set a token credential for authorization
*
* @param communicationTokenCredential valid token credential as a string
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder credential(CommunicationTokenCredential communicationTokenCredential) {
this.communicationTokenCredential = Objects.requireNonNull(
communicationTokenCredential, "'communicationTokenCredential' cannot be null.");
return this;
}
/**
* Apply additional {@link HttpPipelinePolicy}
*
* @param customPolicy HttpPipelinePolicy objects to be applied after
* AzureKeyCredentialPolicy, UserAgentPolicy, RetryPolicy, and CookiePolicy
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder addPolicy(HttpPipelinePolicy customPolicy) {
this.customPolicies.add(Objects.requireNonNull(customPolicy, "'customPolicy' cannot be null."));
return this;
}
/**
* Sets the {@link HttpLogOptions} for service requests.
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder httpLogOptions(HttpLogOptions logOptions) {
this.logOptions = Objects.requireNonNull(logOptions, "'logOptions' cannot be null.");
return this;
}
/**
 * Sets the {@link ChatServiceVersion} that is used when making API requests.
 * <p>
 * If a service version is not provided, the service version that will be used will be the latest known service
 * version based on the version of the client library being used. If no service version is specified, updating to a
 * newer version of the client library will have the result of potentially moving to a newer service version.
 * <p>
 * Targeting a specific service version may also mean that the service will return an error for newer APIs.
 *
 * @param version {@link ChatServiceVersion} of the service to be used when making requests.
 * @return the updated ChatClientBuilder object
 */
public ChatClientBuilder serviceVersion(ChatServiceVersion version) {
    // NOTE(review): the version argument is currently ignored — the builder always targets the
    // latest known service version. Confirm this no-op is intentional.
    return this;
}
/**
* Sets the {@link HttpPipeline} to use for the service client.
*
* If {@code pipeline} is set, all other settings are ignored, aside from {@link
*
* @param httpPipeline HttpPipeline to use for sending service requests and receiving responses.
* @return the updated BlobServiceClientBuilder object
*/
public ChatClientBuilder pipeline(HttpPipeline httpPipeline) {
this.httpPipeline = httpPipeline;
return this;
}
/**
* Sets the configuration object used to retrieve environment configuration values during building of the client.
*
* @param configuration Configuration store used to retrieve environment configurations.
* @return the updated BlobServiceClientBuilder object
*/
public ChatClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
 * Sets the {@link RetryPolicy} that is used when each request is sent.
 * <p>
 * The default retry policy will be used in the pipeline, if not provided.
 *
 * @param retryPolicy User's retry policy applied to each request.
 * @return The updated {@link ChatClientBuilder} object.
 * @throws NullPointerException If the specified {@code retryPolicy} is null.
 */
public ChatClientBuilder retryPolicy(RetryPolicy retryPolicy) {
    // NOTE(review): consider initializing this.retryPolicy to new RetryPolicy() at construction
    // so applyRequiredPolicies can drop its null check and this method only overrides the default.
    this.retryPolicy = Objects.requireNonNull(retryPolicy, "The retry policy cannot be null");
    return this;
}
/**
 * Create synchronous client applying CommunicationTokenCredential, UserAgentPolicy,
 * RetryPolicy, and CookiePolicy.
 * Additional HttpPolicies specified by additionalPolicies will be applied after them
 *
 * @return ChatClient instance
 */
public ChatClient buildClient() {
    // The sync client is a thin wrapper over the async client built from this builder's state.
    return new ChatClient(buildAsyncClient());
}
/**
 * Create asynchronous client applying CommunicationTokenCredential, UserAgentPolicy,
 * RetryPolicy, and CookiePolicy.
 * Additional HttpPolicies specified by additionalPolicies will be applied after them
 *
 * @return ChatAsyncClient instance
 */
public ChatAsyncClient buildAsyncClient() {
    Objects.requireNonNull(endpoint);
    HttpPipeline pipeline = httpPipeline;
    if (pipeline == null) {
        // No user-supplied pipeline: assemble one from the credential and the http client.
        Objects.requireNonNull(communicationTokenCredential);
        Objects.requireNonNull(httpClient);
        CommunicationBearerTokenCredential tokenCredential =
            new CommunicationBearerTokenCredential(communicationTokenCredential);
        pipeline = createHttpPipeline(httpClient,
            new BearerTokenAuthenticationPolicy(tokenCredential, ""),
            customPolicies);
    }
    AzureCommunicationChatServiceImplBuilder clientBuilder = new AzureCommunicationChatServiceImplBuilder();
    clientBuilder.endpoint(endpoint)
        .pipeline(pipeline);
    return new ChatAsyncClient(clientBuilder.buildClient());
}
// Builds the HTTP pipeline: authorization policy first, then the required policies
// (user agent, retry, cookie, logging), then any caller-supplied policies.
private HttpPipeline createHttpPipeline(HttpClient httpClient,
                                        HttpPipelinePolicy authorizationPolicy,
                                        List<HttpPipelinePolicy> additionalPolicies) {
    List<HttpPipelinePolicy> policies = new ArrayList<>();
    policies.add(authorizationPolicy);
    applyRequiredPolicies(policies);
    if (additionalPolicies != null && !additionalPolicies.isEmpty()) {
        policies.addAll(additionalPolicies);
    }
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
/*
 * Creates a {@link UserAgentPolicy} using the default chat service module name and version.
 *
 * @return The default {@link UserAgentPolicy} for the module.
 */
private UserAgentPolicy getUserAgentPolicy() {
    // Name/version come from the generated properties file; fall back to placeholders if absent.
    Map<String, String> properties = CoreUtils.getProperties(APP_CONFIG_PROPERTIES);
    String sdkName = properties.getOrDefault(SDK_NAME, "UnknownName");
    String sdkVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
    return new UserAgentPolicy(logOptions.getApplicationId(), sdkName, sdkVersion, configuration);
}
} | class ChatClientBuilder {
private String endpoint;
private HttpClient httpClient;
private CommunicationTokenCredential communicationTokenCredential;
private final List<HttpPipelinePolicy> customPolicies = new ArrayList<HttpPipelinePolicy>();
private HttpLogOptions logOptions = new HttpLogOptions();
private HttpPipeline httpPipeline;
private Configuration configuration;
private RetryPolicy retryPolicy;
private static final String APP_CONFIG_PROPERTIES = "azure-communication-chat.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
/**
* Set endpoint of the service
*
* @param endpoint url of the service
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder endpoint(String endpoint) {
this.endpoint = Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
return this;
}
/**
* Set HttpClient to use
*
* @param httpClient HttpClient to use
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder httpClient(HttpClient httpClient) {
this.httpClient = Objects.requireNonNull(httpClient, "'httpClient' cannot be null.");
return this;
}
/**
* Set a token credential for authorization
*
* @param communicationTokenCredential valid token credential as a string
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder credential(CommunicationTokenCredential communicationTokenCredential) {
this.communicationTokenCredential = Objects.requireNonNull(
communicationTokenCredential, "'communicationTokenCredential' cannot be null.");
return this;
}
/**
* Apply additional {@link HttpPipelinePolicy}
*
* @param customPolicy HttpPipelinePolicy objects to be applied after
* AzureKeyCredentialPolicy, UserAgentPolicy, RetryPolicy, and CookiePolicy
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder addPolicy(HttpPipelinePolicy customPolicy) {
this.customPolicies.add(Objects.requireNonNull(customPolicy, "'customPolicy' cannot be null."));
return this;
}
/**
* Sets the {@link HttpLogOptions} for service requests.
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder httpLogOptions(HttpLogOptions logOptions) {
this.logOptions = Objects.requireNonNull(logOptions, "'logOptions' cannot be null.");
return this;
}
/**
* Sets the {@link ChatServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version of the client library will have the result of potentially moving to a newer service version.
* <p>
* Targeting a specific service version may also mean that the service will return an error for newer APIs.
*
* @param version {@link ChatServiceVersion} of the service to be used when making requests.
* @return the updated ChatClientBuilder object
*/
public ChatClientBuilder serviceVersion(ChatServiceVersion version) {
return this;
}
/**
* Sets the {@link HttpPipeline} to use for the service client.
*
* If {@code pipeline} is set, all other settings are ignored, aside from {@link
*
* @param httpPipeline HttpPipeline to use for sending service requests and receiving responses.
* @return the updated BlobServiceClientBuilder object
*/
public ChatClientBuilder pipeline(HttpPipeline httpPipeline) {
this.httpPipeline = httpPipeline;
return this;
}
/**
* Sets the configuration object used to retrieve environment configuration values during building of the client.
*
* @param configuration Configuration store used to retrieve environment configurations.
* @return the updated BlobServiceClientBuilder object
*/
public ChatClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets the {@link RetryPolicy} that is used when each request is sent.
*
* @param retryPolicy User's retry policy applied to each request.
* @return The updated {@link ChatClientBuilder} object.
* @throws NullPointerException If the specified {@code retryPolicy} is null.
*/
public ChatClientBuilder retryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = Objects.requireNonNull(retryPolicy, "The retry policy cannot be null");
return this;
}
/**
* Create synchronous client applying CommunicationTokenCredential, UserAgentPolicy,
* RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return ChatClient instance
*/
public ChatClient buildClient() {
ChatAsyncClient asyncClient = buildAsyncClient();
return new ChatClient(asyncClient);
}
/**
* Create asynchronous client applying CommunicationTokenCredential, UserAgentPolicy,
* RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return ChatAsyncClient instance
*/
public ChatAsyncClient buildAsyncClient() {
Objects.requireNonNull(endpoint);
HttpPipeline pipeline;
if (httpPipeline != null) {
pipeline = httpPipeline;
} else {
Objects.requireNonNull(communicationTokenCredential);
Objects.requireNonNull(httpClient);
CommunicationBearerTokenCredential tokenCredential =
new CommunicationBearerTokenCredential(communicationTokenCredential);
pipeline = createHttpPipeline(httpClient,
new BearerTokenAuthenticationPolicy(tokenCredential, ""),
customPolicies);
}
AzureCommunicationChatServiceImplBuilder clientBuilder = new AzureCommunicationChatServiceImplBuilder();
clientBuilder.endpoint(endpoint)
.pipeline(pipeline);
return new ChatAsyncClient(clientBuilder.buildClient());
}
/*
 * Builds the HTTP pipeline. Policy order: authorization first, then the
 * required default policies, then any caller-supplied additional policies.
 */
private HttpPipeline createHttpPipeline(HttpClient httpClient,
    HttpPipelinePolicy authorizationPolicy,
    List<HttpPipelinePolicy> additionalPolicies) {
    // Diamond operator instead of the redundant explicit type argument.
    List<HttpPipelinePolicy> policies = new ArrayList<>();
    policies.add(authorizationPolicy);
    applyRequiredPolicies(policies);
    // isEmpty() is the idiomatic emptiness check (vs. size() > 0).
    if (additionalPolicies != null && !additionalPolicies.isEmpty()) {
        policies.addAll(additionalPolicies);
    }
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
/*
* Creates a {@link UserAgentPolicy} using the default chat service module name and version.
*
* @return The default {@link UserAgentPolicy} for the module.
*/
private UserAgentPolicy getUserAgentPolicy() {
// SDK name/version come from the packaged properties file; fall back to
// placeholders when the expected keys are absent.
Map<String, String> properties = CoreUtils.getProperties(APP_CONFIG_PROPERTIES);
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
return new UserAgentPolicy(
logOptions.getApplicationId(), clientName, clientVersion, configuration);
}
} |
I don't think we need this line, since already checked in the `while`. | public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getTypeName());
sb.append(getSizeString());
BType element = elementType;
// The while condition already guarantees 'element' is a BArrayType, so the
// former inner instanceof check + break was redundant: advancing and
// re-testing appends exactly the same sequence of size strings.
while (element instanceof BArrayType) {
    sb.append(((BArrayType) element).getSizeString());
    element = ((BArrayType) element).elementType;
}
return sb.toString();
} | if (!(((BArrayType) element).elementType instanceof BArrayType)) { | public String toString() {
StringBuilder sb = new StringBuilder();
BType elem = elementType;
sb.append(getSizeString());
// Collect the "[n]"/"[]" suffix of every nested array dimension.
while (elem.getTag() == TypeTags.ARRAY_TAG) {
    BArrayType nested = (BArrayType) elem;
    sb.append(nested.getSizeString());
    elem = nested.elementType;
}
// Union element types are parenthesized, e.g. "(int|string)[]".
String prefix = (elem.getTag() == TypeTags.UNION_TAG)
    ? "(" + elem.toString() + ")"
    : elem.toString();
return sb.insert(0, prefix).toString();
} | class BArrayType extends BType {
private BType elementType;
private int dimensions = 1;
private int size = -1;
private boolean hasFillerValue;
private ArrayState state = ArrayState.UNSEALED;
public BArrayType(BType elementType) {
// Creates an open (unsealed) array of the given element type.
super(null, null, ArrayValue.class);
this.elementType = elementType;
// Nested arrays contribute one extra dimension per nesting level.
if (elementType instanceof BArrayType) {
dimensions = ((BArrayType) elementType).getDimensions() + 1;
}
hasFillerValue = TypeChecker.hasFillerValue(this.elementType);
}
public BArrayType(BType elemType, int size) {
// Creates an array of the given element type, sealed to 'size' elements
// when size != -1; -1 keeps the array open.
super(null, null, ArrayValue.class);
this.elementType = elemType;
if (elementType instanceof BArrayType) {
dimensions = ((BArrayType) elementType).getDimensions() + 1;
}
if (size != -1) {
state = ArrayState.CLOSED_SEALED;
this.size = size;
}
hasFillerValue = TypeChecker.hasFillerValue(this.elementType);
}
public BType getElementType() {
return elementType;
}
@Override
public <V extends Object> V getZeroValue() {
if (size == -1) {
return getEmptyValue();
}
int tag = elementType.getTag();
switch (tag) {
case TypeTags.INT_TAG:
case TypeTags.FLOAT_TAG:
case TypeTags.BOOLEAN_TAG:
case TypeTags.STRING_TAG:
case TypeTags.BYTE_TAG:
case TypeTags.DECIMAL_TAG:
return (V) new ArrayValueImpl(new BArrayType(elementType), size);
case TypeTags.ARRAY_TAG:
default:
return (V) new ArrayValueImpl(this);
}
}
@Override
public <V extends Object> V getEmptyValue() {
int tag = elementType.getTag();
switch (tag) {
case TypeTags.INT_TAG:
case TypeTags.FLOAT_TAG:
case TypeTags.DECIMAL_TAG:
case TypeTags.BOOLEAN_TAG:
case TypeTags.STRING_TAG:
case TypeTags.BYTE_TAG:
return (V) new ArrayValueImpl(new BArrayType(elementType));
default:
return (V) new ArrayValueImpl(this);
}
}
@Override
public int getTag() {
return TypeTags.ARRAY_TAG;
}
@Override
public int hashCode() {
return toString().hashCode();
}
@Override
public boolean equals(Object obj) {
    // Arrays are equal when their element types match and, for sealed arrays,
    // their sizes match as well.
    if (!(obj instanceof BArrayType)) {
        return false;
    }
    BArrayType that = (BArrayType) obj;
    if (that.state == ArrayState.CLOSED_SEALED && this.size != that.size) {
        return false;
    }
    return this.elementType.equals(that.elementType);
}
// NOTE: the former @Override annotation was removed — a private method cannot
// override anything, and annotating it @Override is a compile error.
private String getSizeString() {
    // Sealed arrays render as "[<size>]"; open arrays as "[]".
    return size != -1 ? "[" + size + "]" : "[]";
}
private String getTypeName() {
if (elementType instanceof BMapType) {
return elementType.toString();
}
if (elementType instanceof BArrayType) {
return ((BArrayType) elementType).getTypeName();
}
return elementType.getName();
}
public int getDimensions() {
return this.dimensions;
}
public int getSize() {
return size;
}
public boolean hasFillerValue() {
return hasFillerValue;
}
public ArrayState getState() {
return state;
}
@Override
public boolean isAnydata() {
return this.elementType.isPureType();
}
} | class BArrayType extends BType {
private BType elementType;
private int dimensions = 1;
private int size = -1;
private boolean hasFillerValue;
private ArrayState state = ArrayState.UNSEALED;
public BArrayType(BType elementType) {
super(null, null, ArrayValue.class);
this.elementType = elementType;
if (elementType instanceof BArrayType) {
dimensions = ((BArrayType) elementType).getDimensions() + 1;
}
hasFillerValue = TypeChecker.hasFillerValue(this.elementType);
}
public BArrayType(BType elemType, int size) {
super(null, null, ArrayValue.class);
this.elementType = elemType;
if (elementType instanceof BArrayType) {
dimensions = ((BArrayType) elementType).getDimensions() + 1;
}
if (size != -1) {
state = ArrayState.CLOSED_SEALED;
this.size = size;
}
hasFillerValue = TypeChecker.hasFillerValue(this.elementType);
}
public BType getElementType() {
return elementType;
}
@Override
public <V extends Object> V getZeroValue() {
if (size == -1) {
return getEmptyValue();
}
int tag = elementType.getTag();
switch (tag) {
case TypeTags.INT_TAG:
case TypeTags.FLOAT_TAG:
case TypeTags.BOOLEAN_TAG:
case TypeTags.STRING_TAG:
case TypeTags.BYTE_TAG:
case TypeTags.DECIMAL_TAG:
return (V) new ArrayValueImpl(new BArrayType(elementType), size);
case TypeTags.ARRAY_TAG:
default:
return (V) new ArrayValueImpl(this);
}
}
@Override
public <V extends Object> V getEmptyValue() {
int tag = elementType.getTag();
switch (tag) {
case TypeTags.INT_TAG:
case TypeTags.FLOAT_TAG:
case TypeTags.DECIMAL_TAG:
case TypeTags.BOOLEAN_TAG:
case TypeTags.STRING_TAG:
case TypeTags.BYTE_TAG:
return (V) new ArrayValueImpl(new BArrayType(elementType));
default:
return (V) new ArrayValueImpl(this);
}
}
@Override
public int getTag() {
return TypeTags.ARRAY_TAG;
}
@Override
public int hashCode() {
return toString().hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof BArrayType) {
BArrayType other = (BArrayType) obj;
if (other.state == ArrayState.CLOSED_SEALED && this.size != other.size) {
return false;
}
return this.elementType.equals(other.elementType);
}
return false;
}
// NOTE: the former @Override annotation was removed — a private method cannot
// override anything, and annotating it @Override is a compile error.
private String getSizeString() {
    // Sealed arrays render as "[<size>]"; open arrays as "[]".
    return size != -1 ? "[" + size + "]" : "[]";
}
public int getDimensions() {
return this.dimensions;
}
public int getSize() {
return size;
}
public boolean hasFillerValue() {
return hasFillerValue;
}
public ArrayState getState() {
return state;
}
@Override
public boolean isAnydata() {
return this.elementType.isPureType();
}
} |
@FroMage you could also check that the bean has all the expected types, see [`ResourceBeanTypeTest`](https://github.com/quarkusio/quarkus/blob/main/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/beanTypes/ResourceBeanTypeTest.java#L39-L45) as an example. But I won't insist :) | void shouldDeployWithoutIssues() {
} | void shouldDeployWithoutIssues() {
} | class BeanParamTest {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.setArchiveProducer(() -> {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(MyBeanParamWithFieldsAndProperties.class, Top.class);
});
@Test
public static class Top {
@PathParam("pathParam")
private String pathParam = "pathParam";
public String getPathParam() {
return pathParam;
}
public void setPathParam(String pathParam) {
this.pathParam = pathParam;
}
}
public static class MyBeanParamWithFieldsAndProperties extends Top {
@HeaderParam("headerParam")
private String headerParam = "headerParam";
@CookieParam("cookieParam")
private String cookieParam = "cookieParam";
@FormParam("formParam")
private String formParam = "formParam";
@QueryParam("queryParam")
private String queryParam = "queryParam";
public String getHeaderParam() {
return headerParam;
}
public void setHeaderParam(String headerParam) {
this.headerParam = headerParam;
}
public String getCookieParam() {
return cookieParam;
}
public void setCookieParam(String cookieParam) {
this.cookieParam = cookieParam;
}
public String getFormParam() {
return formParam;
}
public void setFormParam(String formParam) {
this.formParam = formParam;
}
public String getQueryParam() {
return queryParam;
}
public void setQueryParam(String queryParam) {
this.queryParam = queryParam;
}
}
@Path("/")
public static class Resource {
@Path("/a/{restPathDefault}/{restPath_Overridden}/{pathParam}")
@POST
public String beanParamWithFields(@BeanParam MyBeanParamWithFieldsAndProperties p) {
return null;
}
@Path("/b/{pathParam}")
@POST
public String beanParamWithFields(@BeanParam Top p) {
return null;
}
}
} | class BeanParamTest {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.setArchiveProducer(() -> {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(MyBeanParamWithFieldsAndProperties.class, Top.class);
});
@Test
public static class Top {
@PathParam("pathParam")
private String pathParam = "pathParam";
public String getPathParam() {
return pathParam;
}
public void setPathParam(String pathParam) {
this.pathParam = pathParam;
}
}
public static class MyBeanParamWithFieldsAndProperties extends Top {
@HeaderParam("headerParam")
private String headerParam = "headerParam";
@CookieParam("cookieParam")
private String cookieParam = "cookieParam";
@FormParam("formParam")
private String formParam = "formParam";
@QueryParam("queryParam")
private String queryParam = "queryParam";
public String getHeaderParam() {
return headerParam;
}
public void setHeaderParam(String headerParam) {
this.headerParam = headerParam;
}
public String getCookieParam() {
return cookieParam;
}
public void setCookieParam(String cookieParam) {
this.cookieParam = cookieParam;
}
public String getFormParam() {
return formParam;
}
public void setFormParam(String formParam) {
this.formParam = formParam;
}
public String getQueryParam() {
return queryParam;
}
public void setQueryParam(String queryParam) {
this.queryParam = queryParam;
}
}
@Path("/")
public static class Resource {
@Path("/a/{restPathDefault}/{restPath_Overridden}/{pathParam}")
@POST
public String beanParamWithFields(@BeanParam MyBeanParamWithFieldsAndProperties p) {
return null;
}
@Path("/b/{pathParam}")
@POST
public String beanParamWithFields(@BeanParam Top p) {
return null;
}
}
} | |
Currently they do not have a shared parent to put the constant, and it seems not worthwhile to make a new class for constant, as currently here is only 1 "kubernetes" duplicated in 2 places. | private static boolean isWebApp(SiteInner inner) {
// A site with no kind is treated as a web app; otherwise its comma-separated
// kind list must contain "app" or "api" and must not contain "kubernetes".
if (inner.kind() == null) {
    return true;
}
List<String> kinds = Arrays.asList(inner.kind().split(Pattern.quote(",")));
return (kinds.contains("app") || kinds.contains("api")) && !kinds.contains("kubernetes");
} | if ((kinds.contains("app") || kinds.contains("api")) && !kinds.contains("kubernetes")) { | private static boolean isWebApp(SiteInner inner) {
boolean ret = false;
if (inner.kind() == null) {
ret = true;
} else {
List<String> kinds = Arrays.asList(inner.kind().split(Pattern.quote(",")));
if ((kinds.contains("app") || kinds.contains("api")) && !kinds.contains("kubernetes")) {
ret = true;
}
}
return ret;
} | class WebAppsImpl
extends GroupableResourcesImpl<WebApp, WebAppImpl, SiteInner, WebAppsClient, AppServiceManager>
implements WebApps, SupportsBatchDeletion {
public WebAppsImpl(final AppServiceManager manager) {
super(manager.serviceClient().getWebApps(), manager);
}
@Override
public Mono<WebApp> getByResourceGroupAsync(final String resourceGroupName, final String name) {
// Surface argument errors through the reactive error channel, not by throwing.
if (CoreUtils.isNullOrEmpty(resourceGroupName)) {
return Mono.error(
new IllegalArgumentException("Parameter 'resourceGroupName' is required and cannot be null."));
}
if (CoreUtils.isNullOrEmpty(name)) {
return Mono.error(
new IllegalArgumentException("Parameter 'name' is required and cannot be null."));
}
// Fetch the site, then its configuration and diagnostics-log configuration in
// parallel, and combine all three into the wrapped model.
return this
.getInnerAsync(resourceGroupName, name)
.flatMap(
siteInner ->
Mono
.zip(
this.inner().getConfigurationAsync(resourceGroupName, name),
this.inner().getDiagnosticLogsConfigurationAsync(resourceGroupName, name),
(SiteConfigResourceInner siteConfigResourceInner, SiteLogsConfigInner logsConfigInner) ->
wrapModel(siteInner, siteConfigResourceInner, logsConfigInner)));
}
@Override
protected Mono<SiteInner> getInnerAsync(String resourceGroupName, String name) {
return this.inner().getByResourceGroupAsync(resourceGroupName, name);
}
@Override
protected Mono<Void> deleteInnerAsync(String resourceGroupName, String name) {
return inner().deleteAsync(resourceGroupName, name).then();
}
@Override
protected WebAppImpl wrapModel(String name) {
return new WebAppImpl(name, new SiteInner().withKind("app"), null, null, this.manager());
}
protected WebAppImpl wrapModel(SiteInner inner, SiteConfigResourceInner siteConfig, SiteLogsConfigInner logConfig) {
if (inner == null) {
return null;
}
return new WebAppImpl(inner.name(), inner, siteConfig, logConfig, this.manager());
}
@Override
protected WebAppImpl wrapModel(SiteInner inner) {
return wrapModel(inner, null, null);
}
@Override
public WebAppImpl define(String name) {
return wrapModel(name);
}
@Override
public Flux<String> deleteByIdsAsync(Collection<String> ids) {
return BatchDeletionImpl.deleteByIdsAsync(ids, this::deleteInnerAsync);
}
@Override
public Flux<String> deleteByIdsAsync(String... ids) {
return this.deleteByIdsAsync(new ArrayList<>(Arrays.asList(ids)));
}
@Override
public void deleteByIds(Collection<String> ids) {
    // Null or empty input is a no-op.
    if (ids == null || ids.isEmpty()) {
        return;
    }
    this.deleteByIdsAsync(ids).blockLast();
}
@Override
public void deleteByIds(String... ids) {
this.deleteByIds(new ArrayList<>(Arrays.asList(ids)));
}
@Override
public PagedIterable<WebAppBasic> listByResourceGroup(String resourceGroupName) {
return new PagedIterable<>(this.listByResourceGroupAsync(resourceGroupName));
}
@Override
public PagedFlux<WebAppBasic> listByResourceGroupAsync(String resourceGroupName) {
// Report the missing argument through the paged flux rather than throwing.
if (CoreUtils.isNullOrEmpty(resourceGroupName)) {
return new PagedFlux<>(() -> Mono.error(
new IllegalArgumentException("Parameter 'resourceGroupName' is required and cannot be null.")));
}
// Filter each page down to sites that qualify as web apps (see isWebApp).
return PagedConverter.flatMapPage(inner().listByResourceGroupAsync(resourceGroupName),
inner -> isWebApp(inner) ? Mono.just(new WebAppBasicImpl(inner, this.manager())) : Mono.empty());
}
@Override
public PagedIterable<WebAppBasic> list() {
return new PagedIterable<>(this.listAsync());
}
@Override
public PagedFlux<WebAppBasic> listAsync() {
return PagedConverter.flatMapPage(inner().listAsync(),
inner -> isWebApp(inner) ? Mono.just(new WebAppBasicImpl(inner, this.manager())) : Mono.empty());
}
} | class WebAppsImpl
extends GroupableResourcesImpl<WebApp, WebAppImpl, SiteInner, WebAppsClient, AppServiceManager>
implements WebApps, SupportsBatchDeletion {
public WebAppsImpl(final AppServiceManager manager) {
super(manager.serviceClient().getWebApps(), manager);
}
@Override
public Mono<WebApp> getByResourceGroupAsync(final String resourceGroupName, final String name) {
if (CoreUtils.isNullOrEmpty(resourceGroupName)) {
return Mono.error(
new IllegalArgumentException("Parameter 'resourceGroupName' is required and cannot be null."));
}
if (CoreUtils.isNullOrEmpty(name)) {
return Mono.error(
new IllegalArgumentException("Parameter 'name' is required and cannot be null."));
}
return this
.getInnerAsync(resourceGroupName, name)
.flatMap(
siteInner ->
Mono
.zip(
this.inner().getConfigurationAsync(resourceGroupName, name),
this.inner().getDiagnosticLogsConfigurationAsync(resourceGroupName, name),
(SiteConfigResourceInner siteConfigResourceInner, SiteLogsConfigInner logsConfigInner) ->
wrapModel(siteInner, siteConfigResourceInner, logsConfigInner)));
}
@Override
protected Mono<SiteInner> getInnerAsync(String resourceGroupName, String name) {
return this.inner().getByResourceGroupAsync(resourceGroupName, name);
}
@Override
protected Mono<Void> deleteInnerAsync(String resourceGroupName, String name) {
return inner().deleteAsync(resourceGroupName, name).then();
}
@Override
protected WebAppImpl wrapModel(String name) {
return new WebAppImpl(name, new SiteInner().withKind("app"), null, null, this.manager());
}
protected WebAppImpl wrapModel(SiteInner inner, SiteConfigResourceInner siteConfig, SiteLogsConfigInner logConfig) {
if (inner == null) {
return null;
}
return new WebAppImpl(inner.name(), inner, siteConfig, logConfig, this.manager());
}
@Override
protected WebAppImpl wrapModel(SiteInner inner) {
return wrapModel(inner, null, null);
}
@Override
public WebAppImpl define(String name) {
return wrapModel(name);
}
@Override
public Flux<String> deleteByIdsAsync(Collection<String> ids) {
return BatchDeletionImpl.deleteByIdsAsync(ids, this::deleteInnerAsync);
}
@Override
public Flux<String> deleteByIdsAsync(String... ids) {
return this.deleteByIdsAsync(new ArrayList<>(Arrays.asList(ids)));
}
@Override
public void deleteByIds(Collection<String> ids) {
if (ids != null && !ids.isEmpty()) {
this.deleteByIdsAsync(ids).blockLast();
}
}
@Override
public void deleteByIds(String... ids) {
this.deleteByIds(new ArrayList<>(Arrays.asList(ids)));
}
@Override
public PagedIterable<WebAppBasic> listByResourceGroup(String resourceGroupName) {
return new PagedIterable<>(this.listByResourceGroupAsync(resourceGroupName));
}
@Override
public PagedFlux<WebAppBasic> listByResourceGroupAsync(String resourceGroupName) {
if (CoreUtils.isNullOrEmpty(resourceGroupName)) {
return new PagedFlux<>(() -> Mono.error(
new IllegalArgumentException("Parameter 'resourceGroupName' is required and cannot be null.")));
}
return PagedConverter.flatMapPage(inner().listByResourceGroupAsync(resourceGroupName),
inner -> isWebApp(inner) ? Mono.just(new WebAppBasicImpl(inner, this.manager())) : Mono.empty());
}
@Override
public PagedIterable<WebAppBasic> list() {
return new PagedIterable<>(this.listAsync());
}
@Override
public PagedFlux<WebAppBasic> listAsync() {
return PagedConverter.flatMapPage(inner().listAsync(),
inner -> isWebApp(inner) ? Mono.just(new WebAppBasicImpl(inner, this.manager())) : Mono.empty());
}
} |
```suggestion try { lowerBound = elementTimestampOrTimerHoldTimestamp.minus(doFn.getAllowedTimestampSkew()); } catch (ArithmeticException e) { lowerBound = BoundedWindow.TIMESTAMP_MIN_VALUE; } if (outputTimestamp.isBefore(lowerBound) || outputTimestamp.isAfter(BoundedWindow.TIMESTAMP_MAX_VALUE)) { ``` | private Timer<K> getTimerForTime(Instant scheduledTime) {
// Validate a user-provided output timestamp against the allowed-skew window.
if (outputTimestamp != null) {
Instant lowerBound;
Instant upperBound = BoundedWindow.TIMESTAMP_MAX_VALUE;
try {
lowerBound = elementTimestampOrTimerHoldTimestamp.minus(doFn.getAllowedTimestampSkew());
} catch (ArithmeticException e) {
// Subtracting the skew underflowed; clamp to the global minimum timestamp.
lowerBound = BoundedWindow.TIMESTAMP_MIN_VALUE;
}
if (outputTimestamp.isBefore(lowerBound) || outputTimestamp.isAfter(upperBound)) {
throw new IllegalArgumentException(
String.format(
"output timestamp %s (allowed skew %s) should be after input message timestamp or"
+ " output timestamp of firing timers %s and before %s",
outputTimestamp,
doFn.getAllowedTimestampSkew(),
elementTimestampOrTimerHoldTimestamp,
upperBound));
}
}
// Default the output timestamp: the firing time for event-time timers,
// otherwise the hold timestamp of the current element/timer.
if (outputTimestamp == null && TimeDomain.EVENT_TIME.equals(timeDomain)) {
outputTimestamp = scheduledTime;
}
if (outputTimestamp == null) {
outputTimestamp = elementTimestampOrTimerHoldTimestamp;
}
// Neither the firing time (event time) nor the output timestamp (processing
// time) may fall after the window's garbage-collection time.
Instant windowExpiry = LateDataUtils.garbageCollectionTime(currentWindow, allowedLateness);
if (TimeDomain.EVENT_TIME.equals(timeDomain)) {
checkArgument(
!outputTimestamp.isAfter(scheduledTime),
"Attempted to set an event-time timer with an output timestamp of %s that is"
+ " after the timer firing timestamp %s",
outputTimestamp,
scheduledTime);
checkArgument(
!scheduledTime.isAfter(windowExpiry),
"Attempted to set an event-time timer with a firing timestamp of %s that is"
+ " after the expiration of window %s",
scheduledTime,
windowExpiry);
} else {
checkArgument(
!outputTimestamp.isAfter(windowExpiry),
"Attempted to set a processing-time timer with an output timestamp of %s that is"
+ " after the expiration of window %s",
outputTimestamp,
windowExpiry);
}
return Timer.of(
userKey,
dynamicTimerTag,
Collections.singletonList(boundedWindow),
scheduledTime,
outputTimestamp,
paneInfo);
} | if (outputTimestamp.isBefore(lowerBound) || outputTimestamp.isAfter(upperBound)) { | private Timer<K> getTimerForTime(Instant scheduledTime) {
if (outputTimestamp != null) {
Instant lowerBound;
try {
lowerBound = elementTimestampOrTimerHoldTimestamp.minus(doFn.getAllowedTimestampSkew());
} catch (ArithmeticException e) {
lowerBound = BoundedWindow.TIMESTAMP_MIN_VALUE;
}
if (outputTimestamp.isBefore(lowerBound)
|| outputTimestamp.isAfter(BoundedWindow.TIMESTAMP_MAX_VALUE)) {
throw new IllegalArgumentException(
String.format(
"Cannot output timer with output timestamp %s. Output timestamps must be no "
+ "earlier than the timestamp of the current input (%s) minus the allowed skew "
+ "(%s) and no later than %s. See the DoFn
+ "details on changing the allowed skew.",
outputTimestamp,
elementTimestampOrTimerHoldTimestamp,
PeriodFormat.getDefault().print(doFn.getAllowedTimestampSkew().toPeriod()),
BoundedWindow.TIMESTAMP_MAX_VALUE));
}
}
if (outputTimestamp == null && TimeDomain.EVENT_TIME.equals(timeDomain)) {
outputTimestamp = scheduledTime;
}
if (outputTimestamp == null) {
outputTimestamp = elementTimestampOrTimerHoldTimestamp;
}
Instant windowExpiry = LateDataUtils.garbageCollectionTime(currentWindow, allowedLateness);
if (TimeDomain.EVENT_TIME.equals(timeDomain)) {
checkArgument(
!outputTimestamp.isAfter(scheduledTime),
"Attempted to set an event-time timer with an output timestamp of %s that is"
+ " after the timer firing timestamp %s",
outputTimestamp,
scheduledTime);
checkArgument(
!scheduledTime.isAfter(windowExpiry),
"Attempted to set an event-time timer with a firing timestamp of %s that is"
+ " after the expiration of window %s",
scheduledTime,
windowExpiry);
} else {
checkArgument(
!outputTimestamp.isAfter(windowExpiry),
"Attempted to set a processing-time timer with an output timestamp of %s that is"
+ " after the expiration of window %s",
outputTimestamp,
windowExpiry);
}
return Timer.of(
userKey,
dynamicTimerTag,
Collections.singletonList(boundedWindow),
scheduledTime,
outputTimestamp,
paneInfo);
} | class FnApiTimer<K> implements org.apache.beam.sdk.state.Timer {
private final String timerIdOrFamily;
private final K userKey;
private final String dynamicTimerTag;
private final TimeDomain timeDomain;
private final Duration allowedLateness;
private final Instant fireTimestamp;
private final Instant elementTimestampOrTimerHoldTimestamp;
private final BoundedWindow boundedWindow;
private final PaneInfo paneInfo;
private Instant outputTimestamp;
private Duration period = Duration.ZERO;
private Duration offset = Duration.ZERO;
FnApiTimer(
String timerIdOrFamily,
K userKey,
String dynamicTimerTag,
BoundedWindow boundedWindow,
Instant elementTimestampOrTimerHoldTimestamp,
Instant elementTimestampOrTimerFireTimestamp,
PaneInfo paneInfo,
TimeDomain timeDomain) {
this.timerIdOrFamily = timerIdOrFamily;
this.userKey = userKey;
this.dynamicTimerTag = dynamicTimerTag;
this.elementTimestampOrTimerHoldTimestamp = elementTimestampOrTimerHoldTimestamp;
this.boundedWindow = boundedWindow;
this.paneInfo = paneInfo;
this.timeDomain = timeDomain;
switch (timeDomain) {
case EVENT_TIME:
fireTimestamp = elementTimestampOrTimerFireTimestamp;
break;
case PROCESSING_TIME:
fireTimestamp = new Instant(DateTimeUtils.currentTimeMillis());
break;
default:
throw new IllegalArgumentException(
String.format("Unknown or unsupported time domain %s", timeDomain));
}
try {
this.allowedLateness =
rehydratedComponents
.getPCollection(
pTransform.getInputsOrThrow(ParDoTranslation.getMainInputName(pTransform)))
.getWindowingStrategy()
.getAllowedLateness();
} catch (IOException e) {
throw new IllegalArgumentException(
String.format("Unable to get allowed lateness for timer %s", timerIdOrFamily));
}
}
@Override
public void set(Instant absoluteTime) {
// Event-time timers may not be set past the window's garbage-collection time.
if (TimeDomain.EVENT_TIME.equals(timeDomain)) {
Instant windowExpiry = LateDataUtils.garbageCollectionTime(currentWindow, allowedLateness);
checkArgument(
!absoluteTime.isAfter(windowExpiry),
"Attempted to set event time timer for %s but that is after"
+ " the expiration of window %s",
absoluteTime,
windowExpiry);
}
// Record the (possibly re-)set timer in the bundle's timer tracker.
timerBundleTracker.timerModified(timerIdOrFamily, timeDomain, getTimerForTime(absoluteTime));
}
@Override
public void setRelative() {
    // Fire at (fireTimestamp + offset); when a period is set, align the target
    // to the next multiple of 'period' after that point.
    Instant target;
    if (period.equals(Duration.ZERO)) {
        target = fireTimestamp.plus(offset);
    } else {
        long millisSinceStart = fireTimestamp.plus(offset).getMillis() % period.getMillis();
        // Use Duration.millis(...) rather than the bare long overload of minus()
        // so the unit is explicit (consistent with the sibling implementation).
        target =
            millisSinceStart == 0
                ? fireTimestamp
                : fireTimestamp.plus(period).minus(Duration.millis(millisSinceStart));
    }
    // Event-time targets are clamped to the window's GC time.
    target = minTargetAndGcTime(target);
    timerBundleTracker.timerModified(timerIdOrFamily, timeDomain, getTimerForTime(target));
}
@Override
public void clear() {
timerBundleTracker.timerModified(timerIdOrFamily, timeDomain, getClearedTimer());
}
@Override
public org.apache.beam.sdk.state.Timer offset(Duration offset) {
this.offset = offset;
return this;
}
@Override
public org.apache.beam.sdk.state.Timer align(Duration period) {
this.period = period;
return this;
}
@Override
public org.apache.beam.sdk.state.Timer withOutputTimestamp(Instant outputTime) {
this.outputTimestamp = outputTime;
return this;
}
@Override
public Instant getCurrentRelativeTime() {
return fireTimestamp;
}
/**
* For event time timers the target time should be prior to window GC time. So it returns
* min(time to set, GC Time of window).
*/
private Instant minTargetAndGcTime(Instant target) {
if (TimeDomain.EVENT_TIME.equals(timeDomain)) {
Instant windowExpiry = LateDataUtils.garbageCollectionTime(currentWindow, allowedLateness);
if (target.isAfter(windowExpiry)) {
return windowExpiry;
}
}
return target;
}
private Timer<K> getClearedTimer() {
return Timer.cleared(userKey, dynamicTimerTag, Collections.singletonList(boundedWindow));
}
@SuppressWarnings("deprecation")
} | class FnApiTimer<K> implements org.apache.beam.sdk.state.Timer {
private final String timerIdOrFamily;
private final K userKey;
private final String dynamicTimerTag;
private final TimeDomain timeDomain;
private final Duration allowedLateness;
private final Instant fireTimestamp;
private final Instant elementTimestampOrTimerHoldTimestamp;
private final BoundedWindow boundedWindow;
private final PaneInfo paneInfo;
private Instant outputTimestamp;
private Duration period = Duration.ZERO;
private Duration offset = Duration.ZERO;
FnApiTimer(
String timerIdOrFamily,
K userKey,
String dynamicTimerTag,
BoundedWindow boundedWindow,
Instant elementTimestampOrTimerHoldTimestamp,
Instant elementTimestampOrTimerFireTimestamp,
PaneInfo paneInfo,
TimeDomain timeDomain) {
this.timerIdOrFamily = timerIdOrFamily;
this.userKey = userKey;
this.dynamicTimerTag = dynamicTimerTag;
this.elementTimestampOrTimerHoldTimestamp = elementTimestampOrTimerHoldTimestamp;
this.boundedWindow = boundedWindow;
this.paneInfo = paneInfo;
this.timeDomain = timeDomain;
switch (timeDomain) {
case EVENT_TIME:
fireTimestamp = elementTimestampOrTimerFireTimestamp;
break;
case PROCESSING_TIME:
fireTimestamp = new Instant(DateTimeUtils.currentTimeMillis());
break;
default:
throw new IllegalArgumentException(
String.format("Unknown or unsupported time domain %s", timeDomain));
}
try {
this.allowedLateness =
rehydratedComponents
.getPCollection(
pTransform.getInputsOrThrow(ParDoTranslation.getMainInputName(pTransform)))
.getWindowingStrategy()
.getAllowedLateness();
} catch (IOException e) {
throw new IllegalArgumentException(
String.format("Unable to get allowed lateness for timer %s", timerIdOrFamily));
}
}
@Override
public void set(Instant absoluteTime) {
if (TimeDomain.EVENT_TIME.equals(timeDomain)) {
Instant windowExpiry = LateDataUtils.garbageCollectionTime(currentWindow, allowedLateness);
checkArgument(
!absoluteTime.isAfter(windowExpiry),
"Attempted to set event time timer for %s but that is after"
+ " the expiration of window %s",
absoluteTime,
windowExpiry);
}
timerBundleTracker.timerModified(timerIdOrFamily, timeDomain, getTimerForTime(absoluteTime));
}
@Override
public void setRelative() {
Instant target;
if (period.equals(Duration.ZERO)) {
target = fireTimestamp.plus(offset);
} else {
long millisSinceStart = fireTimestamp.plus(offset).getMillis() % period.getMillis();
target =
millisSinceStart == 0
? fireTimestamp
: fireTimestamp.plus(period).minus(Duration.millis(millisSinceStart));
}
target = minTargetAndGcTime(target);
timerBundleTracker.timerModified(timerIdOrFamily, timeDomain, getTimerForTime(target));
}
@Override
public void clear() {
timerBundleTracker.timerModified(timerIdOrFamily, timeDomain, getClearedTimer());
}
@Override
public org.apache.beam.sdk.state.Timer offset(Duration offset) {
this.offset = offset;
return this;
}
@Override
public org.apache.beam.sdk.state.Timer align(Duration period) {
this.period = period;
return this;
}
@Override
public org.apache.beam.sdk.state.Timer withOutputTimestamp(Instant outputTime) {
this.outputTimestamp = outputTime;
return this;
}
@Override
public Instant getCurrentRelativeTime() {
return fireTimestamp;
}
/**
* For event time timers the target time should be prior to window GC time. So it returns
* min(time to set, GC Time of window).
*/
private Instant minTargetAndGcTime(Instant target) {
if (TimeDomain.EVENT_TIME.equals(timeDomain)) {
Instant windowExpiry = LateDataUtils.garbageCollectionTime(currentWindow, allowedLateness);
if (target.isAfter(windowExpiry)) {
return windowExpiry;
}
}
return target;
}
private Timer<K> getClearedTimer() {
return Timer.cleared(userKey, dynamicTimerTag, Collections.singletonList(boundedWindow));
}
@SuppressWarnings("deprecation")
} |
You are right in theory but the practice is that we are doing the propagation and we are responsible for cleaning up afterwards (deactivate context). Following that principle, there cannot really be an active req. context on the "new" thread. Hence we have this `NOOP` snapshot here. | public ThreadContextSnapshot currentContext(Map<String, String> map) {
// No snapshot is possible when the container is absent or stopped.
ArcContainer arc = Arc.container();
if (arc == null || !arc.isRunning()) {
return null;
}
// No request context active on the capturing thread: propagation itself
// deactivates contexts it activates, so the target thread cannot have an
// active request context and a no-op snapshot is sufficient.
if (!isContextActiveOnThisThread(arc)) {
return NOOP_SNAPSHOT;
}
// Capture the current request-context state to re-activate on the target thread.
InjectableContext.ContextState state = arc.requestContext().getState();
return () -> {
ArcContainer arcContainer = Arc.container();
if (arcContainer == null || !arcContainer.isRunning()) {
throw new IllegalStateException("Arc context propagation was attempted but the container is not running.");
}
ThreadContextController controller;
ManagedContext requestContext = arcContainer.requestContext();
if (isContextActiveOnThisThread(arcContainer)) {
// Swap in the captured state and restore the previous one afterwards.
InjectableContext.ContextState stateToRestore = requestContext.getState();
requestContext.deactivate();
requestContext.activate(state);
controller = () -> {
requestContext.deactivate();
requestContext.activate(stateToRestore);
};
} else {
// Nothing active on the target thread: activate the captured state and
// tear it down when the controller ends.
requestContext.activate(state);
controller = () -> {
requestContext.deactivate();
};
}
return controller;
};
} | return NOOP_SNAPSHOT; | public ThreadContextSnapshot currentContext(Map<String, String> map) {
ArcContainer arc = Arc.container();
if (arc == null || !arc.isRunning()) {
return null;
}
InjectableContext.ContextState state = isContextActiveOnThisThread(arc) ? arc.requestContext().getState() : null;
return () -> {
ArcContainer arcContainer = Arc.container();
if (arcContainer == null || !arcContainer.isRunning()) {
throw new IllegalStateException("Arc context propagation was attempted but the container is not running.");
}
ThreadContextController controller;
ManagedContext requestContext = arcContainer.requestContext();
if (isContextActiveOnThisThread(arcContainer)) {
InjectableContext.ContextState stateToRestore = requestContext.getState();
requestContext.deactivate();
if (state != null) {
requestContext.activate(state);
}
controller = () -> {
if (state != null) {
requestContext.deactivate();
}
requestContext.activate(stateToRestore);
};
} else {
if (state != null) {
requestContext.activate(state);
}
controller = () -> {
if (state != null) {
requestContext.deactivate();
}
};
}
return controller;
};
} | class ArcContextProvider implements ThreadContextProvider {
// Shared do-nothing snapshot: applying it yields a controller that does
// nothing on end. Returned when there is no request context to capture/clear
// on the calling thread.
private static ThreadContextSnapshot NOOP_SNAPSHOT = () -> () -> {
};
/**
 * Builds a snapshot whose application gives the executing thread a cleared
 * (freshly activated, state-less) request context for the duration of the
 * task, and restores the thread's previous request-context state afterwards.
 *
 * <p>Returns {@code null} when no running Arc container is available, and a
 * no-op snapshot when no request context is active on the capturing thread
 * (there is nothing to clear in that case).
 *
 * <p>Fix: the original carried a duplicated {@code @Override} annotation,
 * which does not compile ({@code @Override} is not a repeatable annotation).
 *
 * @param map provider properties from the Context Propagation SPI (unused)
 * @return a snapshot activating a cleared request context, the no-op
 *         snapshot, or {@code null} when the container is not running
 */
@Override
public ThreadContextSnapshot clearedContext(Map<String, String> map) {
    ArcContainer arc = Arc.container();
    if (arc == null || !arc.isRunning()) {
        return null;
    }
    if (!isContextActiveOnThisThread(arc)) {
        // No request context on this thread, so nothing needs clearing.
        return NOOP_SNAPSHOT;
    }
    return () -> {
        ArcContainer arcContainer = Arc.container();
        if (arcContainer == null || !arcContainer.isRunning()) {
            throw new IllegalStateException("Arc context propagation was attempted but the container is not running.");
        }
        ThreadContextController controller;
        ManagedContext requestContext = arcContainer.requestContext();
        if (isContextActiveOnThisThread(arcContainer)) {
            // Park the target thread's current state, then activate a fresh one.
            InjectableContext.ContextState stateToRestore = requestContext.getState();
            requestContext.deactivate();
            requestContext.activate();
            controller = () -> {
                // Drop the cleared context and restore the parked state.
                requestContext.deactivate();
                requestContext.activate(stateToRestore);
            };
        } else {
            // Nothing was active on the target thread; just activate fresh
            // and tear it down again when the task completes.
            requestContext.activate();
            controller = () -> {
                requestContext.deactivate();
            };
        }
        return controller;
    };
}
@Override
public String getThreadContextType() {
    // This provider handles the CDI context type, as named by the
    // ThreadContext.CDI constant of the Context Propagation API.
    return ThreadContext.CDI;
}
// Whether the container's request context reports itself active; per the
// method name this reflects the state of the calling thread.
private boolean isContextActiveOnThisThread(ArcContainer arc) {
    return arc.requestContext().isActive();
}
} | class ArcContextProvider implements ThreadContextProvider {
/**
 * Builds a snapshot whose application gives the executing thread a cleared
 * (freshly activated, state-less) request context for the duration of the
 * task, restoring the thread's previous request-context state afterwards.
 *
 * <p>Returns {@code null} when no running Arc container is available, which
 * this variant does unconditionally check; unlike a sibling variant it does
 * not short-circuit with a no-op snapshot when no context is active.
 *
 * <p>Fix: the original carried a duplicated {@code @Override} annotation,
 * which does not compile ({@code @Override} is not a repeatable annotation).
 *
 * @param map provider properties from the Context Propagation SPI (unused)
 * @return a snapshot activating a cleared request context, or {@code null}
 *         when the container is not running
 */
@Override
public ThreadContextSnapshot clearedContext(Map<String, String> map) {
    ArcContainer arc = Arc.container();
    if (arc == null || !arc.isRunning()) {
        return null;
    }
    return () -> {
        ArcContainer arcContainer = Arc.container();
        if (arcContainer == null || !arcContainer.isRunning()) {
            throw new IllegalStateException("Arc context propagation was attempted but the container is not running.");
        }
        ThreadContextController controller;
        ManagedContext requestContext = arcContainer.requestContext();
        if (isContextActiveOnThisThread(arcContainer)) {
            // Park the target thread's current state, then activate a fresh one.
            InjectableContext.ContextState stateToRestore = requestContext.getState();
            requestContext.deactivate();
            requestContext.activate();
            controller = () -> {
                // Drop the cleared context and restore the parked state.
                requestContext.deactivate();
                requestContext.activate(stateToRestore);
            };
        } else {
            // Nothing was active on the target thread; activate fresh and
            // tear it down again when the task completes.
            requestContext.activate();
            controller = () -> {
                requestContext.deactivate();
            };
        }
        return controller;
    };
}
@Override
public String getThreadContextType() {
    // This provider handles the CDI context type, as named by the
    // ThreadContext.CDI constant of the Context Propagation API.
    return ThreadContext.CDI;
}
// Whether the container's request context reports itself active; per the
// method name this reflects the state of the calling thread.
private boolean isContextActiveOnThisThread(ArcContainer arc) {
    return arc.requestContext().isActive();
}
} |
What I meant was something like ```java Map<Name, BPackageSymbol> modules = new HashMap<>(); modules.put(Names.ERROR, this.langErrorModuleSymbol); modules.put(Names.OBJECT, this.langObjectModuleSymbol); modules.put(Names.XML, this.langXmlModuleSymbol); this.predeclaredModules = Collections.unmodifiableMap(modules); ``` | public void loadPredeclaredModules() {
this.predeclaredModules.put(Names.ERROR, this.langErrorModuleSymbol);
this.predeclaredModules.put(Names.OBJECT, this.langObjectModuleSymbol);
this.predeclaredModules.put(Names.XML, this.langXmlModuleSymbol);
this.predeclaredModules = Collections.unmodifiableMap(this.predeclaredModules);
} | this.predeclaredModules = Collections.unmodifiableMap(this.predeclaredModules); | public void loadPredeclaredModules() {
Map<Name, BPackageSymbol> modules = new HashMap<>();
modules.put(Names.ERROR, this.langErrorModuleSymbol);
modules.put(Names.OBJECT, this.langObjectModuleSymbol);
modules.put(Names.XML, this.langXmlModuleSymbol);
this.predeclaredModules = Collections.unmodifiableMap(modules);
} | class SymbolTable {
private static final CompilerContext.Key<SymbolTable> SYM_TABLE_KEY =
new CompilerContext.Key<>();
public static final PackageID TRANSACTION = new PackageID(Names.BUILTIN_ORG, Names.TRANSACTION_PACKAGE,
Names.EMPTY);
public static final Integer BBYTE_MIN_VALUE = 0;
public static final Integer BBYTE_MAX_VALUE = 255;
public static final Integer SIGNED32_MAX_VALUE = 2147483647;
public static final Integer SIGNED32_MIN_VALUE = -2147483648;
public static final Integer SIGNED16_MAX_VALUE = 32767;
public static final Integer SIGNED16_MIN_VALUE = -32768;
public static final Integer SIGNED8_MAX_VALUE = 127;
public static final Integer SIGNED8_MIN_VALUE = -128;
public static final Long UNSIGNED32_MAX_VALUE = 4294967295L;
public static final Integer UNSIGNED16_MAX_VALUE = 65535;
public static final Integer UNSIGNED8_MAX_VALUE = 255;
public final BLangPackage rootPkgNode;
public final BPackageSymbol rootPkgSymbol;
public final BSymbol notFoundSymbol;
public final BSymbol invalidUsageSymbol;
public final Scope rootScope;
public final BType noType = new BNoType(TypeTags.NONE);
public final BType nilType = new BNilType();
public final BType neverType = new BNeverType();
public final BType intType = new BType(TypeTags.INT, null, Flags.READONLY);
public final BType byteType = new BType(TypeTags.BYTE, null, Flags.READONLY);
public final BType floatType = new BType(TypeTags.FLOAT, null, Flags.READONLY);
public final BType decimalType = new BType(TypeTags.DECIMAL, null, Flags.READONLY);
public final BType stringType = new BType(TypeTags.STRING, null, Flags.READONLY);
public final BType booleanType = new BType(TypeTags.BOOLEAN, null, Flags.READONLY);
public final BType jsonType = new BJSONType(TypeTags.JSON, null);
public final BType anyType = new BAnyType(TypeTags.ANY, null);
public final BType anydataType = new BAnydataType(TypeTags.ANYDATA, null);
public final BMapType mapType = new BMapType(TypeTags.MAP, anyType, null);
public final BMapType mapStringType = new BMapType(TypeTags.MAP, stringType, null);
public final BMapType mapAnydataType = new BMapType(TypeTags.MAP, anydataType, null);
public final BMapType mapJsonType = new BMapType(TypeTags.MAP, jsonType, null);
public final BFutureType futureType = new BFutureType(TypeTags.FUTURE, nilType, null);
public final BArrayType arrayType = new BArrayType(anyType);
public final BArrayType arrayStringType = new BArrayType(stringType);
public final BArrayType arrayAnydataType = new BArrayType(anydataType);
public final BArrayType arrayJsonType = new BArrayType(jsonType);
public final BType tupleType = new BTupleType(Lists.of(noType));
public final BType recordType = new BRecordType(null);
public final BType stringArrayType = new BArrayType(stringType);
public final BType jsonArrayType = new BArrayType(jsonType);
public final BType anydataArrayType = new BArrayType(anydataType);
public final BType anyServiceType = new BServiceType(null);
public final BType handleType = new BHandleType(TypeTags.HANDLE, null);
public final BTypedescType typeDesc = new BTypedescType(this.anyType, null);
public final BType readonlyType = new BReadonlyType(TypeTags.READONLY, null);
public final BType semanticError = new BType(TypeTags.SEMANTIC_ERROR, null);
public final BType nullSet = new BType(TypeTags.NULL_SET, null);
public BType streamType = new BStreamType(TypeTags.STREAM, anydataType, null, null);
public BType tableType = new BTableType(TypeTags.TABLE, anydataType, null);
public BErrorType errorType;
public BRecordType detailType;
public BConstructorSymbol errorConstructor;
public BUnionType anyOrErrorType;
public BUnionType pureType;
public BUnionType errorOrNilType;
public BFiniteType trueType;
public BObjectType intRangeType;
public BMapType mapAllType;
public BArrayType arrayAllType;
public BObjectType rawTemplateType;
public final BIntSubType signed32IntType = new BIntSubType(TypeTags.SIGNED32_INT, Names.SIGNED32);
public final BIntSubType signed16IntType = new BIntSubType(TypeTags.SIGNED16_INT, Names.SIGNED16);
public final BIntSubType signed8IntType = new BIntSubType(TypeTags.SIGNED8_INT, Names.SIGNED8);
public final BIntSubType unsigned32IntType = new BIntSubType(TypeTags.UNSIGNED32_INT, Names.UNSIGNED32);
public final BIntSubType unsigned16IntType = new BIntSubType(TypeTags.UNSIGNED16_INT, Names.UNSIGNED16);
public final BIntSubType unsigned8IntType = new BIntSubType(TypeTags.UNSIGNED8_INT, Names.UNSIGNED8);
public final BStringSubType charStringType = new BStringSubType(TypeTags.CHAR_STRING, Names.CHAR);
public final BXMLSubType xmlElementType = new BXMLSubType(TypeTags.XML_ELEMENT, Names.XML_ELEMENT);
public final BXMLSubType xmlPIType = new BXMLSubType(TypeTags.XML_PI, Names.XML_PI);
public final BXMLSubType xmlCommentType = new BXMLSubType(TypeTags.XML_COMMENT, Names.XML_COMMENT);
public final BXMLSubType xmlTextType = new BXMLSubType(TypeTags.XML_TEXT, Names.XML_TEXT, Flags.READONLY);
public final BType xmlType = new BXMLType(BUnionType.create(null, xmlElementType, xmlCommentType,
xmlPIType, xmlTextType), null);
public BPackageSymbol langInternalModuleSymbol;
public BPackageSymbol langAnnotationModuleSymbol;
public BPackageSymbol langArrayModuleSymbol;
public BPackageSymbol langDecimalModuleSymbol;
public BPackageSymbol langErrorModuleSymbol;
public BPackageSymbol langFloatModuleSymbol;
public BPackageSymbol langFutureModuleSymbol;
public BPackageSymbol langIntModuleSymbol;
public BPackageSymbol langMapModuleSymbol;
public BPackageSymbol langObjectModuleSymbol;
public BPackageSymbol langStreamModuleSymbol;
public BPackageSymbol langStringModuleSymbol;
public BPackageSymbol langTableModuleSymbol;
public BPackageSymbol langTypedescModuleSymbol;
public BPackageSymbol langValueModuleSymbol;
public BPackageSymbol langXmlModuleSymbol;
public BPackageSymbol langBooleanModuleSymbol;
public BPackageSymbol langQueryModuleSymbol;
public BPackageSymbol langTransactionModuleSymbol;
private Names names;
public Map<BPackageSymbol, SymbolEnv> pkgEnvMap = new HashMap<>();
public Map<Name, BPackageSymbol> predeclaredModules = new HashMap<>();
/**
 * Returns the {@code SymbolTable} bound to the given compilation context,
 * creating one on first use. The constructor registers the new instance in
 * the context via {@code context.put(SYM_TABLE_KEY, this)}, so later calls
 * find it through {@code context.get}.
 *
 * @param context the compiler context to look up / register in
 * @return the context's symbol table
 */
public static SymbolTable getInstance(CompilerContext context) {
    SymbolTable symTable = context.get(SYM_TABLE_KEY);
    if (symTable == null) {
        symTable = new SymbolTable(context);
    }
    return symTable;
}
private SymbolTable(CompilerContext context) {
context.put(SYM_TABLE_KEY, this);
this.names = Names.getInstance(context);
this.rootPkgNode = (BLangPackage) TreeBuilder.createPackageNode();
this.rootPkgSymbol = new BPackageSymbol(PackageID.ANNOTATIONS, null);
this.rootPkgNode.pos = new DiagnosticPos(new BDiagnosticSource(rootPkgSymbol.pkgID, Names.EMPTY.value), 0, 0,
0, 0);
this.rootPkgNode.symbol = this.rootPkgSymbol;
this.rootScope = new Scope(rootPkgSymbol);
this.rootPkgSymbol.scope = this.rootScope;
this.notFoundSymbol = new BSymbol(SymTag.NIL, Flags.PUBLIC, Names.INVALID,
rootPkgSymbol.pkgID, noType, rootPkgSymbol);
this.invalidUsageSymbol = new BSymbol(SymTag.NIL, Flags.PUBLIC, Names.INVALID, rootPkgSymbol.pkgID, noType,
rootPkgSymbol);
initializeType(intType, TypeKind.INT.typeName());
initializeType(byteType, TypeKind.BYTE.typeName());
initializeType(floatType, TypeKind.FLOAT.typeName());
initializeType(decimalType, TypeKind.DECIMAL.typeName());
initializeType(stringType, TypeKind.STRING.typeName());
initializeType(booleanType, TypeKind.BOOLEAN.typeName());
initializeType(jsonType, TypeKind.JSON.typeName());
initializeType(xmlType, TypeKind.XML.typeName());
initializeType(streamType, TypeKind.STREAM.typeName());
initializeType(tableType, TypeKind.TABLE.typeName());
initializeType(mapType, TypeKind.MAP.typeName());
initializeType(mapStringType, TypeKind.MAP.typeName());
initializeType(mapAnydataType, TypeKind.MAP.typeName());
initializeType(futureType, TypeKind.FUTURE.typeName());
initializeType(anyType, TypeKind.ANY.typeName());
initializeType(anydataType, TypeKind.ANYDATA.typeName());
initializeType(nilType, TypeKind.NIL.typeName());
initializeType(neverType, TypeKind.NEVER.typeName());
initializeType(anyServiceType, TypeKind.SERVICE.typeName());
initializeType(handleType, TypeKind.HANDLE.typeName());
initializeType(typeDesc, TypeKind.TYPEDESC.typeName());
initializeType(readonlyType, TypeKind.READONLY.typeName());
initializeTSymbol(signed32IntType, Names.SIGNED32, PackageID.INT);
initializeTSymbol(signed16IntType, Names.SIGNED16, PackageID.INT);
initializeTSymbol(signed8IntType, Names.SIGNED8, PackageID.INT);
initializeTSymbol(unsigned32IntType, Names.UNSIGNED32, PackageID.INT);
initializeTSymbol(unsigned16IntType, Names.UNSIGNED16, PackageID.INT);
initializeTSymbol(unsigned8IntType, Names.UNSIGNED8, PackageID.INT);
initializeTSymbol(charStringType, Names.CHAR, PackageID.STRING);
initializeTSymbol(xmlElementType, Names.XML_ELEMENT, PackageID.XML);
initializeTSymbol(xmlPIType, Names.XML_PI, PackageID.XML);
initializeTSymbol(xmlCommentType, Names.XML_COMMENT, PackageID.XML);
initializeTSymbol(xmlTextType, Names.XML_TEXT, PackageID.XML);
BLangLiteral trueLiteral = new BLangLiteral();
trueLiteral.type = this.booleanType;
trueLiteral.value = Boolean.TRUE;
BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, Flags.PUBLIC,
names.fromString("$anonType$TRUE"),
rootPkgNode.packageID, null, rootPkgNode.symbol.owner);
this.trueType = new BFiniteType(finiteTypeSymbol, new HashSet<BLangExpression>() {{
add(trueLiteral);
}});
}
/**
 * Resolves a {@code TypeTags} constant to the corresponding singleton
 * built-in type held by this symbol table.
 *
 * @param tag a {@code TypeTags} value
 * @return the matching built-in type, or {@code semanticError} for tags
 *         that have no dedicated singleton here
 */
public BType getTypeFromTag(int tag) {
    switch (tag) {
        case TypeTags.INT:
            return intType;
        case TypeTags.BYTE:
            return byteType;
        case TypeTags.FLOAT:
            return floatType;
        case TypeTags.DECIMAL:
            return decimalType;
        case TypeTags.STRING:
            return stringType;
        case TypeTags.BOOLEAN:
            return booleanType;
        case TypeTags.JSON:
            return jsonType;
        case TypeTags.XML:
            return xmlType;
        case TypeTags.XML_COMMENT:
            return xmlCommentType;
        case TypeTags.XML_PI:
            return xmlPIType;
        case TypeTags.XML_ELEMENT:
            return xmlElementType;
        case TypeTags.XML_TEXT:
            return xmlTextType;
        case TypeTags.STREAM:
            return streamType;
        case TypeTags.TABLE:
            return tableType;
        case TypeTags.NIL:
            return nilType;
        case TypeTags.NEVER:
            return neverType;
        case TypeTags.ERROR:
            return errorType;
        case TypeTags.SIGNED32_INT:
            return signed32IntType;
        case TypeTags.SIGNED16_INT:
            return signed16IntType;
        case TypeTags.SIGNED8_INT:
            return signed8IntType;
        case TypeTags.UNSIGNED32_INT:
            return unsigned32IntType;
        case TypeTags.UNSIGNED16_INT:
            return unsigned16IntType;
        case TypeTags.UNSIGNED8_INT:
            return unsigned8IntType;
        case TypeTags.CHAR_STRING:
            return charStringType;
        default:
            return semanticError;
    }
}
/**
 * Looks up a lang-library subtype singleton by its source-level name.
 *
 * @param name the subtype name as written in source
 * @return the matching singleton subtype
 * @throws IllegalStateException when the name is not a known subtype
 */
public BType getLangLibSubType(String name) {
    // Equality chain against the Names string constants; a null name fails
    // fast with an NPE on the first comparison, matching the previous
    // switch-on-String behavior.
    if (name.equals(Names.STRING_SIGNED32)) {
        return this.signed32IntType;
    }
    if (name.equals(Names.STRING_SIGNED16)) {
        return this.signed16IntType;
    }
    if (name.equals(Names.STRING_SIGNED8)) {
        return this.signed8IntType;
    }
    if (name.equals(Names.STRING_UNSIGNED32)) {
        return this.unsigned32IntType;
    }
    if (name.equals(Names.STRING_UNSIGNED16)) {
        return this.unsigned16IntType;
    }
    if (name.equals(Names.STRING_UNSIGNED8)) {
        return this.unsigned8IntType;
    }
    if (name.equals(Names.STRING_CHAR)) {
        return this.charStringType;
    }
    if (name.equals(Names.STRING_XML_ELEMENT)) {
        return this.xmlElementType;
    }
    if (name.equals(Names.STRING_XML_PI)) {
        return this.xmlPIType;
    }
    if (name.equals(Names.STRING_XML_COMMENT)) {
        return this.xmlCommentType;
    }
    if (name.equals(Names.STRING_XML_TEXT)) {
        return this.xmlTextType;
    }
    throw new IllegalStateException("LangLib Subtype not found: " + name);
}
// Convenience overload: interns the raw string as a Name, then delegates.
private void initializeType(BType type, String name) {
    initializeType(type, names.fromString(name));
}
// Creates a public type symbol owned by the root package and defines the
// type under that name in the root scope.
private void initializeType(BType type, Name name) {
    defineType(type, new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, rootPkgSymbol.pkgID, type, rootPkgSymbol));
}
// Attaches a type symbol carrying the given module's package ID, without
// defining the type in the root scope (unlike initializeType).
private void initializeTSymbol(BType type, Name name, PackageID packageID) {
    type.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, packageID, type, rootPkgSymbol);
}
// Links the symbol to its type and makes it resolvable from the root scope.
private void defineType(BType type, BTypeSymbol tSymbol) {
    type.tsymbol = tSymbol;
    rootScope.define(tSymbol.name, tSymbol);
}
public void defineOperators() {
defineIntegerArithmeticOperations();
defineXmlStringConcatanationOperations();
defineBinaryOperator(OperatorKind.ADD, stringType, stringType, stringType);
defineBinaryOperator(OperatorKind.ADD, stringType, charStringType, stringType);
defineBinaryOperator(OperatorKind.ADD, charStringType, stringType, stringType);
defineBinaryOperator(OperatorKind.ADD, charStringType, charStringType, stringType);
defineBinaryOperator(OperatorKind.ADD, floatType, floatType, floatType);
defineBinaryOperator(OperatorKind.ADD, decimalType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.ADD, intType, floatType, floatType);
defineBinaryOperator(OperatorKind.ADD, floatType, intType, floatType);
defineBinaryOperator(OperatorKind.ADD, intType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.ADD, decimalType, intType, decimalType);
defineBinaryOperator(OperatorKind.ADD, floatType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.ADD, decimalType, floatType, decimalType);
defineBinaryOperator(OperatorKind.SUB, floatType, floatType, floatType);
defineBinaryOperator(OperatorKind.SUB, decimalType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.SUB, floatType, intType, floatType);
defineBinaryOperator(OperatorKind.SUB, intType, floatType, floatType);
defineBinaryOperator(OperatorKind.SUB, decimalType, intType, decimalType);
defineBinaryOperator(OperatorKind.SUB, intType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.SUB, decimalType, floatType, decimalType);
defineBinaryOperator(OperatorKind.SUB, floatType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.DIV, floatType, floatType, floatType);
defineBinaryOperator(OperatorKind.DIV, decimalType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.DIV, intType, floatType, floatType);
defineBinaryOperator(OperatorKind.DIV, floatType, intType, floatType);
defineBinaryOperator(OperatorKind.DIV, intType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.DIV, decimalType, intType, decimalType);
defineBinaryOperator(OperatorKind.DIV, floatType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.DIV, decimalType, floatType, decimalType);
defineBinaryOperator(OperatorKind.MUL, floatType, floatType, floatType);
defineBinaryOperator(OperatorKind.MUL, decimalType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.MUL, floatType, intType, floatType);
defineBinaryOperator(OperatorKind.MUL, intType, floatType, floatType);
defineBinaryOperator(OperatorKind.MUL, decimalType, intType, decimalType);
defineBinaryOperator(OperatorKind.MUL, intType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.MUL, decimalType, floatType, decimalType);
defineBinaryOperator(OperatorKind.MUL, floatType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.MOD, floatType, floatType, floatType);
defineBinaryOperator(OperatorKind.MOD, decimalType, decimalType, decimalType);
defineBinaryOperator(OperatorKind.MOD, floatType, intType, floatType);
defineBinaryOperator(OperatorKind.MOD, intType, floatType, floatType);
defineBinaryOperator(OperatorKind.MOD, decimalType, intType, decimalType);
defineBinaryOperator(OperatorKind.MOD, intType, decimalType, decimalType);
defineIntegerBitwiseAndOperations();
defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_OR);
defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_XOR);
defineIntegerLeftShiftOperations();
defineIntegerRightShiftOperations(OperatorKind.BITWISE_RIGHT_SHIFT);
defineIntegerRightShiftOperations(OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT);
defineBinaryOperator(OperatorKind.EQUAL, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, byteType, byteType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, booleanType, booleanType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, stringType, stringType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, intType, byteType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, byteType, intType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, jsonType, nilType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, nilType, jsonType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, anyType, nilType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, nilType, anyType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, anydataType, nilType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, nilType, anydataType, booleanType);
defineBinaryOperator(OperatorKind.EQUAL, nilType, nilType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, byteType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, booleanType, booleanType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, stringType, stringType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, byteType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, intType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, jsonType, nilType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, jsonType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, anyType, nilType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anyType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, anydataType, nilType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anydataType, booleanType);
defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, nilType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, byteType, byteType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, booleanType, booleanType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, stringType, stringType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, intType, byteType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, byteType, intType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, jsonType, nilType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, nilType, jsonType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, anyType, nilType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, nilType, anyType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, anydataType, nilType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, nilType, anydataType, booleanType);
defineBinaryOperator(OperatorKind.EQUALS, nilType, nilType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, byteType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, booleanType, booleanType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, stringType, stringType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, intType, byteType, booleanType);
defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, intType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, byteType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, booleanType, booleanType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, stringType, stringType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, byteType, booleanType);
defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, intType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, intType, floatType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, floatType, intType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, intType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, intType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, floatType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, floatType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, intType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, floatType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, intType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, floatType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, intType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, intType, floatType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, intType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, intType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, floatType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, intType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, intType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, floatType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, floatType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, intType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, decimalType, booleanType);
defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, floatType, booleanType);
defineBinaryOperator(OperatorKind.AND, booleanType, booleanType, booleanType);
defineBinaryOperator(OperatorKind.OR, booleanType, booleanType, booleanType);
defineUnaryOperator(OperatorKind.ADD, floatType, floatType);
defineUnaryOperator(OperatorKind.ADD, decimalType, decimalType);
defineUnaryOperator(OperatorKind.ADD, intType, intType);
defineUnaryOperator(OperatorKind.SUB, floatType, floatType);
defineUnaryOperator(OperatorKind.SUB, decimalType, decimalType);
defineUnaryOperator(OperatorKind.SUB, intType, intType);
defineUnaryOperator(OperatorKind.NOT, booleanType, booleanType);
defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, byteType, byteType);
defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, intType, intType);
}
// Registers `+` overloads mixing xml and string operands: xml with a
// string(-ish) operand in either order yields xml, while xml:Text with a
// string(-ish) operand stays xml:Text. Code (and the existing misspelled
// method name, which callers in this class depend on) left untouched;
// registration order is preserved as-is.
private void defineXmlStringConcatanationOperations() {
    defineBinaryOperator(OperatorKind.ADD, xmlType, stringType, xmlType);
    defineBinaryOperator(OperatorKind.ADD, xmlType, charStringType, xmlType);
    defineBinaryOperator(OperatorKind.ADD, stringType, xmlType, xmlType);
    defineBinaryOperator(OperatorKind.ADD, charStringType, xmlType, xmlType);
    defineBinaryOperator(OperatorKind.ADD, stringType, xmlTextType, xmlTextType);
    defineBinaryOperator(OperatorKind.ADD, charStringType, xmlTextType, xmlTextType);
    defineBinaryOperator(OperatorKind.ADD, xmlTextType, stringType, xmlTextType);
    defineBinaryOperator(OperatorKind.ADD, xmlTextType, charStringType, xmlTextType);
}
/**
 * Registers the five binary arithmetic operators (ADD, SUB, DIV, MUL, MOD)
 * for every ordered pair of built-in integer types; every combination
 * results in int. Registration order matches the original: per (lhs, rhs)
 * pair, the five operators in the order above.
 */
private void defineIntegerArithmeticOperations() {
    OperatorKind[] arithmeticOps = {OperatorKind.ADD, OperatorKind.SUB, OperatorKind.DIV,
            OperatorKind.MUL, OperatorKind.MOD};
    BType[] integerTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType,
            unsigned32IntType, unsigned16IntType, unsigned8IntType};
    for (BType lhsType : integerTypes) {
        for (BType rhsType : integerTypes) {
            for (OperatorKind op : arithmeticOps) {
                defineBinaryOperator(op, lhsType, rhsType, intType);
            }
        }
    }
}
// Registers `&` overloads across the integer subtypes. Result-type rules as
// encoded below: mixing unsigned with signed (either order) keeps the
// unsigned operand's type; two unsigned operands keep the lower-indexed
// type in unsignedIntTypes (i <= j picks the LHS); two signed operands
// widen to int. Code untouched; registration order preserved.
private void defineIntegerBitwiseAndOperations() {
    BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
    BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
    // unsigned & signed -> unsigned operand's type
    for (BType unsigned : unsignedIntTypes) {
        for (BType signed : signedIntTypes) {
            defineBinaryOperator(OperatorKind.BITWISE_AND, unsigned, signed, unsigned);
        }
    }
    // unsigned & unsigned -> the lower-indexed operand type
    for (int i = 0; i < unsignedIntTypes.length; i++) {
        for (int j = 0; j < unsignedIntTypes.length; j++) {
            BType unsignedIntTypeLhs = unsignedIntTypes[i];
            BType unsignedIntTypeRhs = unsignedIntTypes[j];
            defineBinaryOperator(OperatorKind.BITWISE_AND, unsignedIntTypeLhs, unsignedIntTypeRhs,
                    i <= j ? unsignedIntTypeLhs : unsignedIntTypeRhs);
        }
    }
    // signed & unsigned -> unsigned operand's type
    for (BType signed : signedIntTypes) {
        for (BType unsigned : unsignedIntTypes) {
            defineBinaryOperator(OperatorKind.BITWISE_AND, signed, unsigned, unsigned);
        }
    }
    // signed & signed -> int
    for (BType signedLhs : signedIntTypes) {
        for (BType signedRhs : signedIntTypes) {
            defineBinaryOperator(OperatorKind.BITWISE_AND, signedLhs, signedRhs, intType);
        }
    }
}
// Registers `|`-shaped overloads across the integer subtypes; also reused
// for `^` (callers pass BITWISE_OR and BITWISE_XOR). Mixing unsigned with
// signed (either order) and signed-with-signed widen to int; two unsigned
// operands keep the lower-indexed type in unsignedIntTypes (i <= j picks
// the LHS). Code untouched; registration order preserved.
private void defineIntegerBitwiseOrOperations(OperatorKind orOpKind) {
    BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
    BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
    // unsigned op signed -> int
    for (BType unsigned : unsignedIntTypes) {
        for (BType signed : signedIntTypes) {
            defineBinaryOperator(orOpKind, unsigned, signed, intType);
        }
    }
    // unsigned op unsigned -> the lower-indexed operand type
    for (int i = 0; i < unsignedIntTypes.length; i++) {
        for (int j = 0; j < unsignedIntTypes.length; j++) {
            BType unsignedIntTypeLhs = unsignedIntTypes[i];
            BType unsignedIntTypeRhs = unsignedIntTypes[j];
            defineBinaryOperator(orOpKind, unsignedIntTypeLhs, unsignedIntTypeRhs,
                    i <= j ? unsignedIntTypeLhs : unsignedIntTypeRhs);
        }
    }
    // signed op unsigned -> int
    for (BType signed : signedIntTypes) {
        for (BType unsigned : unsignedIntTypes) {
            defineBinaryOperator(orOpKind, signed, unsigned, intType);
        }
    }
    // signed op signed -> int
    for (BType signedLhs : signedIntTypes) {
        for (BType signedRhs : signedIntTypes) {
            defineBinaryOperator(orOpKind, signedLhs, signedRhs, intType);
        }
    }
}
/**
 * Registers the `<<` operator for every ordered pair of built-in integer
 * types; each combination results in int. Same pairs, same registration
 * order as before.
 */
private void defineIntegerLeftShiftOperations() {
    BType[] integerTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType,
            unsigned32IntType, unsigned16IntType, unsigned8IntType};
    for (BType shiftedType : integerTypes) {
        for (BType shiftAmountType : integerTypes) {
            defineBinaryOperator(OperatorKind.BITWISE_LEFT_SHIFT, shiftedType, shiftAmountType, intType);
        }
    }
}
/**
 * Registers right-shift overloads (callers pass BITWISE_RIGHT_SHIFT or
 * BITWISE_UNSIGNED_RIGHT_SHIFT). As encoded below: an unsigned LHS keeps
 * its own type as the result, while a signed LHS results in int; the shift
 * amount may be any integer subtype. Registration order matches the
 * original (all unsigned LHS rows first, then all signed LHS rows).
 *
 * @param rightShiftOpKind which right-shift operator to register
 */
private void defineIntegerRightShiftOperations(OperatorKind rightShiftOpKind) {
    BType[] unsignedTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
    BType[] signedTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
    BType[] everyIntType = {intType, byteType, signed32IntType, signed16IntType, signed8IntType,
            unsigned32IntType, unsigned16IntType, unsigned8IntType};
    for (BType lhs : unsignedTypes) {
        for (BType amount : everyIntType) {
            defineBinaryOperator(rightShiftOpKind, lhs, amount, lhs);
        }
    }
    for (BType lhs : signedTypes) {
        for (BType amount : everyIntType) {
            defineBinaryOperator(rightShiftOpKind, lhs, amount, intType);
        }
    }
}
/**
 * Registers a binary operator overload in the root scope.
 *
 * @param kind    operator kind; its {@code value()} becomes the symbol name
 * @param lhsType left operand type
 * @param rhsType right operand type
 * @param retType result type
 */
public void defineBinaryOperator(OperatorKind kind,
                                 BType lhsType,
                                 BType rhsType,
                                 BType retType) {
    List<BType> paramTypes = Lists.of(lhsType, rhsType);
    defineOperator(names.fromString(kind.value()), paramTypes, retType);
}
private void defineUnaryOperator(OperatorKind kind,
BType type,
BType retType) {
List<BType> paramTypes = Lists.of(type);
defineOperator(names.fromString(kind.value()), paramTypes, retType);
}
private void defineOperator(Name name,
List<BType> paramTypes,
BType retType) {
BInvokableType opType = new BInvokableType(paramTypes, retType, null);
BOperatorSymbol symbol = new BOperatorSymbol(name, rootPkgSymbol.pkgID, opType, rootPkgSymbol);
rootScope.define(name, symbol);
}
} | class SymbolTable {
    // Key under which the singleton SymbolTable is cached on the CompilerContext.
    private static final CompilerContext.Key<SymbolTable> SYM_TABLE_KEY =
            new CompilerContext.Key<>();

    public static final PackageID TRANSACTION = new PackageID(Names.BUILTIN_ORG, Names.TRANSACTION_PACKAGE,
            Names.EMPTY);

    // Value-range bounds for the built-in integer subtypes.
    public static final Integer BBYTE_MIN_VALUE = 0;
    public static final Integer BBYTE_MAX_VALUE = 255;
    public static final Integer SIGNED32_MAX_VALUE = 2147483647;
    public static final Integer SIGNED32_MIN_VALUE = -2147483648;
    public static final Integer SIGNED16_MAX_VALUE = 32767;
    public static final Integer SIGNED16_MIN_VALUE = -32768;
    public static final Integer SIGNED8_MAX_VALUE = 127;
    public static final Integer SIGNED8_MIN_VALUE = -128;
    public static final Long UNSIGNED32_MAX_VALUE = 4294967295L;
    public static final Integer UNSIGNED16_MAX_VALUE = 65535;
    public static final Integer UNSIGNED8_MAX_VALUE = 255;

    // Root package plumbing: the synthetic package every built-in symbol lives in.
    public final BLangPackage rootPkgNode;
    public final BPackageSymbol rootPkgSymbol;
    // Sentinel symbols returned by lookups instead of null.
    public final BSymbol notFoundSymbol;
    public final BSymbol invalidUsageSymbol;
    public final Scope rootScope;

    // Shared singleton instances for the basic types.
    public final BType noType = new BNoType(TypeTags.NONE);
    public final BType nilType = new BNilType();
    public final BType neverType = new BNeverType();
    public final BType intType = new BType(TypeTags.INT, null, Flags.READONLY);
    public final BType byteType = new BType(TypeTags.BYTE, null, Flags.READONLY);
    public final BType floatType = new BType(TypeTags.FLOAT, null, Flags.READONLY);
    public final BType decimalType = new BType(TypeTags.DECIMAL, null, Flags.READONLY);
    public final BType stringType = new BType(TypeTags.STRING, null, Flags.READONLY);
    public final BType booleanType = new BType(TypeTags.BOOLEAN, null, Flags.READONLY);
    public final BType jsonType = new BJSONType(TypeTags.JSON, null);
    public final BType anyType = new BAnyType(TypeTags.ANY, null);
    public final BType anydataType = new BAnydataType(TypeTags.ANYDATA, null);

    // Commonly used structured-type instances.
    public final BMapType mapType = new BMapType(TypeTags.MAP, anyType, null);
    public final BMapType mapStringType = new BMapType(TypeTags.MAP, stringType, null);
    public final BMapType mapAnydataType = new BMapType(TypeTags.MAP, anydataType, null);
    public final BMapType mapJsonType = new BMapType(TypeTags.MAP, jsonType, null);
    public final BFutureType futureType = new BFutureType(TypeTags.FUTURE, nilType, null);
    public final BArrayType arrayType = new BArrayType(anyType);
    public final BArrayType arrayStringType = new BArrayType(stringType);
    public final BArrayType arrayAnydataType = new BArrayType(anydataType);
    public final BArrayType arrayJsonType = new BArrayType(jsonType);
    public final BType tupleType = new BTupleType(Lists.of(noType));
    public final BType recordType = new BRecordType(null);
    public final BType stringArrayType = new BArrayType(stringType);
    public final BType jsonArrayType = new BArrayType(jsonType);
    public final BType anydataArrayType = new BArrayType(anydataType);
    public final BType anyServiceType = new BServiceType(null);
    public final BType handleType = new BHandleType(TypeTags.HANDLE, null);
    public final BTypedescType typeDesc = new BTypedescType(this.anyType, null);
    public final BType readonlyType = new BReadonlyType(TypeTags.READONLY, null);
    public final BType anydataOrReadonly = BUnionType.create(null, anydataType, readonlyType);

    // Internal marker types used by the compiler itself.
    public final BType semanticError = new BType(TypeTags.SEMANTIC_ERROR, null);
    public final BType nullSet = new BType(TypeTags.NULL_SET, null);

    public BType streamType = new BStreamType(TypeTags.STREAM, anydataType, null, null);
    public BType tableType = new BTableType(TypeTags.TABLE, anydataType, null);
    public BMapType detailType = new BMapType(TypeTags.MAP, anydataOrReadonly, null);
    public BErrorType errorType = new BErrorType(null, detailType);
    // The following are populated later, after the relevant modules are loaded.
    public BConstructorSymbol errorConstructor;
    public BUnionType anyOrErrorType;
    public BUnionType pureType;
    public BUnionType errorOrNilType;
    public BFiniteType trueType;
    public BObjectType intRangeType;
    public BMapType mapAllType;
    public BArrayType arrayAllType;
    public BObjectType rawTemplateType;

    // Built-in int, string and xml subtypes (lang-library subtypes).
    public final BIntSubType signed32IntType = new BIntSubType(TypeTags.SIGNED32_INT, Names.SIGNED32);
    public final BIntSubType signed16IntType = new BIntSubType(TypeTags.SIGNED16_INT, Names.SIGNED16);
    public final BIntSubType signed8IntType = new BIntSubType(TypeTags.SIGNED8_INT, Names.SIGNED8);
    public final BIntSubType unsigned32IntType = new BIntSubType(TypeTags.UNSIGNED32_INT, Names.UNSIGNED32);
    public final BIntSubType unsigned16IntType = new BIntSubType(TypeTags.UNSIGNED16_INT, Names.UNSIGNED16);
    public final BIntSubType unsigned8IntType = new BIntSubType(TypeTags.UNSIGNED8_INT, Names.UNSIGNED8);
    public final BStringSubType charStringType = new BStringSubType(TypeTags.CHAR_STRING, Names.CHAR);
    public final BXMLSubType xmlElementType = new BXMLSubType(TypeTags.XML_ELEMENT, Names.XML_ELEMENT);
    public final BXMLSubType xmlPIType = new BXMLSubType(TypeTags.XML_PI, Names.XML_PI);
    public final BXMLSubType xmlCommentType = new BXMLSubType(TypeTags.XML_COMMENT, Names.XML_COMMENT);
    public final BXMLSubType xmlTextType = new BXMLSubType(TypeTags.XML_TEXT, Names.XML_TEXT, Flags.READONLY);

    public final BType xmlType = new BXMLType(BUnionType.create(null, xmlElementType, xmlCommentType,
            xmlPIType, xmlTextType), null);

    // Symbols of the lang-library modules; assigned when those modules are loaded.
    public BPackageSymbol langInternalModuleSymbol;
    public BPackageSymbol langAnnotationModuleSymbol;
    public BPackageSymbol langArrayModuleSymbol;
    public BPackageSymbol langDecimalModuleSymbol;
    public BPackageSymbol langErrorModuleSymbol;
    public BPackageSymbol langFloatModuleSymbol;
    public BPackageSymbol langFutureModuleSymbol;
    public BPackageSymbol langIntModuleSymbol;
    public BPackageSymbol langMapModuleSymbol;
    public BPackageSymbol langObjectModuleSymbol;
    public BPackageSymbol langStreamModuleSymbol;
    public BPackageSymbol langStringModuleSymbol;
    public BPackageSymbol langTableModuleSymbol;
    public BPackageSymbol langTypedescModuleSymbol;
    public BPackageSymbol langValueModuleSymbol;
    public BPackageSymbol langXmlModuleSymbol;
    public BPackageSymbol langBooleanModuleSymbol;
    public BPackageSymbol langQueryModuleSymbol;
    public BPackageSymbol langTransactionModuleSymbol;

    private Names names;
    public Map<BPackageSymbol, SymbolEnv> pkgEnvMap = new HashMap<>();
    public Map<Name, BPackageSymbol> predeclaredModules = new HashMap<>();
public static SymbolTable getInstance(CompilerContext context) {
SymbolTable symTable = context.get(SYM_TABLE_KEY);
if (symTable == null) {
symTable = new SymbolTable(context);
}
return symTable;
}
    private SymbolTable(CompilerContext context) {
        // Register this instance on the context first so re-entrant getInstance
        // calls during construction observe it.
        context.put(SYM_TABLE_KEY, this);

        this.names = Names.getInstance(context);

        // Build the synthetic root package and its scope; every built-in symbol
        // defined below is owned by this package.
        this.rootPkgNode = (BLangPackage) TreeBuilder.createPackageNode();
        this.rootPkgSymbol = new BPackageSymbol(PackageID.ANNOTATIONS, null);
        this.rootPkgNode.pos = new DiagnosticPos(new BDiagnosticSource(rootPkgSymbol.pkgID, Names.EMPTY.value), 0, 0,
                0, 0);
        this.rootPkgNode.symbol = this.rootPkgSymbol;
        this.rootScope = new Scope(rootPkgSymbol);
        this.rootPkgSymbol.scope = this.rootScope;

        // Sentinel symbols used instead of null when a lookup fails / is invalid.
        this.notFoundSymbol = new BSymbol(SymTag.NIL, Flags.PUBLIC, Names.INVALID,
                rootPkgSymbol.pkgID, noType, rootPkgSymbol);
        this.invalidUsageSymbol = new BSymbol(SymTag.NIL, Flags.PUBLIC, Names.INVALID, rootPkgSymbol.pkgID, noType,
                rootPkgSymbol);

        // Define the built-in basic types in the root scope.
        initializeType(intType, TypeKind.INT.typeName());
        initializeType(byteType, TypeKind.BYTE.typeName());
        initializeType(floatType, TypeKind.FLOAT.typeName());
        initializeType(decimalType, TypeKind.DECIMAL.typeName());
        initializeType(stringType, TypeKind.STRING.typeName());
        initializeType(booleanType, TypeKind.BOOLEAN.typeName());
        initializeType(jsonType, TypeKind.JSON.typeName());
        initializeType(xmlType, TypeKind.XML.typeName());
        initializeType(streamType, TypeKind.STREAM.typeName());
        initializeType(tableType, TypeKind.TABLE.typeName());
        initializeType(mapType, TypeKind.MAP.typeName());
        initializeType(mapStringType, TypeKind.MAP.typeName());
        initializeType(mapAnydataType, TypeKind.MAP.typeName());
        initializeType(futureType, TypeKind.FUTURE.typeName());
        initializeType(anyType, TypeKind.ANY.typeName());
        initializeType(anydataType, TypeKind.ANYDATA.typeName());
        initializeType(nilType, TypeKind.NIL.typeName());
        initializeType(neverType, TypeKind.NEVER.typeName());
        initializeType(anyServiceType, TypeKind.SERVICE.typeName());
        initializeType(handleType, TypeKind.HANDLE.typeName());
        initializeType(typeDesc, TypeKind.TYPEDESC.typeName());
        initializeType(readonlyType, TypeKind.READONLY.typeName());

        // Attach type symbols to the lang-library subtypes; these belong to their
        // respective lang modules, not the root package, so they are not defined
        // in the root scope.
        initializeTSymbol(signed32IntType, Names.SIGNED32, PackageID.INT);
        initializeTSymbol(signed16IntType, Names.SIGNED16, PackageID.INT);
        initializeTSymbol(signed8IntType, Names.SIGNED8, PackageID.INT);
        initializeTSymbol(unsigned32IntType, Names.UNSIGNED32, PackageID.INT);
        initializeTSymbol(unsigned16IntType, Names.UNSIGNED16, PackageID.INT);
        initializeTSymbol(unsigned8IntType, Names.UNSIGNED8, PackageID.INT);
        initializeTSymbol(charStringType, Names.CHAR, PackageID.STRING);
        initializeTSymbol(xmlElementType, Names.XML_ELEMENT, PackageID.XML);
        initializeTSymbol(xmlPIType, Names.XML_PI, PackageID.XML);
        initializeTSymbol(xmlCommentType, Names.XML_COMMENT, PackageID.XML);
        initializeTSymbol(xmlTextType, Names.XML_TEXT, PackageID.XML);

        // Build the singleton finite type containing only `true`, used for type
        // narrowing of boolean expressions.
        BLangLiteral trueLiteral = new BLangLiteral();
        trueLiteral.type = this.booleanType;
        trueLiteral.value = Boolean.TRUE;

        BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, Flags.PUBLIC,
                names.fromString("$anonType$TRUE"),
                rootPkgNode.packageID, null, rootPkgNode.symbol.owner);
        this.trueType = new BFiniteType(finiteTypeSymbol, new HashSet<BLangExpression>() {{
            add(trueLiteral);
        }});
    }
    /**
     * Returns the shared {@code BType} singleton for a basic type tag.
     *
     * @param tag a {@link TypeTags} constant
     * @return the matching built-in type instance, or {@link #semanticError}
     *         when no dedicated instance exists for the tag
     */
    public BType getTypeFromTag(int tag) {
        switch (tag) {
            case TypeTags.INT:
                return intType;
            case TypeTags.BYTE:
                return byteType;
            case TypeTags.FLOAT:
                return floatType;
            case TypeTags.DECIMAL:
                return decimalType;
            case TypeTags.STRING:
                return stringType;
            case TypeTags.BOOLEAN:
                return booleanType;
            case TypeTags.JSON:
                return jsonType;
            case TypeTags.XML:
                return xmlType;
            case TypeTags.XML_COMMENT:
                return xmlCommentType;
            case TypeTags.XML_PI:
                return xmlPIType;
            case TypeTags.XML_ELEMENT:
                return xmlElementType;
            case TypeTags.XML_TEXT:
                return xmlTextType;
            case TypeTags.STREAM:
                return streamType;
            case TypeTags.TABLE:
                return tableType;
            case TypeTags.NIL:
                return nilType;
            case TypeTags.NEVER:
                return neverType;
            case TypeTags.ERROR:
                return errorType;
            case TypeTags.SIGNED32_INT:
                return signed32IntType;
            case TypeTags.SIGNED16_INT:
                return signed16IntType;
            case TypeTags.SIGNED8_INT:
                return signed8IntType;
            case TypeTags.UNSIGNED32_INT:
                return unsigned32IntType;
            case TypeTags.UNSIGNED16_INT:
                return unsigned16IntType;
            case TypeTags.UNSIGNED8_INT:
                return unsigned8IntType;
            case TypeTags.CHAR_STRING:
                return charStringType;
            default:
                // Tags with no singleton here (arrays, records, unions, ...) fall
                // through to the semantic-error marker type.
                return semanticError;
        }
    }
    /**
     * Resolves a lang-library subtype (e.g. {@code int:Signed32},
     * {@code string:Char}, {@code xml:Element}) by its simple name.
     *
     * @param name the subtype name as it appears in the lang library
     * @return the corresponding subtype instance
     * @throws IllegalStateException if the name is not a known lang-lib subtype
     */
    public BType getLangLibSubType(String name) {
        // Assuming this switch is exhaustive for the current lang libs; every case
        // returns, so reaching the end means an unknown name.
        switch (name) {
            case Names.STRING_SIGNED32:
                return this.signed32IntType;
            case Names.STRING_SIGNED16:
                return this.signed16IntType;
            case Names.STRING_SIGNED8:
                return this.signed8IntType;
            case Names.STRING_UNSIGNED32:
                return this.unsigned32IntType;
            case Names.STRING_UNSIGNED16:
                return this.unsigned16IntType;
            case Names.STRING_UNSIGNED8:
                return this.unsigned8IntType;
            case Names.STRING_CHAR:
                return this.charStringType;
            case Names.STRING_XML_ELEMENT:
                return this.xmlElementType;
            case Names.STRING_XML_PI:
                return this.xmlPIType;
            case Names.STRING_XML_COMMENT:
                return this.xmlCommentType;
            case Names.STRING_XML_TEXT:
                return this.xmlTextType;
        }

        throw new IllegalStateException("LangLib Subtype not found: " + name);
    }
    // Convenience overload: converts the raw name to a Name before defining.
    private void initializeType(BType type, String name) {
        initializeType(type, names.fromString(name));
    }
    // Creates a public type symbol owned by the root package and defines the
    // type under the given name in the root scope.
    private void initializeType(BType type, Name name) {
        defineType(type, new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, rootPkgSymbol.pkgID, type, rootPkgSymbol));
    }
    // Attaches a type symbol belonging to the given package (a lang module) to
    // the type, without defining it in the root scope.
    private void initializeTSymbol(BType type, Name name, PackageID packageID) {
        type.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, name, packageID, type, rootPkgSymbol);
    }
    // Links the symbol to the type and makes it resolvable from the root scope.
    private void defineType(BType type, BTypeSymbol tSymbol) {
        type.tsymbol = tSymbol;
        rootScope.define(tSymbol.name, tSymbol);
    }
    /**
     * Registers every built-in binary and unary operator in the root scope.
     * Symbols sharing a name are chained in the scope, so registration order
     * within one operator kind is significant — do not reorder casually.
     */
    public void defineOperators() {
        // Arithmetic over all int subtypes (result is always int).
        defineIntegerArithmeticOperations();

        // XML / string concatenation variants of `+`.
        defineXmlStringConcatanationOperations();

        // String concatenation (`string` and `string:Char` operands).
        defineBinaryOperator(OperatorKind.ADD, stringType, stringType, stringType);
        defineBinaryOperator(OperatorKind.ADD, stringType, charStringType, stringType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, stringType, stringType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, charStringType, stringType);

        // Floating-point / decimal addition, including mixed-operand forms.
        defineBinaryOperator(OperatorKind.ADD, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.ADD, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.ADD, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.ADD, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, floatType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.ADD, decimalType, floatType, decimalType);

        // Subtraction.
        defineBinaryOperator(OperatorKind.SUB, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.SUB, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.SUB, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.SUB, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, decimalType, floatType, decimalType);
        defineBinaryOperator(OperatorKind.SUB, floatType, decimalType, decimalType);

        // Division.
        defineBinaryOperator(OperatorKind.DIV, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.DIV, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.DIV, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.DIV, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, floatType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.DIV, decimalType, floatType, decimalType);

        // Multiplication.
        defineBinaryOperator(OperatorKind.MUL, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MUL, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.MUL, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MUL, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, intType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, decimalType, floatType, decimalType);
        defineBinaryOperator(OperatorKind.MUL, floatType, decimalType, decimalType);

        // Modulo. NOTE(review): unlike ADD/SUB/DIV/MUL there are no
        // decimal%float / float%decimal variants here — confirm intentional.
        defineBinaryOperator(OperatorKind.MOD, floatType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MOD, decimalType, decimalType, decimalType);
        defineBinaryOperator(OperatorKind.MOD, floatType, intType, floatType);
        defineBinaryOperator(OperatorKind.MOD, intType, floatType, floatType);
        defineBinaryOperator(OperatorKind.MOD, decimalType, intType, decimalType);
        defineBinaryOperator(OperatorKind.MOD, intType, decimalType, decimalType);

        // Bitwise and shift operators over int subtypes.
        defineIntegerBitwiseAndOperations();
        defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_OR);
        defineIntegerBitwiseOrOperations(OperatorKind.BITWISE_XOR);
        defineIntegerLeftShiftOperations();
        defineIntegerRightShiftOperations(OperatorKind.BITWISE_RIGHT_SHIFT);
        defineIntegerRightShiftOperations(OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT);

        // Equality (`==`).
        defineBinaryOperator(OperatorKind.EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, byteType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, jsonType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, jsonType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, anyType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, anyType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, anydataType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, anydataType, booleanType);
        defineBinaryOperator(OperatorKind.EQUAL, nilType, nilType, booleanType);

        // Inequality (`!=`) — mirrors the `==` pairs.
        defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, byteType, intType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, jsonType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, jsonType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, anyType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anyType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, anydataType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, anydataType, booleanType);
        defineBinaryOperator(OperatorKind.NOT_EQUAL, nilType, nilType, booleanType);

        // Deep-equality operator — same operand pairs as `==`.
        defineBinaryOperator(OperatorKind.EQUALS, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, byteType, intType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, jsonType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, jsonType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, anyType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, anyType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, anydataType, nilType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, anydataType, booleanType);
        defineBinaryOperator(OperatorKind.EQUALS, nilType, nilType, booleanType);

        // Reference equality (`===`).
        defineBinaryOperator(OperatorKind.REF_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_EQUAL, byteType, intType, booleanType);

        // Reference inequality (`!==`).
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, stringType, stringType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, intType, byteType, booleanType);
        defineBinaryOperator(OperatorKind.REF_NOT_EQUAL, byteType, intType, booleanType);

        // Ordering comparisons over the numeric types.
        defineBinaryOperator(OperatorKind.LESS_THAN, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_THAN, decimalType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.LESS_EQUAL, decimalType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_THAN, decimalType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, floatType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, intType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, intType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, floatType, decimalType, booleanType);
        defineBinaryOperator(OperatorKind.GREATER_EQUAL, decimalType, floatType, booleanType);

        // Logical operators.
        defineBinaryOperator(OperatorKind.AND, booleanType, booleanType, booleanType);
        defineBinaryOperator(OperatorKind.OR, booleanType, booleanType, booleanType);

        // Unary operators.
        defineUnaryOperator(OperatorKind.ADD, floatType, floatType);
        defineUnaryOperator(OperatorKind.ADD, decimalType, decimalType);
        defineUnaryOperator(OperatorKind.ADD, intType, intType);
        defineUnaryOperator(OperatorKind.SUB, floatType, floatType);
        defineUnaryOperator(OperatorKind.SUB, decimalType, decimalType);
        defineUnaryOperator(OperatorKind.SUB, intType, intType);
        defineUnaryOperator(OperatorKind.NOT, booleanType, booleanType);
        defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, byteType, byteType);
        defineUnaryOperator(OperatorKind.BITWISE_COMPLEMENT, intType, intType);
    }
    // Registers the `+` variants that mix xml and string operands.
    // xml + string yields xml; xml:Text + string stays xml:Text.
    private void defineXmlStringConcatanationOperations() {
        defineBinaryOperator(OperatorKind.ADD, xmlType, stringType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, xmlType, charStringType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, stringType, xmlType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, xmlType, xmlType);
        defineBinaryOperator(OperatorKind.ADD, stringType, xmlTextType, xmlTextType);
        defineBinaryOperator(OperatorKind.ADD, charStringType, xmlTextType, xmlTextType);
        defineBinaryOperator(OperatorKind.ADD, xmlTextType, stringType, xmlTextType);
        defineBinaryOperator(OperatorKind.ADD, xmlTextType, charStringType, xmlTextType);
    }
private void defineIntegerArithmeticOperations() {
BType[] intTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType, unsigned32IntType,
unsigned16IntType,
unsigned8IntType};
for (BType lhs : intTypes) {
for (BType rhs : intTypes) {
defineBinaryOperator(OperatorKind.ADD, lhs, rhs, intType);
defineBinaryOperator(OperatorKind.SUB, lhs, rhs, intType);
defineBinaryOperator(OperatorKind.DIV, lhs, rhs, intType);
defineBinaryOperator(OperatorKind.MUL, lhs, rhs, intType);
defineBinaryOperator(OperatorKind.MOD, lhs, rhs, intType);
}
}
}
private void defineIntegerBitwiseAndOperations() {
BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
for (BType unsigned : unsignedIntTypes) {
for (BType signed : signedIntTypes) {
defineBinaryOperator(OperatorKind.BITWISE_AND, unsigned, signed, unsigned);
}
}
for (int i = 0; i < unsignedIntTypes.length; i++) {
for (int j = 0; j < unsignedIntTypes.length; j++) {
BType unsignedIntTypeLhs = unsignedIntTypes[i];
BType unsignedIntTypeRhs = unsignedIntTypes[j];
defineBinaryOperator(OperatorKind.BITWISE_AND, unsignedIntTypeLhs, unsignedIntTypeRhs,
i <= j ? unsignedIntTypeLhs : unsignedIntTypeRhs);
}
}
for (BType signed : signedIntTypes) {
for (BType unsigned : unsignedIntTypes) {
defineBinaryOperator(OperatorKind.BITWISE_AND, signed, unsigned, unsigned);
}
}
for (BType signedLhs : signedIntTypes) {
for (BType signedRhs : signedIntTypes) {
defineBinaryOperator(OperatorKind.BITWISE_AND, signedLhs, signedRhs, intType);
}
}
}
private void defineIntegerBitwiseOrOperations(OperatorKind orOpKind) {
BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
for (BType unsigned : unsignedIntTypes) {
for (BType signed : signedIntTypes) {
defineBinaryOperator(orOpKind, unsigned, signed, intType);
}
}
for (int i = 0; i < unsignedIntTypes.length; i++) {
for (int j = 0; j < unsignedIntTypes.length; j++) {
BType unsignedIntTypeLhs = unsignedIntTypes[i];
BType unsignedIntTypeRhs = unsignedIntTypes[j];
defineBinaryOperator(orOpKind, unsignedIntTypeLhs, unsignedIntTypeRhs,
i <= j ? unsignedIntTypeLhs : unsignedIntTypeRhs);
}
}
for (BType signed : signedIntTypes) {
for (BType unsigned : unsignedIntTypes) {
defineBinaryOperator(orOpKind, signed, unsigned, intType);
}
}
for (BType signedLhs : signedIntTypes) {
for (BType signedRhs : signedIntTypes) {
defineBinaryOperator(orOpKind, signedLhs, signedRhs, intType);
}
}
}
private void defineIntegerLeftShiftOperations() {
BType[] allIntTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType, unsigned32IntType,
unsigned16IntType, unsigned8IntType};
for (BType lhs : allIntTypes) {
for (BType rhs : allIntTypes) {
defineBinaryOperator(OperatorKind.BITWISE_LEFT_SHIFT, lhs, rhs, intType);
}
}
}
private void defineIntegerRightShiftOperations(OperatorKind rightShiftOpKind) {
BType[] unsignedIntTypes = {byteType, unsigned8IntType, unsigned16IntType, unsigned32IntType};
BType[] signedIntTypes = {intType, signed8IntType, signed16IntType, signed32IntType};
BType[] allIntTypes = {intType, byteType, signed32IntType, signed16IntType, signed8IntType, unsigned32IntType,
unsigned16IntType, unsigned8IntType};
for (BType unsignedLhs : unsignedIntTypes) {
for (BType intRhs : allIntTypes) {
defineBinaryOperator(rightShiftOpKind, unsignedLhs, intRhs, unsignedLhs);
}
}
for (BType signedLhs : signedIntTypes) {
for (BType intRhs : allIntTypes) {
defineBinaryOperator(rightShiftOpKind, signedLhs, intRhs, intType);
}
}
}
public void defineBinaryOperator(OperatorKind kind,
BType lhsType,
BType rhsType,
BType retType) {
List<BType> paramTypes = Lists.of(lhsType, rhsType);
defineOperator(names.fromString(kind.value()), paramTypes, retType);
}
private void defineUnaryOperator(OperatorKind kind,
BType type,
BType retType) {
List<BType> paramTypes = Lists.of(type);
defineOperator(names.fromString(kind.value()), paramTypes, retType);
}
private void defineOperator(Name name,
List<BType> paramTypes,
BType retType) {
BInvokableType opType = new BInvokableType(paramTypes, retType, null);
BOperatorSymbol symbol = new BOperatorSymbol(name, rootPkgSymbol.pkgID, opType, rootPkgSymbol);
rootScope.define(name, symbol);
}
} |
New issue https://github.com/ballerina-platform/ballerina-lang/issues/36069 | public Boolean visit(BRecordType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
return false;
}
BRecordType source = (BRecordType) s;
if (source.fields.size() != t.fields.size()) {
return false;
}
for (BField sourceField : source.fields.values()) {
if (t.fields.containsKey(sourceField.name.value)) {
BField targetField = t.fields.get(sourceField.name.value);
if (isSameType(sourceField.type, targetField.type, new HashSet<>(this.unresolvedTypes)) &&
hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
(!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
continue;
}
}
return false;
}
return isSameType(source.restFieldType, t.restFieldType, new HashSet<>(this.unresolvedTypes));
} | if (isSameType(sourceField.type, targetField.type, new HashSet<>(this.unresolvedTypes)) && | public Boolean visit(BRecordType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
return false;
}
BRecordType source = (BRecordType) s;
if (source.fields.size() != t.fields.size()) {
return false;
}
for (BField sourceField : source.fields.values()) {
if (t.fields.containsKey(sourceField.name.value)) {
BField targetField = t.fields.get(sourceField.name.value);
if (isSameType(sourceField.type, targetField.type, new HashSet<>(this.unresolvedTypes)) &&
hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
(!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
continue;
}
}
return false;
}
return isSameType(source.restFieldType, t.restFieldType, new HashSet<>(this.unresolvedTypes));
} | class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {
        // Type pairs currently under comparison; threaded through recursive
        // isSameType calls to break cycles in self-referential types.
        Set<TypePair> unresolvedTypes;

        BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
            this.unresolvedTypes = unresolvedTypes;
        }
        @Override
        public Boolean visit(BType target, BType source) {
            // Unwrap type-reference types before comparing.
            BType t = getReferredType(target);
            BType s = getReferredType(source);
            if (t == s) {
                return true;
            }
            switch (t.tag) {
                case TypeTags.INT:
                case TypeTags.BYTE:
                case TypeTags.FLOAT:
                case TypeTags.DECIMAL:
                case TypeTags.STRING:
                case TypeTags.BOOLEAN:
                    // Distinct instances of a basic type only compare equal when a
                    // type param is involved.
                    // NOTE(review): after getReferredType neither t nor s should
                    // still carry the TYPEREFDESC tag, so the second disjunct looks
                    // unreachable — confirm whether getReferredType fully unwraps.
                    return t.tag == s.tag
                            && ((TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)) ||
                                        (t.tag == TypeTags.TYPEREFDESC || s.tag == TypeTags.TYPEREFDESC));
                case TypeTags.ANY:
                case TypeTags.ANYDATA:
                    // any/anydata additionally require matching readonly-ness.
                    return t.tag == s.tag && hasSameReadonlyFlag(s, t)
                            && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
                default:
                    break;
            }
            return false;
        }
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
if (t == s) {
return true;
}
return t.tag == s.tag;
}
@Override
public Boolean visit(BMapType t, BType s) {
if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
return false;
}
BMapType sType = ((BMapType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFutureType t, BType s) {
return s.tag == TypeTags.FUTURE &&
isSameType(t.constraint, ((BFutureType) s).constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BXMLType t, BType s) {
return visit((BBuiltInRefType) t, s);
}
@Override
public Boolean visit(BJSONType t, BType s) {
return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
}
@Override
public Boolean visit(BArrayType t, BType s) {
return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BObjectType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.OBJECT) {
return false;
}
return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);
}
@Override
private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
public Boolean visit(BTupleType t, BType s) {
if (((!t.tupleTypes.isEmpty() && checkAllTupleMembersBelongNoType(t.tupleTypes)) ||
(t.restType != null && t.restType.tag == TypeTags.NONE)) &&
!(s.tag == TypeTags.ARRAY && ((BArrayType) s).state == BArrayState.OPEN)) {
return true;
}
if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
return false;
}
BTupleType source = (BTupleType) s;
if (source.tupleTypes.size() != t.tupleTypes.size()) {
return false;
}
BType sourceRestType = source.restType;
BType targetRestType = t.restType;
if ((sourceRestType == null || targetRestType == null) && sourceRestType != targetRestType) {
return false;
}
for (int i = 0; i < source.tupleTypes.size(); i++) {
if (t.getTupleTypes().get(i) == symTable.noType) {
continue;
}
if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
return false;
}
}
if (sourceRestType == null || targetRestType == symTable.noType) {
return true;
}
return isSameType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BStreamType t, BType s) {
return s.tag == TypeTags.STREAM && isSameStreamType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BTableType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType tUnionType, BType s) {
if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
return false;
}
BUnionType sUnionType = (BUnionType) s;
if (sUnionType.getMemberTypes().size()
!= tUnionType.getMemberTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size());
Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size());
sourceTypes.add(sUnionType);
sourceTypes.addAll(sUnionType.getMemberTypes());
targetTypes.add(tUnionType);
targetTypes.addAll(tUnionType.getMemberTypes());
boolean notSameType = sourceTypes
.stream()
.map(sT -> targetTypes
.stream()
.anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))
.anyMatch(foundSameType -> !foundSameType);
return !notSameType;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
return false;
}
BIntersectionType sIntersectionType = (BIntersectionType) s;
if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
for (BType sourceType : sourceTypes) {
boolean foundSameType = false;
for (BType targetType : targetTypes) {
if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
foundSameType = true;
break;
}
}
if (!foundSameType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BErrorType t, BType s) {
if (s.tag != TypeTags.ERROR) {
return false;
}
BErrorType source = (BErrorType) s;
if (!source.typeIdSet.equals(t.typeIdSet)) {
return false;
}
if (source.detailType == t.detailType) {
return true;
}
return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
}
@Override
public Boolean visit(BTypedescType t, BType s) {
if (s.tag != TypeTags.TYPEDESC) {
return false;
}
BTypedescType sType = ((BTypedescType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFiniteType t, BType s) {
return s == t;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
return false;
}
BParameterizedType sType = (BParameterizedType) s;
return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
}
public Boolean visit(BTypeReferenceType t, BType s) {
return isSameType(getReferredType(t), s);
}
} | class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType target, BType source) {
BType t = getReferredType(target);
BType s = getReferredType(source);
if (t == s) {
return true;
}
switch (t.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
return t.tag == s.tag
&& ((TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s)) ||
(t.tag == TypeTags.TYPEREFDESC || s.tag == TypeTags.TYPEREFDESC));
case TypeTags.ANY:
case TypeTags.ANYDATA:
return t.tag == s.tag && hasSameReadonlyFlag(s, t)
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
default:
break;
}
return false;
}
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
if (t == s) {
return true;
}
return t.tag == s.tag;
}
@Override
public Boolean visit(BMapType t, BType s) {
if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
return false;
}
BMapType sType = ((BMapType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFutureType t, BType s) {
return s.tag == TypeTags.FUTURE &&
isSameType(t.constraint, ((BFutureType) s).constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BXMLType t, BType s) {
return visit((BBuiltInRefType) t, s);
}
@Override
public Boolean visit(BJSONType t, BType s) {
return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
}
@Override
public Boolean visit(BArrayType t, BType s) {
return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BObjectType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.OBJECT) {
return false;
}
return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);
}
@Override
private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
public Boolean visit(BTupleType t, BType s) {
if (((!t.tupleTypes.isEmpty() && checkAllTupleMembersBelongNoType(t.tupleTypes)) ||
(t.restType != null && t.restType.tag == TypeTags.NONE)) &&
!(s.tag == TypeTags.ARRAY && ((BArrayType) s).state == BArrayState.OPEN)) {
return true;
}
if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
return false;
}
BTupleType source = (BTupleType) s;
if (source.tupleTypes.size() != t.tupleTypes.size()) {
return false;
}
BType sourceRestType = source.restType;
BType targetRestType = t.restType;
if ((sourceRestType == null || targetRestType == null) && sourceRestType != targetRestType) {
return false;
}
for (int i = 0; i < source.tupleTypes.size(); i++) {
if (t.getTupleTypes().get(i) == symTable.noType) {
continue;
}
if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
return false;
}
}
if (sourceRestType == null || targetRestType == symTable.noType) {
return true;
}
return isSameType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BStreamType t, BType s) {
return s.tag == TypeTags.STREAM && isSameStreamType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BTableType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType tUnionType, BType s) {
if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
return false;
}
BUnionType sUnionType = (BUnionType) s;
if (sUnionType.getMemberTypes().size()
!= tUnionType.getMemberTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size());
Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size());
sourceTypes.add(sUnionType);
sourceTypes.addAll(sUnionType.getMemberTypes());
targetTypes.add(tUnionType);
targetTypes.addAll(tUnionType.getMemberTypes());
boolean notSameType = sourceTypes
.stream()
.map(sT -> targetTypes
.stream()
.anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))
.anyMatch(foundSameType -> !foundSameType);
return !notSameType;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
return false;
}
BIntersectionType sIntersectionType = (BIntersectionType) s;
if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
for (BType sourceType : sourceTypes) {
boolean foundSameType = false;
for (BType targetType : targetTypes) {
if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
foundSameType = true;
break;
}
}
if (!foundSameType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BErrorType t, BType s) {
if (s.tag != TypeTags.ERROR) {
return false;
}
BErrorType source = (BErrorType) s;
if (!source.typeIdSet.equals(t.typeIdSet)) {
return false;
}
if (source.detailType == t.detailType) {
return true;
}
return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
}
@Override
public Boolean visit(BTypedescType t, BType s) {
if (s.tag != TypeTags.TYPEDESC) {
return false;
}
BTypedescType sType = ((BTypedescType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFiniteType t, BType s) {
return s == t;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
return false;
}
BParameterizedType sType = (BParameterizedType) s;
return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
}
public Boolean visit(BTypeReferenceType t, BType s) {
return isSameType(getReferredType(t), s);
}
} |
> projects @tristaZero It can be seen from the source code that when the original projects and the optimized projects are the same, projectInts will return null. Maybe the `0 == projects.length` condition is redundant. ```java private static TableScanNode createProjectableFilterable(Compiler compiler, TableScan rel, ImmutableList<RexNode> filters, ImmutableIntList projects, ProjectableFilterableTable pfTable) { final DataContext root = compiler.getDataContext(); final ImmutableIntList originalProjects = projects; for (;;) { final List<RexNode> mutableFilters = Lists.newArrayList(filters); final int[] projectInts; if (projects == null || projects.equals(TableScan.identity(rel.getTable()))) { projectInts = null; } else { projectInts = projects.toIntArray(); } final Enumerable<Object[]> enumerable1 = pfTable.scan(root, mutableFilters, projectInts); ... } ``` | public String generate(final String table) {
String project = null == projects || 0 == projects.length ? "*" : Arrays.stream(projects).mapToObj(each -> fields.get(each).getName()).collect(Collectors.joining(", "));
return String.format("SELECT %s FROM %s", project, table);
} | String project = null == projects || 0 == projects.length ? "*" : Arrays.stream(projects).mapToObj(each -> fields.get(each).getName()).collect(Collectors.joining(", ")); | public String generate(final String table) {
Collection<String> actualColumnNames = null == projects ? columnNames : Arrays.stream(projects).mapToObj(columnNames::get).collect(Collectors.toList());
return String.format("SELECT %s FROM %s", Joiner.on(", ").join(actualColumnNames), table);
} | class FederateExecutionSQLGenerator {
private final DataContext root;
private final List<RexNode> filters;
private final int[] projects;
private final List<RelDataTypeField> fields;
/**
* Generate sql.
*
* @param table table
* @return sql
*/
} | class FederateExecutionSQLGenerator {
private final DataContext root;
private final List<RexNode> filters;
private final int[] projects;
private final List<String> columnNames;
/**
* Generate sql.
*
* @param table table
* @return sql
*/
} |
Super minor nitpick -> this is redundant. Feel free to ignore if you don't feel like removing it :) | public void createZip() throws IOException {
final File file = new File("target/zip");
delete(file);
file.mkdirs();
File zipFile = new File(file, "project.zip");
try (FileOutputStream fos = new FileOutputStream(zipFile);
ZipOutputStream zos = new ZipOutputStream(fos);
ZipProjectWriter zipWriter = new ZipProjectWriter(zos)) {
final CreateProject createProject = new CreateProject(zipWriter).groupId("io.quarkus")
.artifactId("basic-rest")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
}
Assertions.assertTrue(zipFile.exists());
File unzipProject = new File(file, "unzipProject");
try (FileInputStream fis = new FileInputStream(zipFile); ZipInputStream zis = new ZipInputStream(fis)) {
ZipEntry zipEntry = zis.getNextEntry();
byte[] buffer = new byte[1024];
while (zipEntry != null) {
File newFile = newFile(unzipProject, zipEntry);
if (zipEntry.isDirectory()) {
newFile.mkdirs();
} else {
new File(newFile.getParent()).mkdirs();
FileOutputStream fos = new FileOutputStream(newFile);
int len;
while ((len = zis.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
fos.close();
}
zipEntry = zis.getNextEntry();
}
zis.closeEntry();
zis.close();
}
final File gitignore = new File(unzipProject, ".gitignore");
Assertions.assertTrue(gitignore.exists());
final String gitignoreContent = new String(Files.readAllBytes(gitignore.toPath()), StandardCharsets.UTF_8);
Assertions.assertTrue(gitignoreContent.contains("\ntarget/\n"));
} | zis.close(); | public void createZip() throws IOException {
final File file = new File("target/zip");
delete(file);
file.mkdirs();
File zipFile = new File(file, "project.zip");
try (FileOutputStream fos = new FileOutputStream(zipFile);
ZipOutputStream zos = new ZipOutputStream(fos);
ZipProjectWriter zipWriter = new ZipProjectWriter(zos)) {
final CreateProject createProject = new CreateProject(zipWriter).groupId("io.quarkus")
.artifactId("basic-rest")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
}
Assertions.assertTrue(zipFile.exists());
File unzipProject = new File(file, "unzipProject");
try (FileInputStream fis = new FileInputStream(zipFile); ZipInputStream zis = new ZipInputStream(fis)) {
ZipEntry zipEntry = zis.getNextEntry();
byte[] buffer = new byte[1024];
while (zipEntry != null) {
File newFile = newFile(unzipProject, zipEntry);
if (zipEntry.isDirectory()) {
newFile.mkdirs();
} else {
new File(newFile.getParent()).mkdirs();
FileOutputStream fos = new FileOutputStream(newFile);
int len;
while ((len = zis.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
fos.close();
}
zipEntry = zis.getNextEntry();
}
zis.closeEntry();
}
final File gitignore = new File(unzipProject, ".gitignore");
Assertions.assertTrue(gitignore.exists());
final String gitignoreContent = new String(Files.readAllBytes(gitignore.toPath()), StandardCharsets.UTF_8);
Assertions.assertTrue(gitignoreContent.contains("\ntarget/\n"));
} | class CreateProjectTest {
@Test
public void create() throws IOException {
final File file = new File("target/basic-rest");
delete(file);
final CreateProject createProject = new CreateProject(new FileProjectWriter(file)).groupId("io.quarkus")
.artifactId("basic-rest")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
final File gitignore = new File(file, ".gitignore");
Assertions.assertTrue(gitignore.exists());
final String gitignoreContent = new String(Files.readAllBytes(gitignore.toPath()), StandardCharsets.UTF_8);
Assertions.assertTrue(gitignoreContent.contains("\ntarget/\n"));
}
@Test
public void createGradle() throws IOException {
final File file = new File("target/basic-rest-gradle");
delete(file);
final CreateProject createProject = new CreateProject(new FileProjectWriter(file)).groupId("io.quarkus")
.artifactId("basic-rest")
.version("1.0.0-SNAPSHOT")
.buildTool(BuildTool.GRADLE);
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
final File gitignore = new File(file, ".gitignore");
Assertions.assertTrue(gitignore.exists());
final String gitignoreContent = new String(Files.readAllBytes(gitignore.toPath()), StandardCharsets.UTF_8);
Assertions.assertFalse(gitignoreContent.contains("\ntarget/\n"));
Assertions.assertTrue(gitignoreContent.contains("\nbuild/"));
Assertions.assertTrue(gitignoreContent.contains("\n.gradle/\n"));
}
@Test
public void createOnTopPomWithoutResource() throws IOException {
final File testDir = new File("target/existing");
delete(testDir);
testDir.mkdirs();
Model model = new Model();
model.setModelVersion("4.0.0");
model.setGroupId("org.acme");
model.setArtifactId("foobar");
model.setVersion("10.1.2");
final File pom = new File(testDir, "pom.xml");
MojoUtils.write(model, pom);
final CreateProject createProject = new CreateProject(new FileProjectWriter(testDir)).groupId("something.is")
.artifactId("wrong")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
assertThat(contentOf(pom, "UTF-8"))
.contains(getPluginArtifactId(), QUARKUS_VERSION_PROPERTY, getPluginGroupId());
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/main/resources/application.properties")).exists();
assertThat(new File(testDir, "src/main/resources/META-INF/resources/index.html")).isFile();
assertThat(new File(testDir, "src/main/java")).isDirectory().matches(f -> {
String[] list = f.list();
return list != null && list.length == 0;
});
assertThat(new File(testDir, "src/test/java")).isDirectory().matches(f -> {
String[] list = f.list();
return list != null && list.length == 0;
});
assertThat(contentOf(new File(testDir, "pom.xml"), "UTF-8"))
.containsIgnoringCase(getBomArtifactId());
}
@Test
public void createOnTopPomWithResource() throws IOException {
final File testDir = new File("target/existing");
delete(testDir);
testDir.mkdirs();
Model model = new Model();
model.setModelVersion("4.0.0");
model.setGroupId("org.acme");
model.setArtifactId("foobar");
model.setVersion("10.1.2");
final File pom = new File(testDir, "pom.xml");
MojoUtils.write(model, pom);
final CreateProject createProject = new CreateProject(new FileProjectWriter(testDir)).groupId("something.is")
.artifactId("wrong")
.className("org.foo.MyResource")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
assertThat(contentOf(pom, "UTF-8"))
.contains(getPluginArtifactId(), QUARKUS_VERSION_PROPERTY, getPluginGroupId());
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/main/resources/application.properties")).exists();
assertThat(new File(testDir, "src/main/resources/META-INF/resources/index.html")).exists();
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/main/java/org/foo/MyResource.java")).isFile();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/test/java/org/foo/MyResourceTest.java")).isFile();
assertThat(new File(testDir, "src/test/java/org/foo/NativeMyResourceIT.java")).isFile();
assertThat(contentOf(new File(testDir, "pom.xml"))).contains(getBomArtifactId());
}
@Test
public void createNewWithCustomizations() throws IOException {
final File testDir = new File("target/existing");
delete(testDir);
testDir.mkdirs();
final File pom = new File(testDir, "pom.xml");
Map<String, Object> properties = new HashMap<>();
properties.put("projectGroupId", "org.acme");
properties.put("projectArtifactId", "acme");
properties.put("className", "org.acme.MyResource");
properties.put("extensions", "commons-io:commons-io:2.5");
Assertions.assertTrue(new CreateProject(new FileProjectWriter(testDir)).groupId("org.acme")
.artifactId("acme")
.version("1.0.0-SNAPSHOT")
.className("org.acme.MyResource")
.doCreateProject(properties));
assertThat(new File(testDir, "pom.xml")).isFile();
assertThat(new File(testDir, "src/main/java/org/acme/MyResource.java")).isFile();
assertThat(new File(testDir, "src/main/java/org/acme/MyApplication.java")).doesNotExist();
assertThat(contentOf(pom, "UTF-8"))
.contains(getPluginArtifactId(), QUARKUS_VERSION_PROPERTY, getPluginGroupId());
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/main/resources/application.properties")).exists();
assertThat(new File(testDir, "src/main/resources/META-INF/resources/index.html")).exists();
assertThat(contentOf(new File(testDir, "pom.xml"), "UTF-8"))
.containsIgnoringCase(MojoUtils.QUARKUS_VERSION_PROPERTY);
}
public static void delete(final File file) throws IOException {
if (file.exists()) {
try (Stream<Path> stream = Files.walk(file.toPath())) {
stream.sorted(Comparator.reverseOrder())
.map(Path::toFile)
.forEach(File::delete);
}
}
Assertions.assertFalse(
Files.exists(file.toPath()), "Directory still exists");
}
@Test
private static File newFile(File destinationDir, ZipEntry zipEntry) throws IOException {
File destFile = new File(destinationDir, zipEntry.getName());
String destDirPath = destinationDir.getCanonicalPath();
String destFilePath = destFile.getCanonicalPath();
if (!destFilePath.startsWith(destDirPath + File.separator)) {
throw new IOException("Entry is outside of the target dir: " + zipEntry.getName());
}
return destFile;
}
} | class CreateProjectTest {
@Test
public void create() throws IOException {
final File file = new File("target/basic-rest");
delete(file);
final CreateProject createProject = new CreateProject(new FileProjectWriter(file)).groupId("io.quarkus")
.artifactId("basic-rest")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
final File gitignore = new File(file, ".gitignore");
Assertions.assertTrue(gitignore.exists());
final String gitignoreContent = new String(Files.readAllBytes(gitignore.toPath()), StandardCharsets.UTF_8);
Assertions.assertTrue(gitignoreContent.contains("\ntarget/\n"));
}
@Test
public void createGradle() throws IOException {
final File file = new File("target/basic-rest-gradle");
delete(file);
final CreateProject createProject = new CreateProject(new FileProjectWriter(file)).groupId("io.quarkus")
.artifactId("basic-rest")
.version("1.0.0-SNAPSHOT")
.buildTool(BuildTool.GRADLE);
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
final File gitignore = new File(file, ".gitignore");
Assertions.assertTrue(gitignore.exists());
final String gitignoreContent = new String(Files.readAllBytes(gitignore.toPath()), StandardCharsets.UTF_8);
Assertions.assertFalse(gitignoreContent.contains("\ntarget/\n"));
Assertions.assertTrue(gitignoreContent.contains("\nbuild/"));
Assertions.assertTrue(gitignoreContent.contains("\n.gradle/\n"));
}
@Test
public void createOnTopPomWithoutResource() throws IOException {
final File testDir = new File("target/existing");
delete(testDir);
testDir.mkdirs();
Model model = new Model();
model.setModelVersion("4.0.0");
model.setGroupId("org.acme");
model.setArtifactId("foobar");
model.setVersion("10.1.2");
final File pom = new File(testDir, "pom.xml");
MojoUtils.write(model, pom);
final CreateProject createProject = new CreateProject(new FileProjectWriter(testDir)).groupId("something.is")
.artifactId("wrong")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
assertThat(contentOf(pom, "UTF-8"))
.contains(getPluginArtifactId(), QUARKUS_VERSION_PROPERTY, getPluginGroupId());
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/main/resources/application.properties")).exists();
assertThat(new File(testDir, "src/main/resources/META-INF/resources/index.html")).isFile();
assertThat(new File(testDir, "src/main/java")).isDirectory().matches(f -> {
String[] list = f.list();
return list != null && list.length == 0;
});
assertThat(new File(testDir, "src/test/java")).isDirectory().matches(f -> {
String[] list = f.list();
return list != null && list.length == 0;
});
assertThat(contentOf(new File(testDir, "pom.xml"), "UTF-8"))
.containsIgnoringCase(getBomArtifactId());
}
@Test
public void createOnTopPomWithResource() throws IOException {
final File testDir = new File("target/existing");
delete(testDir);
testDir.mkdirs();
Model model = new Model();
model.setModelVersion("4.0.0");
model.setGroupId("org.acme");
model.setArtifactId("foobar");
model.setVersion("10.1.2");
final File pom = new File(testDir, "pom.xml");
MojoUtils.write(model, pom);
final CreateProject createProject = new CreateProject(new FileProjectWriter(testDir)).groupId("something.is")
.artifactId("wrong")
.className("org.foo.MyResource")
.version("1.0.0-SNAPSHOT");
Assertions.assertTrue(createProject.doCreateProject(new HashMap<>()));
assertThat(contentOf(pom, "UTF-8"))
.contains(getPluginArtifactId(), QUARKUS_VERSION_PROPERTY, getPluginGroupId());
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/main/resources/application.properties")).exists();
assertThat(new File(testDir, "src/main/resources/META-INF/resources/index.html")).exists();
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/main/java/org/foo/MyResource.java")).isFile();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/test/java/org/foo/MyResourceTest.java")).isFile();
assertThat(new File(testDir, "src/test/java/org/foo/NativeMyResourceIT.java")).isFile();
assertThat(contentOf(new File(testDir, "pom.xml"))).contains(getBomArtifactId());
}
@Test
public void createNewWithCustomizations() throws IOException {
final File testDir = new File("target/existing");
delete(testDir);
testDir.mkdirs();
final File pom = new File(testDir, "pom.xml");
Map<String, Object> properties = new HashMap<>();
properties.put("projectGroupId", "org.acme");
properties.put("projectArtifactId", "acme");
properties.put("className", "org.acme.MyResource");
properties.put("extensions", "commons-io:commons-io:2.5");
Assertions.assertTrue(new CreateProject(new FileProjectWriter(testDir)).groupId("org.acme")
.artifactId("acme")
.version("1.0.0-SNAPSHOT")
.className("org.acme.MyResource")
.doCreateProject(properties));
assertThat(new File(testDir, "pom.xml")).isFile();
assertThat(new File(testDir, "src/main/java/org/acme/MyResource.java")).isFile();
assertThat(new File(testDir, "src/main/java/org/acme/MyApplication.java")).doesNotExist();
assertThat(contentOf(pom, "UTF-8"))
.contains(getPluginArtifactId(), QUARKUS_VERSION_PROPERTY, getPluginGroupId());
assertThat(new File(testDir, "src/main/java")).isDirectory();
assertThat(new File(testDir, "src/test/java")).isDirectory();
assertThat(new File(testDir, "src/main/resources/application.properties")).exists();
assertThat(new File(testDir, "src/main/resources/META-INF/resources/index.html")).exists();
assertThat(contentOf(new File(testDir, "pom.xml"), "UTF-8"))
.containsIgnoringCase(MojoUtils.QUARKUS_VERSION_PROPERTY);
}
public static void delete(final File file) throws IOException {
if (file.exists()) {
try (Stream<Path> stream = Files.walk(file.toPath())) {
stream.sorted(Comparator.reverseOrder())
.map(Path::toFile)
.forEach(File::delete);
}
}
Assertions.assertFalse(
Files.exists(file.toPath()), "Directory still exists");
}
@Test
private static File newFile(File destinationDir, ZipEntry zipEntry) throws IOException {
File destFile = new File(destinationDir, zipEntry.getName());
String destDirPath = destinationDir.getCanonicalPath();
String destFilePath = destFile.getCanonicalPath();
if (!destFilePath.startsWith(destDirPath + File.separator)) {
throw new IOException("Entry is outside of the target dir: " + zipEntry.getName());
}
return destFile;
}
} |
Shall we use `fail.expr.Stmt.getKind()` instead of `instanceOf` check? | private BLangBlockStmt rewriteNestedOnFail(BLangOnFailClause onFailClause, BLangFail fail) {
BLangOnFailClause currentOnFail = this.onFailClause;
BLangBlockStmt onFailBody = blockStmtByFailNode.get(fail);
if (onFailBody == null) {
onFailBody = ASTBuilderUtil.createBlockStmt(onFailClause.pos);
onFailBody.stmts.addAll(onFailClause.body.stmts);
onFailBody.scope = onFailClause.body.scope;
onFailBody.mapSymbol = onFailClause.body.mapSymbol;
onFailBody.failureBreakMode = onFailClause.body.failureBreakMode;
onFailBody.isFromFailNode = true;
VariableDefinitionNode onFailVarDefNode = onFailClause.variableDefinitionNode;
if (onFailVarDefNode != null) {
BVarSymbol onFailErrorVariableSymbol =
((BLangSimpleVariableDef) onFailVarDefNode).var.symbol;
BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(onFailErrorVariableSymbol.pos,
onFailErrorVariableSymbol.name.value, onFailErrorVariableSymbol.type, rewrite(fail.expr, env),
onFailErrorVariableSymbol);
BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(onFailClause.pos, errorVar);
onFailBody.scope.define(onFailErrorVariableSymbol.name, onFailErrorVariableSymbol);
onFailBody.stmts.add(0, errorVarDef);
}
int currentOnFailIndex = this.enclosingOnFailClause.indexOf(this.onFailClause);
int enclosingOnFailIndex = currentOnFailIndex <= 0 ? this.enclosingOnFailClause.size() - 1
: (currentOnFailIndex - 1);
this.onFailClause = this.enclosingOnFailClause.get(enclosingOnFailIndex);
onFailBody = rewrite(onFailBody, env);
BLangFail failToEndBlock = new BLangFail();
if (onFailClause.isInternal && fail.exprStmt != null) {
if (fail.exprStmt instanceof BLangPanic) {
setPanicErrorToTrue(onFailBody, onFailClause);
} else {
onFailBody.stmts.add((BLangStatement) fail.exprStmt);
}
}
if (onFailClause.bodyContainsFail && !onFailClause.isInternal) {
onFailBody.stmts.add(failToEndBlock);
}
this.onFailClause = currentOnFail;
blockStmtByFailNode.put(fail, onFailBody);
}
return onFailBody;
} | if (fail.exprStmt instanceof BLangPanic) { | private BLangBlockStmt rewriteNestedOnFail(BLangOnFailClause onFailClause, BLangFail fail) {
BLangOnFailClause currentOnFail = this.onFailClause;
BLangBlockStmt onFailBody = blockStmtByFailNode.get(fail);
if (onFailBody == null) {
onFailBody = ASTBuilderUtil.createBlockStmt(onFailClause.pos);
onFailBody.stmts.addAll(onFailClause.body.stmts);
onFailBody.scope = onFailClause.body.scope;
onFailBody.mapSymbol = onFailClause.body.mapSymbol;
onFailBody.failureBreakMode = onFailClause.body.failureBreakMode;
onFailBody.isFromFailNode = true;
VariableDefinitionNode onFailVarDefNode = onFailClause.variableDefinitionNode;
if (onFailVarDefNode != null) {
BVarSymbol onFailErrorVariableSymbol =
((BLangSimpleVariableDef) onFailVarDefNode).var.symbol;
BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(onFailErrorVariableSymbol.pos,
onFailErrorVariableSymbol.name.value, onFailErrorVariableSymbol.type, rewrite(fail.expr, env),
onFailErrorVariableSymbol);
BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(onFailClause.pos, errorVar);
onFailBody.scope.define(onFailErrorVariableSymbol.name, onFailErrorVariableSymbol);
onFailBody.stmts.add(0, errorVarDef);
}
int currentOnFailIndex = this.enclosingOnFailClause.indexOf(this.onFailClause);
int enclosingOnFailIndex = currentOnFailIndex <= 0 ? (this.enclosingOnFailClause.size() - 1)
: (currentOnFailIndex - 1);
this.onFailClause = this.enclosingOnFailClause.get(enclosingOnFailIndex);
onFailBody = rewrite(onFailBody, env);
BLangFail failToEndBlock = new BLangFail();
if (onFailClause.isInternal && fail.exprStmt != null) {
if (fail.exprStmt.getKind() == NodeKind.PANIC) {
setPanicErrorToTrue(onFailBody, onFailClause);
} else {
onFailBody.stmts.add((BLangStatement) fail.exprStmt);
}
}
if (onFailClause.bodyContainsFail && !onFailClause.isInternal) {
onFailBody.stmts.add(failToEndBlock);
}
this.onFailClause = currentOnFail;
blockStmtByFailNode.put(fail, onFailBody);
}
return onFailBody;
} | class definition node for which the initializer is created
/**
 * Creates the generated initializer function for the given class definition.
 *
 * @param classDefinition The class definition node for which the initializer is created
 * @param env The env for the type node
 * @return The generated initializer method
 */
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return rewrite(generatedInitFunc, env);
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
/**
 * Creates the generated initializer function for the given class definition.
 *
 * @param classDefinition The class definition node for which the initializer is created
 * @param env The env for the type node
 * @return The generated initializer method
 */
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return rewrite(generatedInitFunc, env);
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} |
it may or may not be the CPU history logged in the diagnostics, but since it is close enough, so should be fine? | public String toJson() {
String snapshot = this.cachedRequestDiagnostics;
if (snapshot != null) {
return snapshot;
}
synchronized (this.spanName) {
snapshot = this.cachedRequestDiagnostics;
if (snapshot != null) {
return snapshot;
}
this.systemUsage = ClientSideRequestStatistics.fetchSystemInformation();
return this.cachedRequestDiagnostics = getRequestDiagnostics();
}
} | this.systemUsage = ClientSideRequestStatistics.fetchSystemInformation(); | public String toJson() {
/**
 * Returns the lazily rendered json representation of this diagnostics context.
 * The result is cached under the lock; the system-usage snapshot is captured once, just
 * before the json is first rendered. (Declaration line reconstructed - it had been fused
 * into the preceding line by extraction damage.)
 *
 * NOTE(review): double-checked locking on the non-volatile field
 * 'cachedRequestDiagnostics' - confirm the unsynchronized first read is acceptable here.
 */
public String toJson() {
    String snapshot = this.cachedRequestDiagnostics;
    if (snapshot != null) {
        return snapshot;
    }
    synchronized (this.spanName) {
        // Re-check under the lock in case another thread rendered the json first.
        snapshot = this.cachedRequestDiagnostics;
        if (snapshot != null) {
            return snapshot;
        }
        this.systemUsage = ClientSideRequestStatistics.fetchSystemInformation();
        return this.cachedRequestDiagnostics = getRequestDiagnostics();
    }
}

class CosmosDiagnosticsContext {
// Shared bridge accessor and json mapper (static, shared across all contexts).
private final static ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor diagAccessor =
    ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor();
private final static ObjectMapper mapper = Utils.getSimpleObjectMapper();
// Immutable identity of the operation, set once in the constructor. 'spanName' also doubles
// as the lock object: the mutators below synchronize on it.
private final String spanName;
private final String accountName;
private final String endpoint;
private final String databaseName;
private final String collectionName;
private final ResourceType resourceType;
private final String resourceTypeString; // cached resourceType.toString()
private final OperationType operationType;
private final String operationTypeString; // cached operationType.toString()
private final ConsistencyLevel consistencyLevel;
private final ConcurrentLinkedDeque<CosmosDiagnostics> diagnostics;
private final Integer maxItemCount;
private final CosmosDiagnosticsThresholds thresholds;
private final String operationId;
private final String trackingId;
private final String connectionMode;
private final String userAgent;
// Mutable outcome of the operation; written under synchronized (this.spanName).
private Throwable finalError;
private Instant startTime = null;
private Duration duration = null;
private int statusCode = 0;
private int subStatusCode = 0;
private final AtomicInteger actualItemCount = new AtomicInteger(-1); // -1 == never set
private float totalRequestCharge = 0;
private int maxRequestSize = 0;
private int maxResponseSize = 0;
private String cachedRequestDiagnostics = null; // lazily rendered json; reset on state change
private final AtomicBoolean isCompleted = new AtomicBoolean(false);
private CosmosDiagnosticsSystemUsageSnapshot systemUsage;
private Double samplingRateSnapshot;
private ArrayList<CosmosDiagnosticsRequestInfo> requestInfo = null; // lazily built cache
/**
 * Creates a new diagnostics context for a single logical operation.
 * {@code databaseName}, {@code collectionName} and {@code operationId} may be null and are
 * normalized to the empty string; {@code maxItemCount} and {@code trackingId} remain nullable.
 */
CosmosDiagnosticsContext(
    String spanName,
    String accountName,
    String endpoint,
    String databaseName,
    String collectionName,
    ResourceType resourceType,
    OperationType operationType,
    String operationId,
    ConsistencyLevel consistencyLevel,
    Integer maxItemCount,
    CosmosDiagnosticsThresholds thresholds,
    String trackingId,
    String connectionMode,
    String userAgent) {
    checkNotNull(spanName, "Argument 'spanName' must not be null.");
    checkNotNull(accountName, "Argument 'accountName' must not be null.");
    checkNotNull(endpoint, "Argument 'endpoint' must not be null.");
    checkNotNull(resourceType, "Argument 'resourceType' must not be null.");
    checkNotNull(operationType, "Argument 'operationType' must not be null.");
    checkNotNull(consistencyLevel, "Argument 'consistencyLevel' must not be null.");
    checkNotNull(thresholds, "Argument 'thresholds' must not be null.");
    checkNotNull(connectionMode, "Argument 'connectionMode' must not be null.");
    checkNotNull(userAgent, "Argument 'userAgent' must not be null.");
    this.spanName = spanName;
    this.accountName = accountName;
    this.endpoint = endpoint;
    this.databaseName = databaseName != null ? databaseName : "";
    this.collectionName = collectionName != null ? collectionName : "";
    this.resourceType = resourceType;
    this.resourceTypeString = resourceType.toString();
    this.operationType = operationType;
    this.operationTypeString = operationType.toString();
    this.operationId = operationId != null ? operationId : "";
    this.diagnostics = new ConcurrentLinkedDeque<>();
    this.consistencyLevel = consistencyLevel;
    this.maxItemCount = maxItemCount;
    this.thresholds = thresholds;
    this.trackingId = trackingId;
    this.userAgent = userAgent;
    this.connectionMode = connectionMode;
}
/**
 * The name of the account related to the operation
 * @return the name of the account related to the operation
 */
public String getAccountName() {
    return this.accountName;
}
// Package-private accessor; the endpoint is exposed internally via the bridge in initialize().
String getEndpoint() { return this.endpoint; }
/**
 * The name of the database related to the operation
 * @return the name of the database related to the operation
 */
public String getDatabaseName() {
    return this.databaseName;
}
/**
 * The name of the container related to the operation
 * @return the name of the collection related to the operation
 */
public String getContainerName() {
    return this.collectionName;
}
/**
 * The resource type of the operation
 * @return the resource type of the operation
 */
public String getResourceType() {
    return this.resourceTypeString;
}
// Package-private: exposes the strongly-typed enum for the internal bridge accessor.
ResourceType getResourceTypeInternal() {
    return this.resourceType;
}
/**
 * The operation type of the operation
 * @return the operation type of the operation
 */
public String getOperationType() {
    return this.operationTypeString;
}
/**
 * The trackingId of a write operation. Will be null for read-/query- or feed operations or when non-idempotent
 * writes are disabled for writes or only enabled without trackingId propagation.
 * @return the trackingId of an operation
 */
public String getTrackingId() {
    return this.trackingId;
}
/**
 * A flag indicating whether the operation is a point operation or not.
 * @return a flag indicating whether the operation is a point operation or not.
 */
public boolean isPointOperation() {
    return this.operationType.isPointOperation();
}
// Package-private: exposes the strongly-typed enum for the internal bridge accessor.
OperationType getOperationTypeInternal() {
    return this.operationType;
}
/**
 * The operation identifier of the operation - this can be used to
 * add a dimension for feed operations - like queries -
 * so, metrics and diagnostics can be separated for different query types etc.
 * @return the operation identifier of the operation
 */
public String getOperationId() {
    return this.operationId;
}
/**
 * The effective consistency level of the operation
 * @return the effective consistency level of the operation
 */
public ConsistencyLevel getEffectiveConsistencyLevel() {
    return this.consistencyLevel;
}
/**
 * The max. number of items requested in a feed operation
 * @return the max. number of items requested in a feed operation. Will be null for point operations.
 */
public Integer getMaxItemCount() {
    return this.maxItemCount;
}
/**
 * The actual number of items returned by a feed operation
 * @return the actual number of items returned by a feed operation. Will be null for point operations.
 */
public Integer getActualItemCount() {
    // -1 is the "never recorded" sentinel (see recordOperation); report it as null.
    int snapshot = this.actualItemCount.get();
    if (snapshot < 0) {
        return null;
    }
    return snapshot;
}
/**
 * The span name as a logical identifier for an operation
 * @return the span name as a logical identifier for an operation
 */
String getSpanName() {
    return this.spanName;
}
/**
 * Indicates whether the latency, request charge or payload size of the operation exceeded
 * the configured threshold (or the status code counts as a failure condition).
 * @return a flag indicating whether the latency, request charge or payload size of the
 * operation exceeded its threshold. Always {@code false} while the operation is in flight.
 */
public boolean isThresholdViolated() {
    if (!this.isCompleted()) {
        return false;
    }
    if (this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode)) {
        return true;
    }
    // Point and non-point operations are held to different latency budgets.
    Duration latencyThreshold = this.operationType.isPointOperation()
        ? this.thresholds.getPointOperationLatencyThreshold()
        : this.thresholds.getNonPointOperationLatencyThreshold();
    if (latencyThreshold.compareTo(this.duration) < 0) {
        return true;
    }
    if (this.thresholds.getRequestChargeThreshold() < this.totalRequestCharge) {
        return true;
    }
    return this.thresholds.getPayloadSizeThreshold() < Math.max(this.maxRequestSize, this.maxResponseSize);
}
// Attaches a transport-level diagnostics record to this context, propagating the current
// sampling-rate snapshot and invalidating the cached json / request-info views.
// Runs under the spanName lock; the nested addRequestSize/addResponseSize calls take the
// same lock, which is safe because Java monitors are reentrant.
void addDiagnostics(CosmosDiagnostics cosmosDiagnostics) {
    checkNotNull(cosmosDiagnostics, "Argument 'cosmosDiagnostics' must not be null.");
    synchronized (this.spanName) {
        if (this.samplingRateSnapshot != null) {
            diagAccessor.setSamplingRateSnapshot(cosmosDiagnostics, this.samplingRateSnapshot);
        }
        this.addRequestSize(diagAccessor.getRequestPayloadSizeInBytes(cosmosDiagnostics));
        this.addResponseSize(diagAccessor.getTotalResponsePayloadSizeInBytes(cosmosDiagnostics));
        this.diagnostics.add(cosmosDiagnostics);
        // Invalidate lazily built caches so they get rebuilt with the new record.
        this.cachedRequestDiagnostics = null;
        this.requestInfo = null;
        cosmosDiagnostics.setDiagnosticsContext(this);
    }
}
// Flattens the client-side request statistics of every collected diagnostics record
// (including feed-response diagnostics, when present) into one de-duplicating collection.
Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics() {
    DistinctClientSideRequestStatisticsCollection combined =
        new DistinctClientSideRequestStatisticsCollection();
    for (CosmosDiagnostics d : this.getDiagnostics()) {
        combined.addAll(d.getClientSideRequestStatistics());
        FeedResponseDiagnostics feedDiagnostics = d.getFeedResponseDiagnostics();
        if (feedDiagnostics != null) {
            combined.addAll(feedDiagnostics.getClientSideRequestStatistics());
        }
    }
    return combined;
}
/**
 * The final status code of the operation (possibly after retries)
 * @return the final status code of the operation (possibly after retries)
 */
public int getStatusCode() {
    return this.statusCode;
}
/**
 * The final sub-status code of the operation (possibly after retries)
 * @return the final sub-status code of the operation (possibly after retries)
 */
public int getSubStatusCode() {
    return this.subStatusCode;
}
/**
 * The final error when the operation failed
 * @return the final error when the operation failed
 */
public Throwable getFinalError() {
    return this.finalError;
}
/**
 * The max. request payload size in bytes
 * @return the max. request payload size in bytes
 */
public int getMaxRequestPayloadSizeInBytes() {
    return this.maxRequestSize;
}
/**
 * The max. response payload size in bytes.
 * @return the max. response payload size in bytes
 */
public int getMaxResponsePayloadSizeInBytes() {
    return this.maxResponseSize;
}
/**
 * The total request charge across all retries.
 * @return the total request charge across all retries.
 */
public float getTotalRequestCharge() {
    return this.totalRequestCharge;
}
/**
 * Returns the set of contacted regions, aggregated over every collected diagnostics record.
 * @return the set of contacted regions (sorted, possibly empty)
 */
public Set<String> getContactedRegionNames() {
    TreeSet<String> contacted = new TreeSet<>();
    if (this.diagnostics != null) {
        for (CosmosDiagnostics d : this.diagnostics) {
            contacted.addAll(d.getContactedRegionNames());
        }
    }
    return contacted;
}
/**
 * Returns the system usage
 * NOTE: this information is not included in the json representation returned from
 * {@link #toJson()} - because it is usually only relevant when thresholds are violated,
 * in which case the entire diagnostics json-string is included. Calling this method will
 * lazily collect the system usage - which can be useful when writing
 * a custom {@link CosmosDiagnosticsHandler}
 * @return the system usage
 */
public CosmosDiagnosticsSystemUsageSnapshot getSystemUsage() {
    synchronized (this.spanName) {
        // Capture and cache the snapshot lazily, under the same lock toJson() uses.
        CosmosDiagnosticsSystemUsageSnapshot snapshot = this.systemUsage;
        if (snapshot != null) {
            return snapshot;
        }
        return this.systemUsage = ClientSideRequestStatistics.fetchSystemInformation();
    }
}
/**
 * Returns the number of retries and/or attempts for speculative processing, summed over
 * all distinct client-side request statistics (never negative).
 * @return the number of retries and/or attempts for speculative processing.
 */
public int getRetryCount() {
    if (this.diagnostics == null) {
        return 0;
    }
    int total = 0;
    for (ClientSideRequestStatistics stats : this.getDistinctCombinedClientSideRequestStatistics()) {
        total += getRetryCount(stats);
    }
    return Math.max(0, total);
}

// Retry count of a single statistics record; 0 when the record or its retry context is absent.
private int getRetryCount(ClientSideRequestStatistics stats) {
    return (stats == null || stats.getRetryContext() == null)
        ? 0
        : stats.getRetryContext().getRetryCount();
}
// Accumulates request charge across retries; guarded by the spanName lock.
void addRequestCharge(float requestCharge) {
    synchronized (this.spanName) {
        this.totalRequestCharge += requestCharge;
    }
}
// Tracks the largest request payload observed; guarded by the spanName lock.
void addRequestSize(int bytes) {
    synchronized (this.spanName) {
        this.maxRequestSize = Math.max(this.maxRequestSize, bytes);
    }
}
// Tracks the largest response payload observed; guarded by the spanName lock.
void addResponseSize(int bytes) {
    synchronized (this.spanName) {
        this.maxResponseSize = Math.max(this.maxResponseSize, bytes);
    }
}
/**
 * The diagnostic records for service interactions within the scope of this SDK operation
 * @return the diagnostic records for service interactions within the scope of this SDK operation
 */
public Collection<CosmosDiagnostics> getDiagnostics() {
    // NOTE: returns the live concurrent deque, not a copy.
    return this.diagnostics;
}
/**
 * Returns a flag indicating whether the operation has been completed yet.
 * @return a flag indicating whether the operation has been completed yet.
 */
public boolean isCompleted() {
    return this.isCompleted.get();
}
/**
 * The total end-to-end duration of the operation.
 * @return the total end-to-end duration of the operation. Null until the operation is recorded.
 */
public Duration getDuration() {
    return this.duration;
}
/**
 * A flag indicating whether the operation should be considered failed or not based on the
 * status code handling rules in {@link CosmosDiagnosticsThresholds}.
 * @return a flag indicating whether the operation should be considered failed or not.
 * Always {@code false} while the operation is still in flight.
 */
public boolean isFailure() {
    return this.isCompleted()
        && this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode);
}
// Marks the start of the operation. Must be called exactly once; invalidates any cached json.
void startOperation() {
    checkState(
        this.startTime == null,
        "Method 'startOperation' must not be called multiple times.");
    synchronized (this.spanName) {
        this.startTime = Instant.now();
        this.cachedRequestDiagnostics = null;
    }
}
// Completes the operation exactly once; returns false if it was already completed.
// NOTE(review): the method-level 'synchronized' (locking 'this') is in addition to the
// spanName lock taken inside - confirm which monitor is intended to guard completion.
synchronized boolean endOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
    synchronized (this.spanName) {
        boolean hasCompletedOperation = this.isCompleted.compareAndSet(false, true);
        if (hasCompletedOperation) {
            this.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
        }
        return hasCompletedOperation;
    }
}
// Records the outcome of the operation (status, item count, error, duration) and
// invalidates the cached json. Requires startOperation() to have been called first -
// otherwise Duration.between(null, now) throws.
synchronized void recordOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
    synchronized (this.spanName) {
        this.statusCode = statusCode;
        this.subStatusCode = subStatusCode;
        this.finalError = finalError;
        if (actualItemCount != null) {
            // The first recording replaces the -1 sentinel; later recordings accumulate.
            if (!this.actualItemCount.compareAndSet(-1, actualItemCount)) {
                this.actualItemCount.addAndGet(actualItemCount);
            }
        }
        this.duration = Duration.between(this.startTime, Instant.now());
        this.cachedRequestDiagnostics = null;
    }
}
// Applies the sampling rate to this context and back-propagates it onto every diagnostics
// record already collected; records added later pick it up in addDiagnostics().
synchronized void setSamplingRateSnapshot(double samplingRate) {
    this.samplingRateSnapshot = samplingRate;
    for (CosmosDiagnostics d : this.diagnostics) {
        diagAccessor.setSamplingRateSnapshot(d, samplingRate);
    }
}
// Renders this context (and all collected diagnostics) as a json string. Optional fields
// (container, operationId, trackingId, subStatus, item counts, exception) are omitted when
// unset. On serialization failure, falls back to a json object containing only the error.
String getRequestDiagnostics() {
    ObjectNode ctxNode = mapper.createObjectNode();
    ctxNode.put("spanName", this.spanName);
    ctxNode.put("account", this.accountName);
    ctxNode.put("db", this.databaseName);
    if (!this.collectionName.isEmpty()) {
        ctxNode.put("container", this.collectionName);
    }
    ctxNode.put("resource", this.resourceType.toString());
    ctxNode.put("operation", this.operationType.toString());
    if (!this.operationId.isEmpty()) {
        ctxNode.put("operationId", this.operationId);
    }
    if (this.trackingId != null && !this.trackingId.isEmpty()) {
        ctxNode.put("trackingId", this.trackingId);
    }
    ctxNode.put("consistency", this.consistencyLevel.toString());
    ctxNode.put("status", this.statusCode);
    if (this.subStatusCode != 0) {
        ctxNode.put("subStatus", this.subStatusCode);
    }
    ctxNode.put("RUs", this.totalRequestCharge);
    ctxNode.put("maxRequestSizeInBytes", this.maxRequestSize);
    ctxNode.put("maxResponseSizeInBytes", this.maxResponseSize);
    if (this.maxItemCount != null) {
        ctxNode.put("maxItems", this.maxItemCount);
    }
    if (this.actualItemCount.get() >= 0) {
        ctxNode.put("actualItems", this.actualItemCount.get());
    }
    if (this.finalError != null) {
        ctxNode.put("exception", this.finalError.toString());
    }
    if (this.diagnostics != null && this.diagnostics.size() > 0) {
        // Each collected diagnostics record serializes itself into a child node.
        ArrayNode diagnosticsNode = ctxNode.putArray("diagnostics");
        for (CosmosDiagnostics d: this.diagnostics) {
            ObjectNode childNode = mapper.createObjectNode();
            d.fillCosmosDiagnostics(childNode, null);
            diagnosticsNode.add(childNode);
        }
    }
    try {
        return mapper.writeValueAsString(ctxNode);
    } catch (JsonProcessingException e) {
        // Best effort: surface the serialization failure itself as the json payload.
        ctxNode = mapper.createObjectNode();
        ctxNode.put("exception", e.toString());
        try {
            return mapper.writeValueAsString(ctxNode);
        } catch (JsonProcessingException ex) {
            throw new RuntimeException(ex);
        }
    }
}
/**
* Returns a json-string representation of the diagnostics context. This string uses json format for readability,
* but it should be treated as an opaque string - the format can and will change between SDK versions - for any
* automatic processing of the diagnostics information the get-properties of public API should be used.
* @return a json-string representation of the diagnostics context. This string uses json format for readability,
* but it should be treated as an opaque string - the format can and will change between SDK versions -
* for any
* automatic processing of the diagnostics information the get-properties of public API should be used.
*/
/**
 * Gets the UserAgent header value used by the client issuing this operation
 * NOTE: this information is not included in the json representation returned from
 * {@link #toJson()} - because it is usually only relevant when thresholds are violated,
 * in which case the entire diagnostics json-string is included. Calling this method can be
 * useful when writing a custom {@link CosmosDiagnosticsHandler}
 * @return the UserAgent header value used for the client that issued this operation
 */
public String getUserAgent() {
    return this.userAgent;
}
/**
 * Returns the connection mode used in the client.
 * NOTE: this information is not included in the json representation returned from
 * {@link #toJson()} - because it is usually only relevant when thresholds are violated,
 * in which case the entire diagnostics json-string is included. Calling this method can be
 * useful when writing a custom {@link CosmosDiagnosticsHandler}
 * @return the connection mode used in the client.
 */
public String getConnectionMode() {
    return this.connectionMode;
}
// Converts gateway-mode statistics into a single CosmosDiagnosticsRequestInfo record and
// appends it to 'requestInfo'. No-op when no gateway statistics were captured.
private static void addRequestInfoForGatewayStatistics(
    ClientSideRequestStatistics requestStats,
    ArrayList<CosmosDiagnosticsRequestInfo> requestInfo,
    ClientSideRequestStatistics.GatewayStatistics gatewayStats) {
    if (gatewayStats == null) {
        return;
    }
    CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
        requestStats.getActivityId(),
        null,                              // partitionId not known at gateway level
        gatewayStats.getPartitionKeyRangeId(),
        gatewayStats.getResourceType() + ":" + gatewayStats.getOperationType(),
        requestStats.getRequestStartTimeUTC(),
        requestStats.getDuration(),
        null,                              // no backend latency available via gateway
        gatewayStats.getRequestCharge(),
        gatewayStats.getResponsePayloadSizeInBytes(),
        gatewayStats.getStatusCode(),
        gatewayStats.getSubStatusCode(),
        new ArrayList<>()                  // no per-stage timeline events
    );
    requestInfo.add(info);
}
// Converts each direct-mode store response into a CosmosDiagnosticsRequestInfo record,
// including per-stage timeline events (zero-duration stages are skipped) and, when
// reported, the backend latency. Responses without store-result diagnostics are skipped.
private static void addRequestInfoForStoreResponses(
    ClientSideRequestStatistics requestStats,
    ArrayList<CosmosDiagnosticsRequestInfo> requestInfo,
    List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponses) {
    for (ClientSideRequestStatistics.StoreResponseStatistics responseStats: storeResponses) {
        StoreResultDiagnostics resultDiagnostics = responseStats.getStoreResult();
        if (resultDiagnostics == null) {
            continue;
        }
        StoreResponseDiagnostics responseDiagnostics = resultDiagnostics.getStoreResponseDiagnostics();
        String partitionId = null;
        String[] partitionAndReplicaId = resultDiagnostics.getPartitionAndReplicaId();
        if (partitionAndReplicaId.length == 2) {
            partitionId = partitionAndReplicaId[0];
        }
        Collection<CosmosDiagnosticsRequestEvent> events = new ArrayList<>();
        String pkRangeId = "";
        double requestCharge = 0;
        int responsePayloadLength = 0;
        int statusCode = 0;
        int subStatusCode = 0;
        String activityId = requestStats.getActivityId();
        if (responseDiagnostics != null) {
            // Prefer the response-level values when a response was actually received.
            activityId = responseDiagnostics.getActivityId();
            requestCharge = responseDiagnostics.getRequestCharge();
            responsePayloadLength = responseDiagnostics.getResponsePayloadLength();
            statusCode = responseDiagnostics.getStatusCode();
            subStatusCode = responseDiagnostics.getSubStatusCode();
            if (responseDiagnostics.getPartitionKeyRangeId() != null) {
                pkRangeId = responseDiagnostics.getPartitionKeyRangeId();
            }
            RequestTimeline timeline = responseDiagnostics.getRequestTimeline();
            timeline.forEach( e -> {
                if (e.getStartTime() != null && e.getDuration() != null && !e.getDuration().equals(Duration.ZERO)) {
                    events.add(new CosmosDiagnosticsRequestEvent(e.getStartTime(), e.getDuration(), e.getName()));
                }
            });
        }
        Duration backendLatency = null;
        if (resultDiagnostics.getBackendLatencyInMs() != null) {
            // Convert fractional milliseconds to nanoseconds for the Duration.
            backendLatency = Duration.ofNanos((long)(resultDiagnostics.getBackendLatencyInMs() * 1000000d));
        }
        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            activityId,
            partitionId,
            pkRangeId,
            responseStats.getRequestResourceType() + ":" + responseStats.getRequestOperationType(),
            requestStats.getRequestStartTimeUTC(),
            responseStats.getDuration(),
            backendLatency,
            requestCharge,
            responsePayloadLength,
            statusCode,
            subStatusCode,
            events
        );
        requestInfo.add(info);
    }
}
// Converts completed address-resolution calls into CosmosDiagnosticsRequestInfo records.
// In-flight resolutions (or ones without an end time) are skipped. The request type is
// encoded as "AddressResolution|<endpoint>|<forceRefresh 0/1>|<forceCollectionRoutingMapRefresh 0/1>".
private void addRequestInfoForAddressResolution(
    ClientSideRequestStatistics requestStats,
    ArrayList<CosmosDiagnosticsRequestInfo> requestInfo,
    Map<String, ClientSideRequestStatistics.AddressResolutionStatistics> addressResolutionStatisticsMap
) {
    if (addressResolutionStatisticsMap == null || addressResolutionStatisticsMap.size() == 0) {
        return;
    }
    for (ClientSideRequestStatistics.AddressResolutionStatistics addressResolutionStatistics
        : addressResolutionStatisticsMap.values()) {
        if (addressResolutionStatistics.isInflightRequest() ||
            addressResolutionStatistics.getEndTimeUTC() == null) {
            // Still running (or incomplete) - no latency to report.
            continue;
        }
        Duration latency = Duration.between(
            addressResolutionStatistics.getStartTimeUTC(),
            addressResolutionStatistics.getEndTimeUTC());
        StringBuilder sb = new StringBuilder();
        sb.append("AddressResolution|");
        sb.append(addressResolutionStatistics.getTargetEndpoint());
        sb.append("|");
        if (addressResolutionStatistics.isForceRefresh()) {
            sb.append("1|");
        } else {
            sb.append("0|");
        }
        if (addressResolutionStatistics.isForceCollectionRoutingMapRefresh()) {
            sb.append("1");
        } else {
            sb.append("0");
        }
        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            requestStats.getActivityId(),
            null,
            null,
            sb.toString(),
            addressResolutionStatistics.getStartTimeUTC(),
            latency,
            null,
            0,
            0,
            0,
            0,
            new ArrayList<>()
        );
        requestInfo.add(info);
    }
}
/**
 * Gets a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 * NOTE: this information is not included in the json representation returned from
 * {@link #toJson()} - because it is usually only relevant when thresholds are violated,
 * in which case the entire diagnostics json-string is included. Calling this method will
 * lazily build the request info - which can be useful when writing
 * a custom {@link CosmosDiagnosticsHandler}
 * @return a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 */
public Collection<CosmosDiagnosticsRequestInfo> getRequestInfo() {
    synchronized (this.spanName) {
        // Lazily built and cached; invalidated whenever new diagnostics are added.
        ArrayList<CosmosDiagnosticsRequestInfo> snapshot = this.requestInfo;
        if (snapshot != null) {
            return snapshot;
        }
        snapshot = new ArrayList<>();
        for (ClientSideRequestStatistics requestStats: this.getDistinctCombinedClientSideRequestStatistics()) {
            addRequestInfoForStoreResponses(
                requestStats,
                snapshot,
                requestStats.getResponseStatisticsList());
            addRequestInfoForStoreResponses(
                requestStats,
                snapshot,
                requestStats.getSupplementalResponseStatisticsList());
            addRequestInfoForGatewayStatistics(requestStats, snapshot, requestStats.getGatewayStatistics());
            addRequestInfoForAddressResolution(
                requestStats,
                snapshot,
                requestStats.getAddressResolutionStatistics());
        }
        this.requestInfo = snapshot;
        return snapshot;
    }
}
/**
 * Wires up the {@link ImplementationBridgeHelpers} accessor so internal SDK components can
 * create and mutate {@link CosmosDiagnosticsContext} instances without widening the public
 * API surface.
 *
 * BUGFIX: addDiagnostics previously validated {@code ctx} twice (with a message referring
 * to 'diagnostics'); it now validates the {@code diagnostics} argument as intended.
 */
static void initialize() {
    ImplementationBridgeHelpers
        .CosmosDiagnosticsContextHelper
        .setCosmosDiagnosticsContextAccessor(
            new ImplementationBridgeHelpers
                .CosmosDiagnosticsContextHelper
                .CosmosDiagnosticsContextAccessor() {
                @Override
                public CosmosDiagnosticsContext create(String spanName, String account, String endpoint,
                                                       String databaseId, String containerId,
                                                       ResourceType resourceType, OperationType operationType,
                                                       String operationId,
                                                       ConsistencyLevel consistencyLevel, Integer maxItemCount,
                                                       CosmosDiagnosticsThresholds thresholds, String trackingId,
                                                       String connectionMode, String userAgent) {
                    return new CosmosDiagnosticsContext(
                        spanName,
                        account,
                        endpoint,
                        databaseId,
                        containerId,
                        resourceType,
                        operationType,
                        operationId,
                        consistencyLevel,
                        maxItemCount,
                        thresholds,
                        trackingId,
                        connectionMode,
                        userAgent);
                }
                @Override
                public CosmosDiagnosticsSystemUsageSnapshot createSystemUsageSnapshot(String cpu, String used, String available, int cpuCount) {
                    return new CosmosDiagnosticsSystemUsageSnapshot(cpu, used, available, cpuCount);
                }
                @Override
                public void startOperation(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.startOperation();
                }
                @Override
                public void recordOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
                                            Integer actualItemCount, Double requestCharge,
                                            CosmosDiagnostics diagnostics, Throwable finalError) {
                    validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
                    ctx.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
                }
                // Shared validation for recordOperation/endOperation: attaches the optional
                // diagnostics record and accumulates the optional request charge.
                private void validateAndRecordOperationResult(
                    CosmosDiagnosticsContext ctx,
                    Double requestCharge,
                    CosmosDiagnostics diagnostics) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    if (diagnostics != null) {
                        ctx.addDiagnostics(diagnostics);
                    }
                    if (requestCharge != null) {
                        ctx.addRequestCharge(requestCharge.floatValue());
                    }
                }
                @Override
                public boolean endOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
                                            Integer actualItemCount, Double requestCharge,
                                            CosmosDiagnostics diagnostics, Throwable finalError) {
                    validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
                    return ctx.endOperation(statusCode, subStatusCode, actualItemCount, finalError);
                }
                @Override
                public void addRequestCharge(CosmosDiagnosticsContext ctx, float requestCharge) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.addRequestCharge(requestCharge);
                }
                @Override
                public void addRequestSize(CosmosDiagnosticsContext ctx, int bytes) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.addRequestSize(bytes);
                }
                @Override
                public void addResponseSize(CosmosDiagnosticsContext ctx, int bytes) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.addResponseSize(bytes);
                }
                @Override
                public void addDiagnostics(CosmosDiagnosticsContext ctx, CosmosDiagnostics diagnostics) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    // BUGFIX: previously checked 'ctx' again instead of 'diagnostics'.
                    checkNotNull(diagnostics, "Argument 'diagnostics' must not be null.");
                    ctx.addDiagnostics(diagnostics);
                }
                @Override
                public Collection<CosmosDiagnostics> getDiagnostics(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getDiagnostics();
                }
                @Override
                public ResourceType getResourceType(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getResourceTypeInternal();
                }
                @Override
                public OperationType getOperationType(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getOperationTypeInternal();
                }
                @Override
                public String getEndpoint(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getEndpoint();
                }
                @Override
                public Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getDistinctCombinedClientSideRequestStatistics();
                }
                @Override
                public String getSpanName(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getSpanName();
                }
                @Override
                public void setSamplingRateSnapshot(CosmosDiagnosticsContext ctx, double samplingRate) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.setSamplingRateSnapshot(samplingRate);
                }
            });
}
} | class CosmosDiagnosticsContext {
// Shared accessor used to reach package-private CosmosDiagnostics internals.
private final static ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor diagAccessor =
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor();
// Shared, thread-safe ObjectMapper reused for the json rendering in getRequestDiagnostics().
private final static ObjectMapper mapper = Utils.getSimpleObjectMapper();
// Immutable identity of the operation. spanName also doubles as the monitor guarding
// the mutable state below. NOTE(review): synchronizing on a String is fragile if the
// instance can be interned/shared - confirm callers always pass distinct instances.
private final String spanName;
private final String accountName;
private final String endpoint;
private final String databaseName;
private final String collectionName;
private final ResourceType resourceType;
private final String resourceTypeString;
private final OperationType operationType;
private final String operationTypeString;
private final ConsistencyLevel consistencyLevel;
// Diagnostics captured for each attempt/request performed for this operation.
private final ConcurrentLinkedDeque<CosmosDiagnostics> diagnostics;
private final Integer maxItemCount;
private final CosmosDiagnosticsThresholds thresholds;
private final String operationId;
private final String trackingId;
private final String connectionMode;
private final String userAgent;
// Mutable outcome of the operation - written under the spanName monitor.
private Throwable finalError;
private Instant startTime = null;
private Duration duration = null;
private int statusCode = 0;
private int subStatusCode = 0;
// -1 is the sentinel for "no item count recorded" (e.g. point operations).
private final AtomicInteger actualItemCount = new AtomicInteger(-1);
private float totalRequestCharge = 0;
private int maxRequestSize = 0;
private int maxResponseSize = 0;
// Invalidated (set to null) whenever diagnostics/outcome change.
private String cachedRequestDiagnostics = null;
private final AtomicBoolean isCompleted = new AtomicBoolean(false);
// Lazily captured in getSystemUsage().
private CosmosDiagnosticsSystemUsageSnapshot systemUsage;
private Double samplingRateSnapshot;
// Lazily materialized in getRequestInfo(); invalidated by addDiagnostics().
private ArrayList<CosmosDiagnosticsRequestInfo> requestInfo = null;
// Package-private constructor; instances are created through the
// CosmosDiagnosticsContextAccessor bridge in initialize().
// Nullable arguments (databaseName, collectionName, operationId) are normalized to "";
// maxItemCount and trackingId may stay null for point operations.
CosmosDiagnosticsContext(
    String spanName,
    String accountName,
    String endpoint,
    String databaseName,
    String collectionName,
    ResourceType resourceType,
    OperationType operationType,
    String operationId,
    ConsistencyLevel consistencyLevel,
    Integer maxItemCount,
    CosmosDiagnosticsThresholds thresholds,
    String trackingId,
    String connectionMode,
    String userAgent) {
    checkNotNull(spanName, "Argument 'spanName' must not be null.");
    checkNotNull(accountName, "Argument 'accountName' must not be null.");
    checkNotNull(endpoint, "Argument 'endpoint' must not be null.");
    checkNotNull(resourceType, "Argument 'resourceType' must not be null.");
    checkNotNull(operationType, "Argument 'operationType' must not be null.");
    checkNotNull(consistencyLevel, "Argument 'consistencyLevel' must not be null.");
    checkNotNull(thresholds, "Argument 'thresholds' must not be null.");
    checkNotNull(connectionMode, "Argument 'connectionMode' must not be null.");
    checkNotNull(userAgent, "Argument 'userAgent' must not be null.");
    this.spanName = spanName;
    this.accountName = accountName;
    this.endpoint = endpoint;
    this.databaseName = databaseName != null ? databaseName : "";
    this.collectionName = collectionName != null ? collectionName : "";
    this.resourceType = resourceType;
    // String forms are precomputed once so the hot getters avoid repeated toString calls.
    this.resourceTypeString = resourceType.toString();
    this.operationType = operationType;
    this.operationTypeString = operationType.toString();
    this.operationId = operationId != null ? operationId : "";
    this.diagnostics = new ConcurrentLinkedDeque<>();
    this.consistencyLevel = consistencyLevel;
    this.maxItemCount = maxItemCount;
    this.thresholds = thresholds;
    this.trackingId = trackingId;
    this.userAgent = userAgent;
    this.connectionMode = connectionMode;
}
/**
 * The name of the account related to the operation
 * @return the name of the account related to the operation
 */
public String getAccountName() {
    return this.accountName;
}
// Package-private: service endpoint used by this operation (exposed via the accessor bridge).
String getEndpoint() { return this.endpoint; }
/**
 * The name of the database related to the operation
 * @return the name of the database related to the operation
 */
public String getDatabaseName() {
    return this.databaseName;
}
/**
 * The name of the container related to the operation
 * @return the name of the collection related to the operation
 */
public String getContainerName() {
    return this.collectionName;
}
/**
 * The resource type of the operation
 * @return the resource type of the operation
 */
public String getResourceType() {
    return this.resourceTypeString;
}
// Package-private typed variant of getResourceType(), used by the accessor bridge.
ResourceType getResourceTypeInternal() {
    return this.resourceType;
}
/**
 * The operation type of the operation
 * @return the operation type of the operation
 */
public String getOperationType() {
    return this.operationTypeString;
}
/**
 * The trackingId of a write operation. Will be null for read-/query- or feed operations or when non-idempotent
 * writes are disabled for writes or only enabled without trackingId propagation.
 * @return the trackingId of an operation
 */
public String getTrackingId() {
    return this.trackingId;
}
/**
 * A flag indicating whether the operation is a point operation or not.
 * @return a flag indicating whether the operation is a point operation or not.
 */
public boolean isPointOperation() {
    return this.operationType.isPointOperation();
}
// Package-private typed variant of getOperationType(), used by the accessor bridge.
OperationType getOperationTypeInternal() {
    return this.operationType;
}
/**
 * The operation identifier of the operation - this can be used to
 * add a dimension for feed operations - like queries -
 * so, metrics and diagnostics can be separated for different query types etc.
 * @return the operation identifier of the operation
 */
public String getOperationId() {
    return this.operationId;
}
/**
 * The effective consistency level of the operation
 * @return the effective consistency level of the operation
 */
public ConsistencyLevel getEffectiveConsistencyLevel() {
    return this.consistencyLevel;
}
/**
 * The max. number of items requested in a feed operation
 * @return the max. number of items requested in a feed operation. Will be null for point operations.
 */
public Integer getMaxItemCount() {
    return this.maxItemCount;
}
/**
 * The actual number of items returned by a feed operation
 * @return the actual number of items returned by a feed operation. Will be null for point operations.
 */
public Integer getActualItemCount() {
    // The AtomicInteger uses -1 as a "never recorded" sentinel which maps to null here.
    int snapshot = this.actualItemCount.get();
    if (snapshot < 0) {
        return null;
    }
    return snapshot;
}
/**
 * The span name as a logical identifier for an operation
 * @return the span name as a logical identifier for an operation
 */
String getSpanName() {
    return this.spanName;
}
/**
 * Indicates whether the latency, request charge or payload size of the operation exceeded the given threshold
 * @return a flag indicating whether the latency, request charge or payload size of the operation
 * exceeded its threshold.
 */
public boolean isThresholdViolated() {
    // Thresholds can only be evaluated once the operation has finished.
    if (!this.isCompleted()) {
        return false;
    }
    if (this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode)) {
        return true;
    }
    // Point and feed operations have separate latency thresholds.
    Duration latencyThreshold = this.operationType.isPointOperation()
        ? this.thresholds.getPointOperationLatencyThreshold()
        : this.thresholds.getNonPointOperationLatencyThreshold();
    if (latencyThreshold.compareTo(this.duration) < 0) {
        return true;
    }
    if (this.thresholds.getRequestChargeThreshold() < this.totalRequestCharge) {
        return true;
    }
    // Payload threshold applies to the larger of request and response payload.
    return this.thresholds.getPayloadSizeThreshold() < Math.max(this.maxRequestSize, this.maxResponseSize);
}
// Records diagnostics from one attempt/request, updating payload-size aggregates and
// invalidating cached views. Also back-links the diagnostics to this context.
// NOTE(review): the monitor is the spanName String instance - confirm it is never
// interned/shared across contexts.
void addDiagnostics(CosmosDiagnostics cosmosDiagnostics) {
    checkNotNull(cosmosDiagnostics, "Argument 'cosmosDiagnostics' must not be null.");
    synchronized (this.spanName) {
        // Propagate an already-set sampling rate to late-arriving diagnostics.
        if (this.samplingRateSnapshot != null) {
            diagAccessor.setSamplingRateSnapshot(cosmosDiagnostics, this.samplingRateSnapshot);
        }
        this.addRequestSize(diagAccessor.getRequestPayloadSizeInBytes(cosmosDiagnostics));
        this.addResponseSize(diagAccessor.getTotalResponsePayloadSizeInBytes(cosmosDiagnostics));
        this.diagnostics.add(cosmosDiagnostics);
        // Invalidate lazily cached views derived from the diagnostics collection.
        this.cachedRequestDiagnostics = null;
        this.requestInfo = null;
        cosmosDiagnostics.setDiagnosticsContext(this);
    }
}
// Flattens the request statistics of all recorded diagnostics (including any
// feed-response diagnostics) into a single de-duplicating collection.
Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics() {
    DistinctClientSideRequestStatisticsCollection combined =
        new DistinctClientSideRequestStatisticsCollection();
    for (CosmosDiagnostics current : this.getDiagnostics()) {
        combined.addAll(current.getClientSideRequestStatistics());
        FeedResponseDiagnostics feedDiagnostics = current.getFeedResponseDiagnostics();
        if (feedDiagnostics != null) {
            combined.addAll(feedDiagnostics.getClientSideRequestStatistics());
        }
    }
    return combined;
}
/**
 * The final status code of the operation (possibly after retries)
 * @return the final status code of the operation (possibly after retries)
 */
public int getStatusCode() {
    return this.statusCode;
}
/**
 * The final sub-status code of the operation (possibly after retries)
 * @return the final sub-status code of the operation (possibly after retries)
 */
public int getSubStatusCode() {
    return this.subStatusCode;
}
/**
 * The final error when the operation failed
 * @return the final error when the operation failed
 */
public Throwable getFinalError() {
    return this.finalError;
}
/**
 * The max. request payload size in bytes
 * @return the max. request payload size in bytes
 */
public int getMaxRequestPayloadSizeInBytes() {
    return this.maxRequestSize;
}
/**
 * The max. response payload size in bytes.
 * @return the max. response payload size in bytes
 */
public int getMaxResponsePayloadSizeInBytes() {
    return this.maxResponseSize;
}
/**
 * The total request charge across all retries.
 * @return the total request charge across all retries.
 */
public float getTotalRequestCharge() {
    return this.totalRequestCharge;
}
/**
 * Returns the (sorted, distinct) set of region names contacted while processing this operation.
 * @return the set of contacted regions
 */
public Set<String> getContactedRegionNames() {
    TreeSet<String> contacted = new TreeSet<>();
    if (this.diagnostics != null) {
        this.diagnostics.forEach(current -> contacted.addAll(current.getContactedRegionNames()));
    }
    return contacted;
}
/**
 * Returns the system usage
 * NOTE: this information is not included in the json representation returned from
 * {@code getRequestDiagnostics()} - it
 * is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
 * included. Calling this method will lazily collect the system usage - which can be useful when writing
 * a custom {@link CosmosDiagnosticsHandler}
 * @return the system usage
 */
public CosmosDiagnosticsSystemUsageSnapshot getSystemUsage() {
    // Lazy, lock-guarded capture: the snapshot is taken at most once per context.
    synchronized (this.spanName) {
        CosmosDiagnosticsSystemUsageSnapshot snapshot = this.systemUsage;
        if (snapshot != null) {
            return snapshot;
        }
        return this.systemUsage = ClientSideRequestStatistics.fetchSystemInformation();
    }
}
/**
 * Returns the number of retries and/or attempts for speculative processing.
 * @return the number of retries and/or attempts for speculative processing.
 */
public int getRetryCount() {
    if (this.diagnostics == null) {
        return 0;
    }
    int total = 0;
    for (ClientSideRequestStatistics stats : this.getDistinctCombinedClientSideRequestStatistics()) {
        total += getRetryCount(stats);
    }
    return Math.max(0, total);
}

// Null-safe helper extracting the retry count from a single request-statistics record.
private int getRetryCount(ClientSideRequestStatistics c) {
    return (c == null || c.getRetryContext() == null)
        ? 0
        : c.getRetryContext().getRetryCount();
}
// Accumulates the request charge of one attempt into the operation total.
void addRequestCharge(float requestCharge) {
    synchronized (this.spanName) {
        this.totalRequestCharge += requestCharge;
    }
}
// Tracks the maximum request payload size seen across attempts.
void addRequestSize(int bytes) {
    synchronized (this.spanName) {
        this.maxRequestSize = Math.max(this.maxRequestSize, bytes);
    }
}
// Tracks the maximum response payload size seen across attempts.
void addResponseSize(int bytes) {
    synchronized (this.spanName) {
        this.maxResponseSize = Math.max(this.maxResponseSize, bytes);
    }
}
/**
 * The diagnostic records for service interactions within the scope of this SDK operation
 * @return the diagnostic records for service interactions within the scope of this SDK operation
 */
public Collection<CosmosDiagnostics> getDiagnostics() {
    // Returns the live (mutable, concurrent) collection - callers should treat it as read-only.
    return this.diagnostics;
}
/**
 * Returns a flag indicating whether the operation has been completed yet.
 * @return a flag indicating whether the operation has been completed yet.
 */
public boolean isCompleted() {
    return this.isCompleted.get();
}
/**
 * The total end-to-end duration of the operation.
 * @return the total end-to-end duration of the operation. Null until the operation completed.
 */
public Duration getDuration() {
    return this.duration;
}
/**
 * A flag indicating whether the operation should be considered failed or not based on the status code handling
 * rules in {@link CosmosDiagnosticsThresholds}.
 * @return a flag indicating whether the operation should be considered failed or not
 */
public boolean isFailure() {
    if (!this.isCompleted()) {
        return false;
    }
    return this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode);
}
// Marks the beginning of the operation; must be called exactly once before
// recordOperation/endOperation (duration is computed relative to this instant).
void startOperation() {
    checkState(
        this.startTime == null,
        "Method 'startOperation' must not be called multiple times.");
    synchronized (this.spanName) {
        this.startTime = Instant.now();
        this.cachedRequestDiagnostics = null;
    }
}
// Completes the operation at most once; returns true only for the call that actually
// transitioned the context to "completed". Later calls are no-ops returning false.
// NOTE(review): the method-level 'synchronized' (this) is redundant with the inner
// spanName lock used by all other mutators - confirm which monitor is intended.
synchronized boolean endOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
    synchronized (this.spanName) {
        boolean hasCompletedOperation = this.isCompleted.compareAndSet(false, true);
        if (hasCompletedOperation) {
            this.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
        }
        return hasCompletedOperation;
    }
}
// Records the (latest) outcome of the operation. Item counts accumulate across calls
// (first call replaces the -1 sentinel, later calls add).
// NOTE(review): Duration.between(this.startTime, ...) throws NPE if startOperation()
// was never called - confirm callers guarantee the ordering.
synchronized void recordOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
    synchronized (this.spanName) {
        this.statusCode = statusCode;
        this.subStatusCode = subStatusCode;
        this.finalError = finalError;
        if (actualItemCount != null) {
            if (!this.actualItemCount.compareAndSet(-1, actualItemCount)) {
                this.actualItemCount.addAndGet(actualItemCount);
            }
        }
        this.duration = Duration.between(this.startTime, Instant.now());
        // Outcome changed - drop the cached json rendering.
        this.cachedRequestDiagnostics = null;
    }
}
// Records the sampling rate for this context and propagates it to all diagnostics
// captured so far.
// Fix: samplingRateSnapshot is read by addDiagnostics() under the spanName monitor,
// but was previously written only under the 'this' monitor - so writer and reader
// were not mutually exclusive and the write had no guaranteed visibility. The
// spanName lock is now taken here as well (the method-level 'synchronized' is kept
// for backward compatibility with existing callers relying on it).
synchronized void setSamplingRateSnapshot(double samplingRate) {
    synchronized (this.spanName) {
        this.samplingRateSnapshot = samplingRate;
        for (CosmosDiagnostics d : this.diagnostics) {
            diagAccessor.setSamplingRateSnapshot(d, samplingRate);
        }
    }
}
// Renders the context (and all captured diagnostics) as a json string. Optional
// fields are omitted when empty/zero. On serialization failure a minimal json with
// the exception text is returned instead.
// NOTE(review): the result is rebuilt on every call; the 'cachedRequestDiagnostics'
// field is invalidated elsewhere but never populated here - confirm whether caching
// was intended.
String getRequestDiagnostics() {
    ObjectNode ctxNode = mapper.createObjectNode();
    ctxNode.put("spanName", this.spanName);
    ctxNode.put("account", this.accountName);
    ctxNode.put("db", this.databaseName);
    if (!this.collectionName.isEmpty()) {
        ctxNode.put("container", this.collectionName);
    }
    ctxNode.put("resource", this.resourceType.toString());
    ctxNode.put("operation", this.operationType.toString());
    if (!this.operationId.isEmpty()) {
        ctxNode.put("operationId", this.operationId);
    }
    if (this.trackingId != null && !this.trackingId.isEmpty()) {
        ctxNode.put("trackingId", this.trackingId);
    }
    ctxNode.put("consistency", this.consistencyLevel.toString());
    ctxNode.put("status", this.statusCode);
    if (this.subStatusCode != 0) {
        ctxNode.put("subStatus", this.subStatusCode);
    }
    ctxNode.put("RUs", this.totalRequestCharge);
    ctxNode.put("maxRequestSizeInBytes", this.maxRequestSize);
    ctxNode.put("maxResponseSizeInBytes", this.maxResponseSize);
    if (this.maxItemCount != null) {
        ctxNode.put("maxItems", this.maxItemCount);
    }
    if (this.actualItemCount.get() >= 0) {
        ctxNode.put("actualItems", this.actualItemCount.get());
    }
    if (this.finalError != null) {
        ctxNode.put("exception", this.finalError.toString());
    }
    if (this.diagnostics != null && this.diagnostics.size() > 0) {
        ArrayNode diagnosticsNode = ctxNode.putArray("diagnostics");
        for (CosmosDiagnostics d: this.diagnostics) {
            ObjectNode childNode = mapper.createObjectNode();
            d.fillCosmosDiagnostics(childNode, null);
            diagnosticsNode.add(childNode);
        }
    }
    try {
        return mapper.writeValueAsString(ctxNode);
    } catch (JsonProcessingException e) {
        // Fall back to a minimal json carrying only the serialization error.
        ctxNode = mapper.createObjectNode();
        ctxNode.put("exception", e.toString());
        try {
            return mapper.writeValueAsString(ctxNode);
        } catch (JsonProcessingException ex) {
            throw new RuntimeException(ex);
        }
    }
}
/*
 * NOTE(review): the javadoc below is not attached to any member - the method it
 * documented (a json-string rendering, presumably a public wrapper around
 * getRequestDiagnostics()) is not present here; verify whether it was removed.
 *
 * Returns a json-string representation of the diagnostics context. This string uses json format for readability,
 * but it should be treated as an opaque string - the format can and will change between SDK versions - for any
 * automatic processing of the diagnostics information the get-properties of public API should be used.
 */
/**
 * Gets the UserAgent header value used by the client issuing this operation
 * NOTE: this information is not included in the json representation returned from
 * {@code getRequestDiagnostics()} - it
 * is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
 * included.
 * @return the UserAgent header value used for the client that issued this operation
 */
public String getUserAgent() {
    return this.userAgent;
}
/**
 * Returns the connection mode used in the client.
 * NOTE: this information is not included in the json representation returned from
 * {@code getRequestDiagnostics()} - it
 * is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
 * included.
 * @return the connection mode used in the client.
 */
public String getConnectionMode() {
    return this.connectionMode;
}
// Converts the gateway statistics (if any) of one request-statistics record into a
// CosmosDiagnosticsRequestInfo entry appended to 'requestInfo'.
private static void addRequestInfoForGatewayStatistics(
    ClientSideRequestStatistics requestStats,
    List<CosmosDiagnosticsRequestInfo> requestInfo) {
    ClientSideRequestStatistics.GatewayStatistics gatewayStats = requestStats.getGatewayStatistics();
    if (gatewayStats == null) {
        return;
    }
    CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
        requestStats.getActivityId(),
        null,                       // no partition id available for gateway calls
        gatewayStats.getPartitionKeyRangeId(),
        gatewayStats.getResourceType() + ":" + gatewayStats.getOperationType(),
        requestStats.getRequestStartTimeUTC(),
        requestStats.getDuration(),
        null,                       // no backend latency available for gateway calls
        gatewayStats.getRequestCharge(),
        gatewayStats.getResponsePayloadSizeInBytes(),
        gatewayStats.getStatusCode(),
        gatewayStats.getSubStatusCode(),
        new ArrayList<>()           // no per-stage timeline for gateway calls
    );
    requestInfo.add(info);
}
// Converts each direct-mode store response in 'storeResponses' into a
// CosmosDiagnosticsRequestInfo entry (including the per-stage request timeline)
// appended to 'requestInfo'.
private static void addRequestInfoForStoreResponses(
    ClientSideRequestStatistics requestStats,
    List<CosmosDiagnosticsRequestInfo> requestInfo,
    List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponses) {
    for (ClientSideRequestStatistics.StoreResponseStatistics responseStats: storeResponses) {
        StoreResultDiagnostics resultDiagnostics = responseStats.getStoreResult();
        if (resultDiagnostics == null) {
            continue;
        }
        StoreResponseDiagnostics responseDiagnostics = resultDiagnostics.getStoreResponseDiagnostics();
        String partitionId = null;
        String[] partitionAndReplicaId = resultDiagnostics.getPartitionAndReplicaId();
        if (partitionAndReplicaId.length == 2) {
            partitionId = partitionAndReplicaId[0];
        }
        List<CosmosDiagnosticsRequestEvent> events = new ArrayList<>();
        // Defaults used when no response diagnostics are available for this store result.
        String pkRangeId = "";
        double requestCharge = 0;
        int responsePayloadLength = 0;
        int statusCode = 0;
        int subStatusCode = 0;
        String activityId = requestStats.getActivityId();
        if (responseDiagnostics != null) {
            activityId = responseDiagnostics.getActivityId();
            requestCharge = responseDiagnostics.getRequestCharge();
            responsePayloadLength = responseDiagnostics.getResponsePayloadLength();
            statusCode = responseDiagnostics.getStatusCode();
            subStatusCode = responseDiagnostics.getSubStatusCode();
            if (responseDiagnostics.getPartitionKeyRangeId() != null) {
                pkRangeId = responseDiagnostics.getPartitionKeyRangeId();
            }
            // Only timeline stages with a real start time and non-zero duration are kept.
            RequestTimeline timeline = responseDiagnostics.getRequestTimeline();
            timeline.forEach( e -> {
                if (e.getStartTime() != null && e.getDuration() != null && !e.getDuration().equals(Duration.ZERO)) {
                    events.add(new CosmosDiagnosticsRequestEvent(e.getStartTime(), e.getDuration(), e.getName()));
                }
            });
        }
        // Backend latency is reported in (fractional) milliseconds - convert to Duration.
        Duration backendLatency = null;
        if (resultDiagnostics.getBackendLatencyInMs() != null) {
            backendLatency = Duration.ofNanos((long)(resultDiagnostics.getBackendLatencyInMs() * 1000000d));
        }
        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            activityId,
            partitionId,
            pkRangeId,
            responseStats.getRequestResourceType() + ":" + responseStats.getRequestOperationType(),
            requestStats.getRequestStartTimeUTC(),
            responseStats.getDuration(),
            backendLatency,
            requestCharge,
            responsePayloadLength,
            statusCode,
            subStatusCode,
            events
        );
        requestInfo.add(info);
    }
}
// Converts each completed address-resolution call into a CosmosDiagnosticsRequestInfo
// entry appended to 'requestInfo'. In-flight / unfinished resolutions are skipped.
// Consistency fix: made static like its sibling helpers (addRequestInfoForGatewayStatistics,
// addRequestInfoForStoreResponses) - it touches no instance state.
// NOTE(review): 'requestStats' is unused; kept for signature symmetry with the siblings.
private static void addRequestInfoForAddressResolution(
    ClientSideRequestStatistics requestStats,
    List<CosmosDiagnosticsRequestInfo> requestInfo,
    Map<String, ClientSideRequestStatistics.AddressResolutionStatistics> addressResolutionStatisticsMap
) {
    if (addressResolutionStatisticsMap == null || addressResolutionStatisticsMap.isEmpty()) {
        return;
    }
    for (Map.Entry<String, ClientSideRequestStatistics.AddressResolutionStatistics> current
        : addressResolutionStatisticsMap.entrySet()) {
        ClientSideRequestStatistics.AddressResolutionStatistics addressResolutionStatistics = current.getValue();
        String addressResolutionActivityId = current.getKey();
        if (addressResolutionStatistics.isInflightRequest() ||
            addressResolutionStatistics.getEndTimeUTC() == null) {
            // Skip resolutions that never finished - they have no meaningful latency.
            continue;
        }
        Duration latency = Duration.between(
            addressResolutionStatistics.getStartTimeUTC(),
            addressResolutionStatistics.getEndTimeUTC());
        // Encoded request type: "AddressResolution|<endpoint>|<forceRefresh>|<forceCollectionRoutingMapRefresh>"
        String requestType = "AddressResolution|"
            + addressResolutionStatistics.getTargetEndpoint() + "|"
            + (addressResolutionStatistics.isForceRefresh() ? "1|" : "0|")
            + (addressResolutionStatistics.isForceCollectionRoutingMapRefresh() ? "1" : "0");
        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            addressResolutionActivityId,
            null,
            null,
            requestType,
            addressResolutionStatistics.getStartTimeUTC(),
            latency,
            null,
            0,
            0,
            0,
            0,
            new ArrayList<>()
        );
        requestInfo.add(info);
    }
}
/**
 * Gets a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 * NOTE: this information is not included in the json representation returned from
 * {@code getRequestDiagnostics()} - it
 * is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
 * included. Calling this method will lazily collect the request information - which can be useful when writing
 * a custom {@link CosmosDiagnosticsHandler}
 * @return a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 */
public Collection<CosmosDiagnosticsRequestInfo> getRequestInfo() {
    // Lazily built and cached under the spanName monitor; the cache is invalidated
    // by addDiagnostics() whenever new diagnostics arrive.
    synchronized (this.spanName) {
        ArrayList<CosmosDiagnosticsRequestInfo> snapshot = this.requestInfo;
        if (snapshot != null) {
            return snapshot;
        }
        snapshot = new ArrayList<>();
        for (ClientSideRequestStatistics requestStats: this.getDistinctCombinedClientSideRequestStatistics()) {
            addRequestInfoForStoreResponses(
                requestStats,
                snapshot,
                requestStats.getResponseStatisticsList());
            addRequestInfoForStoreResponses(
                requestStats,
                snapshot,
                requestStats.getSupplementalResponseStatisticsList());
            addRequestInfoForGatewayStatistics(requestStats, snapshot);
            addRequestInfoForAddressResolution(
                requestStats,
                snapshot,
                requestStats.getAddressResolutionStatistics());
        }
        this.requestInfo = snapshot;
        return snapshot;
    }
}
static void initialize() {
ImplementationBridgeHelpers
.CosmosDiagnosticsContextHelper
.setCosmosDiagnosticsContextAccessor(
new ImplementationBridgeHelpers
.CosmosDiagnosticsContextHelper
.CosmosDiagnosticsContextAccessor() {
@Override
public CosmosDiagnosticsContext create(String spanName, String account, String endpoint,
String databaseId,String containerId,
ResourceType resourceType, OperationType operationType,
String operationId,
ConsistencyLevel consistencyLevel, Integer maxItemCount,
CosmosDiagnosticsThresholds thresholds, String trackingId,
String connectionMode, String userAgent) {
return new CosmosDiagnosticsContext(
spanName,
account,
endpoint,
databaseId,
containerId,
resourceType,
operationType,
operationId,
consistencyLevel,
maxItemCount,
thresholds,
trackingId,
connectionMode,
userAgent);
}
@Override
public CosmosDiagnosticsSystemUsageSnapshot createSystemUsageSnapshot(String cpu, String used, String available, int cpuCount) {
return new CosmosDiagnosticsSystemUsageSnapshot(cpu, used, available, cpuCount);
}
@Override
public void startOperation(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.startOperation();
}
@Override
public void recordOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
Integer actualItemCount, Double requestCharge,
CosmosDiagnostics diagnostics, Throwable finalError) {
validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
ctx.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
}
private void validateAndRecordOperationResult(
CosmosDiagnosticsContext ctx,
Double requestCharge,
CosmosDiagnostics diagnostics) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
if (diagnostics != null) {
ctx.addDiagnostics(diagnostics);
}
if (requestCharge != null) {
ctx.addRequestCharge(requestCharge.floatValue());
}
}
@Override
public boolean endOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
Integer actualItemCount, Double requestCharge,
CosmosDiagnostics diagnostics, Throwable finalError) {
validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
return ctx.endOperation(statusCode, subStatusCode, actualItemCount, finalError);
}
@Override
public void addRequestCharge(CosmosDiagnosticsContext ctx, float requestCharge) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.addRequestCharge(requestCharge);
}
@Override
public void addRequestSize(CosmosDiagnosticsContext ctx, int bytes) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.addRequestSize(bytes);
}
@Override
public void addResponseSize(CosmosDiagnosticsContext ctx, int bytes) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.addResponseSize(bytes);
}
@Override
public void addDiagnostics(CosmosDiagnosticsContext ctx, CosmosDiagnostics diagnostics) {
    checkNotNull(ctx, "Argument 'ctx' must not be null.");
    // Bug fix: the second precondition previously re-checked 'ctx' although the
    // message referred to 'diagnostics'; validate the actual argument.
    checkNotNull(diagnostics, "Argument 'diagnostics' must not be null.");
    ctx.addDiagnostics(diagnostics);
}
@Override
public Collection<CosmosDiagnostics> getDiagnostics(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getDiagnostics();
}
@Override
public ResourceType getResourceType(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getResourceTypeInternal();
}
@Override
public OperationType getOperationType(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getOperationTypeInternal();
}
@Override
public String getEndpoint(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getEndpoint();
}
@Override
public Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getDistinctCombinedClientSideRequestStatistics();
}
@Override
public String getSpanName(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getSpanName();
}
@Override
public void setSamplingRateSnapshot(CosmosDiagnosticsContext ctx, double samplingRate) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.setSamplingRateSnapshot(samplingRate);
}
});
}
} |
It's already exposed for the other classes, but yes, it's better not to use it. I'll change it to a private logger.
StreamTask.LOG.debug(
"Cleanup AsyncCheckpointRunnable for checkpoint {} of {}.",
checkpointMetaData.getCheckpointId(),
taskName);
Exception exception = null;
for (OperatorSnapshotFutures operatorSnapshotResult : operatorSnapshotsInProgress.values()) {
if (operatorSnapshotResult != null) {
try {
operatorSnapshotResult.cancel();
} catch (Exception cancelException) {
exception = ExceptionUtils.firstOrSuppressed(cancelException, exception);
}
}
}
if (null != exception) {
throw exception;
}
} | StreamTask.LOG.debug( | private void cleanup() throws Exception {
    LOG.debug(
        "Cleanup AsyncCheckpointRunnable for checkpoint {} of {}.",
        checkpointMetaData.getCheckpointId(),
        taskName);
    Exception exception = null;
    // Cancel every in-flight snapshot future; keep going on failure and rethrow the
    // first cancellation error with later ones suppressed.
    for (OperatorSnapshotFutures operatorSnapshotResult : operatorSnapshotsInProgress.values()) {
        if (operatorSnapshotResult != null) {
            try {
                operatorSnapshotResult.cancel();
            } catch (Exception cancelException) {
                exception = ExceptionUtils.firstOrSuppressed(cancelException, exception);
            }
        }
    }
    if (null != exception) {
        throw exception;
    }
}
private final String taskName;
private final CloseableRegistry closeableRegistry;
private final Environment taskEnvironment;
// Lifecycle of the async phase: RUNNING -> COMPLETED on success,
// RUNNING -> DISCARDED on failure or close.
private enum AsyncCheckpointState {
    RUNNING,
    DISCARDED,
    COMPLETED
}
private final AsyncExceptionHandler asyncExceptionHandler;
// Per-operator snapshot futures produced by the synchronous checkpoint phase.
private final Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress;
private final CheckpointMetaData checkpointMetaData;
private final CheckpointMetrics checkpointMetrics;
private final long asyncStartNanos;
private final AtomicReference<AsyncCheckpointState> asyncCheckpointState = new AtomicReference<>(AsyncCheckpointState.RUNNING);
// All collaborators are mandatory; this runnable is executed on the async
// checkpointing thread pool after the synchronous snapshot phase finished.
AsyncCheckpointRunnable(
    Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress,
    CheckpointMetaData checkpointMetaData,
    CheckpointMetrics checkpointMetrics,
    long asyncStartNanos,
    String taskName,
    CloseableRegistry closeableRegistry,
    Environment taskEnvironment,
    AsyncExceptionHandler asyncExceptionHandler) {
    this.operatorSnapshotsInProgress = checkNotNull(operatorSnapshotsInProgress);
    this.checkpointMetaData = checkNotNull(checkpointMetaData);
    this.checkpointMetrics = checkNotNull(checkpointMetrics);
    this.asyncStartNanos = asyncStartNanos;
    this.taskName = checkNotNull(taskName);
    this.closeableRegistry = checkNotNull(closeableRegistry);
    this.taskEnvironment = checkNotNull(taskEnvironment);
    this.asyncExceptionHandler = checkNotNull(asyncExceptionHandler);
}
// Finalizes all operator snapshot futures, then reports the resulting task state
// (job-manager owned and task-local) if this runnable wasn't closed in the meantime.
// Any failure is translated into a declined checkpoint via handleExecutionException.
@Override
public void run() {
    FileSystemSafetyNet.initializeSafetyNetForThread();
    try {
        // Register so close() can cancel this runnable while it is in flight.
        closeableRegistry.registerCloseable(this);
        TaskStateSnapshot jobManagerTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
        TaskStateSnapshot localTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
        // Block on each operator's snapshot future and split the result into the
        // job-manager-owned and the task-local (recovery) state.
        for (Map.Entry<OperatorID, OperatorSnapshotFutures> entry : operatorSnapshotsInProgress.entrySet()) {
            OperatorID operatorID = entry.getKey();
            OperatorSnapshotFutures snapshotInProgress = entry.getValue();
            OperatorSnapshotFinalizer finalizedSnapshots =
                new OperatorSnapshotFinalizer(snapshotInProgress);
            jobManagerTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
                operatorID,
                finalizedSnapshots.getJobManagerOwnedState());
            localTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
                operatorID,
                finalizedSnapshots.getTaskLocalState());
        }
        final long asyncEndNanos = System.nanoTime();
        final long asyncDurationMillis = (asyncEndNanos - asyncStartNanos) / 1_000_000L;
        checkpointMetrics.setAsyncDurationMillis(asyncDurationMillis);
        // Only the RUNNING -> COMPLETED transition may acknowledge; losing the race
        // means close()/failure already discarded this checkpoint.
        if (asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.COMPLETED)) {
            reportCompletedSnapshotStates(
                jobManagerTaskOperatorSubtaskStates,
                localTaskOperatorSubtaskStates,
                asyncDurationMillis);
        } else {
            StreamTask.LOG.debug("{} - asynchronous part of checkpoint {} could not be completed because it was closed before.",
                taskName,
                checkpointMetaData.getCheckpointId());
        }
    } catch (Exception e) {
        if (StreamTask.LOG.isDebugEnabled()) {
            StreamTask.LOG.debug("{} - asynchronous part of checkpoint {} could not be completed.",
                taskName,
                checkpointMetaData.getCheckpointId(),
                e);
        }
        handleExecutionException(e);
    } finally {
        closeableRegistry.unregisterCloseable(this);
        FileSystemSafetyNet.closeSafetyNetAndGuardedResourcesForThread();
    }
}
/**
 * Reports the finalized snapshot states to the task state manager and logs completion.
 * An empty snapshot is reported as {@code null}; local (secondary) state without
 * corresponding primary state is rejected as inconsistent.
 */
private void reportCompletedSnapshotStates(
        TaskStateSnapshot acknowledgedTaskStateSnapshot,
        TaskStateSnapshot localTaskStateSnapshot,
        long asyncDurationMillis) {

    final boolean reportAckState = acknowledgedTaskStateSnapshot.hasState();
    final boolean reportLocalState = localTaskStateSnapshot.hasState();

    // Local state is only allowed to exist alongside primary (acknowledged) state.
    Preconditions.checkState(reportAckState || !reportLocalState,
        "Found cached state but no corresponding primary state is reported to the job " +
            "manager. This indicates a problem.");

    final TaskStateSnapshot ackSnapshotOrNull = reportAckState ? acknowledgedTaskStateSnapshot : null;
    final TaskStateSnapshot localSnapshotOrNull = reportLocalState ? localTaskStateSnapshot : null;

    taskEnvironment.getTaskStateManager().reportTaskStateSnapshots(
        checkpointMetaData,
        checkpointMetrics,
        ackSnapshotOrNull,
        localSnapshotOrNull);

    StreamTask.LOG.debug("{} - finished asynchronous part of checkpoint {}. Asynchronous duration: {} ms",
        taskName, checkpointMetaData.getCheckpointId(), asyncDurationMillis);
    StreamTask.LOG.trace("{} - reported the following states in snapshot for checkpoint {}: {}.",
        taskName, checkpointMetaData.getCheckpointId(), acknowledgedTaskStateSnapshot);
}
/**
 * Handles a failure of the asynchronous checkpoint part: transitions the state to
 * DISCARDED exactly once, performs cleanup, and declines the checkpoint at the
 * environment. Cleanup failures are attached to {@code e} as suppressed exceptions.
 *
 * @param e the exception that caused the asynchronous part to fail
 */
private void handleExecutionException(Exception e) {
boolean didCleanup = false;
AsyncCheckpointState currentState = asyncCheckpointState.get();
// CAS loop: at most one transition to DISCARDED wins and performs the cleanup.
while (AsyncCheckpointState.DISCARDED != currentState) {
if (asyncCheckpointState.compareAndSet(currentState, AsyncCheckpointState.DISCARDED)) {
didCleanup = true;
try {
cleanup();
} catch (Exception cleanupException) {
// Keep the original failure as the primary exception.
e.addSuppressed(cleanupException);
}
Exception checkpointException = new Exception(
"Could not materialize checkpoint " + checkpointMetaData.getCheckpointId() + " for operator " +
taskName + '.',
e);
try {
taskEnvironment.declineCheckpoint(checkpointMetaData.getCheckpointId(), checkpointException);
} catch (Exception unhandled) {
// Declining failed as well; surface it through the async exception handler.
AsynchronousException asyncException = new AsynchronousException(unhandled);
asyncExceptionHandler.handleAsyncException("Failure in asynchronous checkpoint materialization", asyncException);
}
// State is now DISCARDED; terminate the loop.
currentState = AsyncCheckpointState.DISCARDED;
} else {
// Lost the CAS race; re-read the state and re-evaluate.
currentState = asyncCheckpointState.get();
}
}
if (!didCleanup) {
// Someone else already discarded the checkpoint; this is only a follow-up failure.
StreamTask.LOG.trace("Caught followup exception from a failed checkpoint thread. This can be ignored.", e);
}
}
/**
 * Cancels a still-running asynchronous checkpoint and cleans up its resources.
 * If the checkpoint already completed (or was discarded), only logs that cleanup
 * is skipped so completed state handles are never touched.
 */
@Override
public void close() {
    final boolean transitionedToDiscarded =
        asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.DISCARDED);

    if (!transitionedToDiscarded) {
        // Checkpoint is COMPLETED or already DISCARDED; nothing to clean up here.
        logFailedCleanupAttempt();
        return;
    }

    try {
        cleanup();
    } catch (Exception cleanupException) {
        StreamTask.LOG.warn("Could not properly clean up the async checkpoint runnable.", cleanupException);
    }
}
/** Logs that cleanup was skipped because the checkpoint operation already completed. */
private void logFailedCleanupAttempt() {
    StreamTask.LOG.debug(
        "{} - asynchronous checkpointing operation for checkpoint {} has already been completed."
            + " Thus, the state handles are not cleaned up.",
        taskName,
        checkpointMetaData.getCheckpointId());
}
} | class AsyncCheckpointRunnable implements Runnable, Closeable {
public static final Logger LOG = LoggerFactory.getLogger(AsyncCheckpointRunnable.class);
// Name of the owning task; used only in log and error messages.
private final String taskName;
// Registry that closes this runnable when the owning task shuts down.
private final CloseableRegistry closeableRegistry;
private final Environment taskEnvironment;
// Lifecycle of the async part: RUNNING -> COMPLETED (success) or RUNNING -> DISCARDED (failure/close).
private enum AsyncCheckpointState {
RUNNING,
DISCARDED,
COMPLETED
}
private final AsyncExceptionHandler asyncExceptionHandler;
// Per-operator snapshot futures that the asynchronous part has to finalize.
private final Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress;
private final CheckpointMetaData checkpointMetaData;
private final CheckpointMetrics checkpointMetrics;
// System.nanoTime() captured when the asynchronous part started; used for the duration metric.
private final long asyncStartNanos;
// Current state; updated via CAS so completion, failure and close() race safely.
private final AtomicReference<AsyncCheckpointState> asyncCheckpointState = new AtomicReference<>(AsyncCheckpointState.RUNNING);
/**
 * Creates the runnable that executes the asynchronous part of a checkpoint.
 *
 * @param operatorSnapshotsInProgress per-operator snapshot futures to finalize
 * @param checkpointMetaData metadata (id, timestamp) of the checkpoint being taken
 * @param checkpointMetrics metrics object that receives the async duration
 * @param asyncStartNanos System.nanoTime() at which the async part was started
 * @param taskName name of the owning task, for logging
 * @param closeableRegistry registry this runnable registers with while running
 * @param taskEnvironment environment used to report or decline the checkpoint
 * @param asyncExceptionHandler handler for failures that cannot be reported otherwise
 */
AsyncCheckpointRunnable(
Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress,
CheckpointMetaData checkpointMetaData,
CheckpointMetrics checkpointMetrics,
long asyncStartNanos,
String taskName,
CloseableRegistry closeableRegistry,
Environment taskEnvironment,
AsyncExceptionHandler asyncExceptionHandler) {
this.operatorSnapshotsInProgress = checkNotNull(operatorSnapshotsInProgress);
this.checkpointMetaData = checkNotNull(checkpointMetaData);
this.checkpointMetrics = checkNotNull(checkpointMetrics);
this.asyncStartNanos = asyncStartNanos;
this.taskName = checkNotNull(taskName);
this.closeableRegistry = checkNotNull(closeableRegistry);
this.taskEnvironment = checkNotNull(taskEnvironment);
this.asyncExceptionHandler = checkNotNull(asyncExceptionHandler);
}
@Override
public void run() {
FileSystemSafetyNet.initializeSafetyNetForThread();
try {
closeableRegistry.registerCloseable(this);
TaskStateSnapshot jobManagerTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
TaskStateSnapshot localTaskOperatorSubtaskStates = new TaskStateSnapshot(operatorSnapshotsInProgress.size());
for (Map.Entry<OperatorID, OperatorSnapshotFutures> entry : operatorSnapshotsInProgress.entrySet()) {
OperatorID operatorID = entry.getKey();
OperatorSnapshotFutures snapshotInProgress = entry.getValue();
OperatorSnapshotFinalizer finalizedSnapshots =
new OperatorSnapshotFinalizer(snapshotInProgress);
jobManagerTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
operatorID,
finalizedSnapshots.getJobManagerOwnedState());
localTaskOperatorSubtaskStates.putSubtaskStateByOperatorID(
operatorID,
finalizedSnapshots.getTaskLocalState());
}
final long asyncEndNanos = System.nanoTime();
final long asyncDurationMillis = (asyncEndNanos - asyncStartNanos) / 1_000_000L;
checkpointMetrics.setAsyncDurationMillis(asyncDurationMillis);
if (asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.COMPLETED)) {
reportCompletedSnapshotStates(
jobManagerTaskOperatorSubtaskStates,
localTaskOperatorSubtaskStates,
asyncDurationMillis);
} else {
LOG.debug("{} - asynchronous part of checkpoint {} could not be completed because it was closed before.",
taskName,
checkpointMetaData.getCheckpointId());
}
} catch (Exception e) {
if (LOG.isDebugEnabled()) {
LOG.debug("{} - asynchronous part of checkpoint {} could not be completed.",
taskName,
checkpointMetaData.getCheckpointId(),
e);
}
handleExecutionException(e);
} finally {
closeableRegistry.unregisterCloseable(this);
FileSystemSafetyNet.closeSafetyNetAndGuardedResourcesForThread();
}
}
private void reportCompletedSnapshotStates(
TaskStateSnapshot acknowledgedTaskStateSnapshot,
TaskStateSnapshot localTaskStateSnapshot,
long asyncDurationMillis) {
boolean hasAckState = acknowledgedTaskStateSnapshot.hasState();
boolean hasLocalState = localTaskStateSnapshot.hasState();
Preconditions.checkState(hasAckState || !hasLocalState,
"Found cached state but no corresponding primary state is reported to the job " +
"manager. This indicates a problem.");
taskEnvironment.getTaskStateManager().reportTaskStateSnapshots(
checkpointMetaData,
checkpointMetrics,
hasAckState ? acknowledgedTaskStateSnapshot : null,
hasLocalState ? localTaskStateSnapshot : null);
LOG.debug("{} - finished asynchronous part of checkpoint {}. Asynchronous duration: {} ms",
taskName, checkpointMetaData.getCheckpointId(), asyncDurationMillis);
LOG.trace("{} - reported the following states in snapshot for checkpoint {}: {}.",
taskName, checkpointMetaData.getCheckpointId(), acknowledgedTaskStateSnapshot);
}
private void handleExecutionException(Exception e) {
boolean didCleanup = false;
AsyncCheckpointState currentState = asyncCheckpointState.get();
while (AsyncCheckpointState.DISCARDED != currentState) {
if (asyncCheckpointState.compareAndSet(currentState, AsyncCheckpointState.DISCARDED)) {
didCleanup = true;
try {
cleanup();
} catch (Exception cleanupException) {
e.addSuppressed(cleanupException);
}
Exception checkpointException = new Exception(
"Could not materialize checkpoint " + checkpointMetaData.getCheckpointId() + " for operator " +
taskName + '.',
e);
try {
taskEnvironment.declineCheckpoint(checkpointMetaData.getCheckpointId(), checkpointException);
} catch (Exception unhandled) {
AsynchronousException asyncException = new AsynchronousException(unhandled);
asyncExceptionHandler.handleAsyncException("Failure in asynchronous checkpoint materialization", asyncException);
}
currentState = AsyncCheckpointState.DISCARDED;
} else {
currentState = asyncCheckpointState.get();
}
}
if (!didCleanup) {
LOG.trace("Caught followup exception from a failed checkpoint thread. This can be ignored.", e);
}
}
@Override
public void close() {
if (asyncCheckpointState.compareAndSet(AsyncCheckpointState.RUNNING, AsyncCheckpointState.DISCARDED)) {
try {
cleanup();
} catch (Exception cleanupException) {
LOG.warn("Could not properly clean up the async checkpoint runnable.", cleanupException);
}
} else {
logFailedCleanupAttempt();
}
}
private void logFailedCleanupAttempt() {
LOG.debug("{} - asynchronous checkpointing operation for checkpoint {} has " +
"already been completed. Thus, the state handles are not cleaned up.",
taskName,
checkpointMetaData.getCheckpointId());
}
} |
I'd modify these to return a collection instead and make them static. | private void initRegionExecutionViewByVertex(final Set<PipelinedRegion> pipelinedRegions) {
for (PipelinedRegion pipelinedRegion : pipelinedRegions) {
final PipelinedRegionExecutionView regionExecutionView = new PipelinedRegionExecutionView(pipelinedRegion);
for (ExecutionVertexID executionVertexId : pipelinedRegion) {
regionExecutionViewByVertex.put(executionVertexId, regionExecutionView);
}
}
} | for (ExecutionVertexID executionVertexId : pipelinedRegion) { | private void initRegionExecutionViewByVertex(final Set<PipelinedRegion> pipelinedRegions) {
for (PipelinedRegion pipelinedRegion : pipelinedRegions) {
final PipelinedRegionExecutionView regionExecutionView = new PipelinedRegionExecutionView(pipelinedRegion);
for (ExecutionVertexID executionVertexId : pipelinedRegion) {
regionExecutionViewByVertex.put(executionVertexId, regionExecutionView);
}
}
} | class RegionPartitionReleaseStrategy implements PartitionReleaseStrategy {
private final SchedulingTopology schedulingTopology;
private final Map<PipelinedRegion, PipelinedRegionConsumedBlockingPartitions> consumedBlockingPartitionsByRegion = new IdentityHashMap<>();
private final Map<ExecutionVertexID, PipelinedRegionExecutionView> regionExecutionViewByVertex = new HashMap<>();
/**
 * Creates the strategy and eagerly precomputes the per-region lookup structures
 * (consumed blocking partitions and execution views) from the given regions.
 */
public RegionPartitionReleaseStrategy(
final SchedulingTopology schedulingTopology,
final Set<PipelinedRegion> pipelinedRegions) {
this.schedulingTopology = checkNotNull(schedulingTopology);
checkNotNull(pipelinedRegions);
initConsumedBlockingPartitionsByRegion(pipelinedRegions);
initRegionExecutionViewByVertex(pipelinedRegions);
}
/** Precomputes, for every pipelined region, the blocking partitions it consumes. */
private void initConsumedBlockingPartitionsByRegion(final Set<PipelinedRegion> pipelinedRegions) {
    pipelinedRegions.forEach(region ->
        consumedBlockingPartitionsByRegion.put(region, computeConsumedPartitionsOfVertexRegion(region)));
}
/** Bundles the region together with the consumed partitions produced outside of it. */
private PipelinedRegionConsumedBlockingPartitions computeConsumedPartitionsOfVertexRegion(final PipelinedRegion pipelinedRegion) {
    return new PipelinedRegionConsumedBlockingPartitions(
        pipelinedRegion,
        findResultPartitionsOutsideOfRegion(pipelinedRegion));
}
/**
 * Collects the IDs of all result partitions that vertices of the given region consume
 * but whose producers are not part of the region itself.
 */
private Set<IntermediateResultPartitionID> findResultPartitionsOutsideOfRegion(final PipelinedRegion pipelinedRegion) {
// All result partitions consumed by any vertex of the region (may include region-internal ones).
final Set<SchedulingResultPartition> allConsumedPartitionsInRegion = pipelinedRegion
.getExecutionVertexIds()
.stream()
.map(schedulingTopology::getVertexOrThrow)
.flatMap(schedulingExecutionVertex -> schedulingExecutionVertex.getConsumedResultPartitions().stream())
.collect(Collectors.toSet());
return filterResultPartitionsOutsideOfRegion(allConsumedPartitionsInRegion, pipelinedRegion);
}
/**
 * Filters the given result partitions down to those produced outside of the given region.
 *
 * <p>Made {@code static} because the method depends only on its parameters and not on
 * any instance state, which makes the lack of side effects explicit.
 *
 * @param resultPartitions candidate partitions consumed by vertices of the region
 * @param pipelinedRegion the region whose external inputs are being determined
 * @return the IDs of partitions whose producer vertex is not contained in the region
 */
private static Set<IntermediateResultPartitionID> filterResultPartitionsOutsideOfRegion(
    final Collection<SchedulingResultPartition> resultPartitions,
    final PipelinedRegion pipelinedRegion) {
    final Set<IntermediateResultPartitionID> result = new HashSet<>();
    for (final SchedulingResultPartition maybeOutsidePartition : resultPartitions) {
        final SchedulingExecutionVertex producer = maybeOutsidePartition.getProducer();
        // Keep the partition only if its producer lies outside of the region.
        if (!pipelinedRegion.contains(producer.getId())) {
            result.add(maybeOutsidePartition.getId());
        }
    }
    return result;
}
/**
 * Marks the vertex as finished within its pipelined region; when the whole region has
 * finished, returns the consumed blocking partitions that have become releasable.
 *
 * @return releasable partition IDs, or an empty list if the region is not yet finished
 */
@Override
public List<IntermediateResultPartitionID> vertexFinished(final ExecutionVertexID finishedVertex) {
final PipelinedRegionExecutionView regionExecutionView = getPipelinedRegionExecutionViewForVertex(finishedVertex);
regionExecutionView.finish(finishedVertex);
if (regionExecutionView.isFinished()) {
// Entire region finished: check which of its consumed blocking partitions can go.
final PipelinedRegion pipelinedRegion = getPipelinedRegionForVertex(finishedVertex);
final PipelinedRegionConsumedBlockingPartitions consumedPartitionsOfVertexRegion = getConsumedBlockingPartitionsForRegion(pipelinedRegion);
return filterReleasablePartitions(consumedPartitionsOfVertexRegion);
}
return Collections.emptyList();
}
/** Reverts the finished-flag of the given vertex within its pipelined region. */
@Override
public void vertexUnfinished(final ExecutionVertexID executionVertexId) {
    getPipelinedRegionExecutionViewForVertex(executionVertexId).unfinish(executionVertexId);
}
/**
 * Looks up the execution view of the region containing the given vertex,
 * failing fast if the vertex is unknown to this strategy.
 */
private PipelinedRegionExecutionView getPipelinedRegionExecutionViewForVertex(final ExecutionVertexID executionVertexId) {
    final PipelinedRegionExecutionView view = regionExecutionViewByVertex.get(executionVertexId);
    checkState(view != null,
        "PipelinedRegionExecutionView not found for execution vertex %s", executionVertexId);
    return view;
}
/** Returns the pipelined region that contains the given execution vertex. */
private PipelinedRegion getPipelinedRegionForVertex(final ExecutionVertexID executionVertexId) {
    return getPipelinedRegionExecutionViewForVertex(executionVertexId).getPipelinedRegion();
}
/**
 * Returns the precomputed consumed blocking partitions of the given region,
 * failing fast if the region is unknown or the cached entry is inconsistent.
 */
private PipelinedRegionConsumedBlockingPartitions getConsumedBlockingPartitionsForRegion(final PipelinedRegion pipelinedRegion) {
final PipelinedRegionConsumedBlockingPartitions pipelinedRegionConsumedBlockingPartitions = consumedBlockingPartitionsByRegion.get(pipelinedRegion);
checkState(pipelinedRegionConsumedBlockingPartitions != null,
"Consumed partitions not found for pipelined region %s", pipelinedRegion);
// Sanity check: the cached entry must reference exactly this region instance
// (the map is an IdentityHashMap keyed by region identity).
checkState(pipelinedRegionConsumedBlockingPartitions.getPipelinedRegion() == pipelinedRegion);
return pipelinedRegionConsumedBlockingPartitions;
}
/**
 * Keeps only those consumed blocking partitions whose consumer regions have all
 * finished and that can therefore be released.
 */
private List<IntermediateResultPartitionID> filterReleasablePartitions(final PipelinedRegionConsumedBlockingPartitions consumedPartitionsOfVertexRegion) {
return consumedPartitionsOfVertexRegion
.getConsumedBlockingPartitions()
.stream()
.filter(this::areConsumerRegionsFinished)
.collect(Collectors.toList());
}
/** Returns true iff every region consuming the given result partition has finished. */
private boolean areConsumerRegionsFinished(final IntermediateResultPartitionID resultPartitionId) {
    final SchedulingResultPartition resultPartition = schedulingTopology.getResultPartitionOrThrow(resultPartitionId);
    for (final SchedulingExecutionVertex consumer : resultPartition.getConsumers()) {
        if (!isRegionOfVertexFinished(consumer.getId())) {
            return false;
        }
    }
    return true;
}
/** Returns whether the region containing the given vertex has completely finished. */
private boolean isRegionOfVertexFinished(final ExecutionVertexID executionVertexId) {
    return getPipelinedRegionExecutionViewForVertex(executionVertexId).isFinished();
}
} | class RegionPartitionReleaseStrategy implements PartitionReleaseStrategy {
private final SchedulingTopology schedulingTopology;
private final Map<PipelinedRegion, PipelinedRegionConsumedBlockingPartitions> consumedBlockingPartitionsByRegion = new IdentityHashMap<>();
private final Map<ExecutionVertexID, PipelinedRegionExecutionView> regionExecutionViewByVertex = new HashMap<>();
public RegionPartitionReleaseStrategy(
final SchedulingTopology schedulingTopology,
final Set<PipelinedRegion> pipelinedRegions) {
this.schedulingTopology = checkNotNull(schedulingTopology);
checkNotNull(pipelinedRegions);
initConsumedBlockingPartitionsByRegion(pipelinedRegions);
initRegionExecutionViewByVertex(pipelinedRegions);
}
private void initConsumedBlockingPartitionsByRegion(final Set<PipelinedRegion> pipelinedRegions) {
for (PipelinedRegion pipelinedRegion : pipelinedRegions) {
final PipelinedRegionConsumedBlockingPartitions consumedPartitions = computeConsumedPartitionsOfVertexRegion(pipelinedRegion);
consumedBlockingPartitionsByRegion.put(pipelinedRegion, consumedPartitions);
}
}
private PipelinedRegionConsumedBlockingPartitions computeConsumedPartitionsOfVertexRegion(final PipelinedRegion pipelinedRegion) {
final Set<IntermediateResultPartitionID> resultPartitionsOutsideOfRegion = findResultPartitionsOutsideOfRegion(pipelinedRegion);
return new PipelinedRegionConsumedBlockingPartitions(pipelinedRegion, resultPartitionsOutsideOfRegion);
}
private Set<IntermediateResultPartitionID> findResultPartitionsOutsideOfRegion(final PipelinedRegion pipelinedRegion) {
final Set<SchedulingResultPartition> allConsumedPartitionsInRegion = pipelinedRegion
.getExecutionVertexIds()
.stream()
.map(schedulingTopology::getVertexOrThrow)
.flatMap(schedulingExecutionVertex -> schedulingExecutionVertex.getConsumedResultPartitions().stream())
.collect(Collectors.toSet());
return filterResultPartitionsOutsideOfRegion(allConsumedPartitionsInRegion, pipelinedRegion);
}
private static Set<IntermediateResultPartitionID> filterResultPartitionsOutsideOfRegion(
final Collection<SchedulingResultPartition> resultPartitions,
final PipelinedRegion pipelinedRegion) {
final Set<IntermediateResultPartitionID> result = new HashSet<>();
for (final SchedulingResultPartition maybeOutsidePartition : resultPartitions) {
final SchedulingExecutionVertex producer = maybeOutsidePartition.getProducer();
if (!pipelinedRegion.contains(producer.getId())) {
result.add(maybeOutsidePartition.getId());
}
}
return result;
}
@Override
public List<IntermediateResultPartitionID> vertexFinished(final ExecutionVertexID finishedVertex) {
final PipelinedRegionExecutionView regionExecutionView = getPipelinedRegionExecutionViewForVertex(finishedVertex);
regionExecutionView.vertexFinished(finishedVertex);
if (regionExecutionView.isFinished()) {
final PipelinedRegion pipelinedRegion = getPipelinedRegionForVertex(finishedVertex);
final PipelinedRegionConsumedBlockingPartitions consumedPartitionsOfVertexRegion = getConsumedBlockingPartitionsForRegion(pipelinedRegion);
return filterReleasablePartitions(consumedPartitionsOfVertexRegion);
}
return Collections.emptyList();
}
@Override
public void vertexUnfinished(final ExecutionVertexID executionVertexId) {
final PipelinedRegionExecutionView regionExecutionView = getPipelinedRegionExecutionViewForVertex(executionVertexId);
regionExecutionView.vertexUnfinished(executionVertexId);
}
private PipelinedRegionExecutionView getPipelinedRegionExecutionViewForVertex(final ExecutionVertexID executionVertexId) {
final PipelinedRegionExecutionView pipelinedRegionExecutionView = regionExecutionViewByVertex.get(executionVertexId);
checkState(pipelinedRegionExecutionView != null,
"PipelinedRegionExecutionView not found for execution vertex %s", executionVertexId);
return pipelinedRegionExecutionView;
}
private PipelinedRegion getPipelinedRegionForVertex(final ExecutionVertexID executionVertexId) {
final PipelinedRegionExecutionView pipelinedRegionExecutionView = getPipelinedRegionExecutionViewForVertex(executionVertexId);
return pipelinedRegionExecutionView.getPipelinedRegion();
}
private PipelinedRegionConsumedBlockingPartitions getConsumedBlockingPartitionsForRegion(final PipelinedRegion pipelinedRegion) {
final PipelinedRegionConsumedBlockingPartitions pipelinedRegionConsumedBlockingPartitions = consumedBlockingPartitionsByRegion.get(pipelinedRegion);
checkState(pipelinedRegionConsumedBlockingPartitions != null,
"Consumed partitions not found for pipelined region %s", pipelinedRegion);
checkState(pipelinedRegionConsumedBlockingPartitions.getPipelinedRegion() == pipelinedRegion);
return pipelinedRegionConsumedBlockingPartitions;
}
private List<IntermediateResultPartitionID> filterReleasablePartitions(final PipelinedRegionConsumedBlockingPartitions consumedPartitionsOfVertexRegion) {
return consumedPartitionsOfVertexRegion
.getConsumedBlockingPartitions()
.stream()
.filter(this::areConsumerRegionsFinished)
.collect(Collectors.toList());
}
private boolean areConsumerRegionsFinished(final IntermediateResultPartitionID resultPartitionId) {
final SchedulingResultPartition resultPartition = schedulingTopology.getResultPartitionOrThrow(resultPartitionId);
final Collection<SchedulingExecutionVertex> consumers = resultPartition.getConsumers();
return consumers
.stream()
.map(SchedulingExecutionVertex::getId)
.allMatch(this::isRegionOfVertexFinished);
}
private boolean isRegionOfVertexFinished(final ExecutionVertexID executionVertexId) {
final PipelinedRegionExecutionView regionExecutionView = getPipelinedRegionExecutionViewForVertex(executionVertexId);
return regionExecutionView.isFinished();
}
/**
* Factory for {@link PartitionReleaseStrategy}.
*/
public static class Factory implements PartitionReleaseStrategy.Factory {
@Override
public PartitionReleaseStrategy createInstance(
final SchedulingTopology schedulingStrategy,
final FailoverTopology failoverTopology) {
final Set<Set<FailoverVertex>> distinctRegions = PipelinedRegionComputeUtil.computePipelinedRegions(failoverTopology);
return new RegionPartitionReleaseStrategy(
schedulingStrategy,
PipelinedRegionComputeUtil.toPipelinedRegionsSet(distinctRegions));
}
}
} |
Actually `srcRel.getTable().getQualifiedName()` returns a `List`; in Calcite there's a concept of a `database`, which we don't touch so far. | private BeamSqlSeekableTable getSeekableTableFromRelNode(BeamRelNode relNode, BeamSqlEnv sqlEnv) {
BeamIOSourceRel srcRel = (BeamIOSourceRel) relNode;
String tableName = Joiner.on('.').join(srcRel.getTable().getQualifiedName());
BeamSqlTable sourceTable = sqlEnv.findTable(tableName);
return (BeamSqlSeekableTable) sourceTable;
} | String tableName = Joiner.on('.').join(srcRel.getTable().getQualifiedName()); | private BeamSqlSeekableTable getSeekableTableFromRelNode(BeamRelNode relNode, BeamSqlEnv sqlEnv) {
BeamIOSourceRel srcRel = (BeamIOSourceRel) relNode;
String tableName = Joiner.on('.').join(srcRel.getTable().getQualifiedName());
BeamSqlTable sourceTable = sqlEnv.findTable(tableName);
return (BeamSqlSeekableTable) sourceTable;
} | class BeamJoinRel extends Join implements BeamRelNode {
/** Creates a join relational node; all bookkeeping is delegated to Calcite's {@code Join}. */
public BeamJoinRel(RelOptCluster cluster, RelTraitSet traits, RelNode left, RelNode right,
RexNode condition, Set<CorrelationId> variablesSet, JoinRelType joinType) {
super(cluster, traits, left, right, condition, variablesSet, joinType);
}
/** Copies this node with the given traits/inputs/condition, as required by the Calcite planner. */
@Override public Join copy(RelTraitSet traitSet, RexNode conditionExpr, RelNode left,
RelNode right, JoinRelType joinType, boolean semiJoinDone) {
return new BeamJoinRel(getCluster(), traitSet, left, right, conditionExpr, variablesSet,
joinType);
}
/**
 * Translates this join into a Beam pipeline. Chooses one of three strategies:
 * a lookup join if the right side is a seekable IO source, a standard co-group join
 * if both sides have the same boundedness, or a side-input join when one side is
 * bounded and the other unbounded.
 *
 * @throws UnsupportedOperationException for unsupported bounded/unbounded combinations
 */
@Override public PCollection<BeamRecord> buildBeamPipeline(PCollectionTuple inputPCollections,
BeamSqlEnv sqlEnv)
throws Exception {
BeamRelNode leftRelNode = BeamSqlRelUtils.getBeamRelInput(left);
BeamRecordSqlType leftRowType = CalciteUtils.toBeamRowType(left.getRowType());
final BeamRelNode rightRelNode = BeamSqlRelUtils.getBeamRelInput(right);
// Strategy 1: right side is a seekable table -> join by direct lookups, no shuffle.
if (!seekable(leftRelNode, sqlEnv) && seekable(rightRelNode, sqlEnv)) {
return joinAsLookup(leftRelNode, rightRelNode, inputPCollections, sqlEnv)
.setCoder(CalciteUtils.toBeamRowType(getRowType()).getRecordCoder());
}
PCollection<BeamRecord> leftRows = leftRelNode.buildBeamPipeline(inputPCollections, sqlEnv);
PCollection<BeamRecord> rightRows = rightRelNode.buildBeamPipeline(inputPCollections, sqlEnv);
String stageName = BeamSqlRelUtils.getStageName(this);
WindowFn leftWinFn = leftRows.getWindowingStrategy().getWindowFn();
WindowFn rightWinFn = rightRows.getWindowingStrategy().getWindowFn();
// Pairs of (left column index, right column index) taken from the equi-join condition.
List<Pair<Integer, Integer>> pairs = extractJoinColumns(
leftRelNode.getRowType().getFieldCount());
// Build the synthetic key row type ("c0", "c1", ...) over the join columns.
List<String> names = new ArrayList<>(pairs.size());
List<Integer> types = new ArrayList<>(pairs.size());
for (int i = 0; i < pairs.size(); i++) {
names.add("c" + i);
types.add(leftRowType.getFieldTypeByIndex(pairs.get(i).getKey()));
}
BeamRecordSqlType extractKeyRowType = BeamRecordSqlType.create(names, types);
Coder extractKeyRowCoder = extractKeyRowType.getRecordCoder();
// Key both inputs by their join columns so they can be joined per key.
PCollection<KV<BeamRecord, BeamRecord>> extractedLeftRows = leftRows
.apply(stageName + "_left_ExtractJoinFields",
MapElements.via(new BeamJoinTransforms.ExtractJoinFields(true, pairs)))
.setCoder(KvCoder.of(extractKeyRowCoder, leftRows.getCoder()))
PCollection<KV<BeamRecord, BeamRecord>> extractedRightRows = rightRows
.apply(stageName + "_right_ExtractJoinFields",
MapElements.via(new BeamJoinTransforms.ExtractJoinFields(false, pairs)))
.setCoder(KvCoder.of(extractKeyRowCoder, rightRows.getCoder()));
// All-null rows used to pad the missing side of an outer join.
BeamRecord leftNullRow = buildNullRow(leftRelNode);
BeamRecord rightNullRow = buildNullRow(rightRelNode);
// Strategy 2: same boundedness on both sides -> standard join (requires matching windows).
if ((leftRows.isBounded() == PCollection.IsBounded.BOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.BOUNDED)
|| (leftRows.isBounded() == PCollection.IsBounded.UNBOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.UNBOUNDED)) {
try {
leftWinFn.verifyCompatibility(rightWinFn);
} catch (IncompatibleWindowException e) {
throw new IllegalArgumentException(
"WindowFns must match for a bounded-vs-bounded/unbounded-vs-unbounded join.", e);
}
return standardJoin(extractedLeftRows, extractedRightRows,
leftNullRow, rightNullRow, stageName);
} else if (
(leftRows.isBounded() == PCollection.IsBounded.BOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.UNBOUNDED)
|| (leftRows.isBounded() == PCollection.IsBounded.UNBOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.BOUNDED)
) {
// Strategy 3: mixed boundedness -> bounded side becomes a side input.
if (joinType == JoinRelType.FULL) {
throw new UnsupportedOperationException("FULL OUTER JOIN is not supported when join "
+ "a bounded table with an unbounded table.");
}
// For outer joins the unbounded collection must be the outer (preserved) side.
if ((joinType == JoinRelType.LEFT
&& leftRows.isBounded() == PCollection.IsBounded.BOUNDED)
|| (joinType == JoinRelType.RIGHT
&& rightRows.isBounded() == PCollection.IsBounded.BOUNDED)) {
throw new UnsupportedOperationException(
"LEFT side of an OUTER JOIN must be Unbounded table.");
}
return sideInputJoin(extractedLeftRows, extractedRightRows,
leftNullRow, rightNullRow);
} else {
throw new UnsupportedOperationException(
"The inputs to the JOIN have un-joinnable windowFns: " + leftWinFn + ", " + rightWinFn);
}
}
/**
 * Joins two keyed collections of equal boundedness using the Beam join library,
 * dispatching on the SQL join type, and flattens the joined key/value parts back
 * into a single output row.
 */
private PCollection<BeamRecord> standardJoin(
PCollection<KV<BeamRecord, BeamRecord>> extractedLeftRows,
PCollection<KV<BeamRecord, BeamRecord>> extractedRightRows,
BeamRecord leftNullRow, BeamRecord rightNullRow, String stageName) {
PCollection<KV<BeamRecord, KV<BeamRecord, BeamRecord>>> joinedRows = null;
switch (joinType) {
case LEFT:
// Right side may be missing: pad with the all-null right row.
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.leftOuterJoin(extractedLeftRows, extractedRightRows, rightNullRow);
break;
case RIGHT:
// Left side may be missing: pad with the all-null left row.
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.rightOuterJoin(extractedLeftRows, extractedRightRows, leftNullRow);
break;
case FULL:
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.fullOuterJoin(extractedLeftRows, extractedRightRows, leftNullRow,
rightNullRow);
break;
case INNER:
default:
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.innerJoin(extractedLeftRows, extractedRightRows);
break;
}
// Combine the (key, (left, right)) structure into one flat output record.
PCollection<BeamRecord> ret = joinedRows
.apply(stageName + "_JoinParts2WholeRow",
MapElements.via(new BeamJoinTransforms.JoinParts2WholeRow()))
.setCoder(CalciteUtils.toBeamRowType(getRowType()).getRecordCoder());
return ret;
}
/**
 * Joins a bounded with an unbounded collection by turning the bounded side into a
 * side input. If the bounded collection arrived on the left, the sides are swapped
 * so the unbounded collection always drives the join.
 */
public PCollection<BeamRecord> sideInputJoin(
PCollection<KV<BeamRecord, BeamRecord>> extractedLeftRows,
PCollection<KV<BeamRecord, BeamRecord>> extractedRightRows,
BeamRecord leftNullRow, BeamRecord rightNullRow) {
// If the left input is the bounded one, swap sides so the unbounded side is "left".
boolean swapped = (extractedLeftRows.isBounded() == PCollection.IsBounded.BOUNDED);
// After swapping, a RIGHT outer join becomes a LEFT outer join of the swapped sides.
JoinRelType realJoinType =
(swapped && joinType != JoinRelType.INNER) ? JoinRelType.LEFT : joinType;
PCollection<KV<BeamRecord, BeamRecord>> realLeftRows =
swapped ? extractedRightRows : extractedLeftRows;
PCollection<KV<BeamRecord, BeamRecord>> realRightRows =
swapped ? extractedLeftRows : extractedRightRows;
BeamRecord realRightNullRow = swapped ? leftNullRow : rightNullRow;
return sideInputJoinHelper(realJoinType, realLeftRows, realRightRows,
realRightNullRow, swapped);
}
/**
 * Performs the actual side-input join: materializes the (bounded) right side as a
 * multimap side input and probes it from a DoFn over the (unbounded) left side.
 *
 * @param swapped whether left/right were swapped, so the DoFn can restore field order
 */
private PCollection<BeamRecord> sideInputJoinHelper(
JoinRelType joinType,
PCollection<KV<BeamRecord, BeamRecord>> leftRows,
PCollection<KV<BeamRecord, BeamRecord>> rightRows,
BeamRecord rightNullRow, boolean swapped) {
// The bounded side is broadcast to all workers as a key -> rows multimap.
final PCollectionView<Map<BeamRecord, Iterable<BeamRecord>>> rowsView = rightRows
.apply(View.<BeamRecord, BeamRecord>asMultimap());
PCollection<BeamRecord> ret = leftRows
.apply(ParDo.of(new BeamJoinTransforms.SideInputJoinDoFn(
joinType, rightNullRow, rowsView, swapped)).withSideInputs(rowsView))
.setCoder(CalciteUtils.toBeamRowType(getRowType()).getRecordCoder());
return ret;
}
/** Builds a row of the given node's row type with every field set to null (outer-join padding). */
private BeamRecord buildNullRow(BeamRelNode relNode) {
    final BeamRecordSqlType rowType = CalciteUtils.toBeamRowType(relNode.getRowType());
    final List<Object> nullFields = Collections.nCopies(rowType.getFieldCount(), null);
    return new BeamRecord(rowType, nullFields);
}
/**
 * Extracts the equi-join column pairs from the join condition. Supports a single
 * equality or a conjunction (AND) of equalities; a literal TRUE condition means
 * CROSS JOIN, which is rejected.
 *
 * @param leftRowColumnCount width of the left row, used to rebase right-side indices
 * @return pairs of (left column index, right column index)
 * @throws UnsupportedOperationException for CROSS JOIN or non-equality operators
 */
private List<Pair<Integer, Integer>> extractJoinColumns(int leftRowColumnCount) {
// A constant TRUE condition is a cross join, which is not supported.
if (condition instanceof RexLiteral && (Boolean) ((RexLiteral) condition).getValue()) {
throw new UnsupportedOperationException("CROSS JOIN is not supported!");
}
RexCall call = (RexCall) condition;
List<Pair<Integer, Integer>> pairs = new ArrayList<>();
if ("AND".equals(call.getOperator().getName())) {
// Conjunction of equalities: collect one column pair per operand.
List<RexNode> operands = call.getOperands();
for (RexNode rexNode : operands) {
Pair<Integer, Integer> pair = extractOneJoinColumn((RexCall) rexNode, leftRowColumnCount);
pairs.add(pair);
}
} else if ("=".equals(call.getOperator().getName())) {
pairs.add(extractOneJoinColumn(call, leftRowColumnCount));
} else {
throw new UnsupportedOperationException(
"Operator " + call.getOperator().getName() + " is not supported in join condition");
}
return pairs;
}
/**
 * Extracts one (leftColumnIndex, rightColumnIndex) pair from a single equality condition.
 * The smaller operand index belongs to the left input; the larger one refers to the
 * right input and is rebased by subtracting the left row's width.
 */
private Pair<Integer, Integer> extractOneJoinColumn(RexCall oneCondition,
    int leftRowColumnCount) {
    List<RexNode> operands = oneCondition.getOperands();
    final int firstIndex = ((RexInputRef) operands.get(0)).getIndex();
    final int secondIndex = ((RexInputRef) operands.get(1)).getIndex();

    final int leftIndex = Math.min(firstIndex, secondIndex);
    // Right-input column indices are offset by the width of the left row.
    final int rightIndex = Math.max(firstIndex, secondIndex) - leftRowColumnCount;
    return new Pair<>(leftIndex, rightIndex);
}
/**
 * Executes the join as a per-record lookup: streams the left (fact) input and looks up
 * matching rows in the right side's {@code BeamSqlSeekableTable}.
 */
private PCollection<BeamRecord> joinAsLookup(BeamRelNode leftRelNode, BeamRelNode rightRelNode,
PCollectionTuple inputPCollections, BeamSqlEnv sqlEnv) throws Exception {
PCollection<BeamRecord> factStream = leftRelNode.buildBeamPipeline(inputPCollections, sqlEnv);
BeamSqlSeekableTable seekableTable = getSeekableTableFromRelNode(rightRelNode, sqlEnv);
return factStream.apply("join_as_lookup",
new BeamJoinTransforms.JoinAsLookup(condition, seekableTable,
CalciteUtils.toBeamRowType(rightRelNode.getRowType()),
CalciteUtils.toBeamRowType(leftRelNode.getRowType()).getFieldCount()));
}
/**
 * Checks whether {@code relNode} is an IO source whose backing table implements
 * {@link BeamSqlSeekableTable} and can therefore serve as the lookup side of a join.
 */
private boolean seekable(BeamRelNode relNode, BeamSqlEnv sqlEnv) {
    if (!(relNode instanceof BeamIOSourceRel)) {
        return false;
    }
    BeamIOSourceRel srcRel = (BeamIOSourceRel) relNode;
    // getQualifiedName() returns the name parts as a list; join them with '.'
    // to form the lookup key expected by the SQL environment.
    String tableName = Joiner.on('.').join(srcRel.getTable().getQualifiedName());
    return sqlEnv.findTable(tableName) instanceof BeamSqlSeekableTable;
}
} | class BeamJoinRel extends Join implements BeamRelNode {
public BeamJoinRel(RelOptCluster cluster, RelTraitSet traits, RelNode left, RelNode right,
RexNode condition, Set<CorrelationId> variablesSet, JoinRelType joinType) {
super(cluster, traits, left, right, condition, variablesSet, joinType);
}
@Override public Join copy(RelTraitSet traitSet, RexNode conditionExpr, RelNode left,
RelNode right, JoinRelType joinType, boolean semiJoinDone) {
return new BeamJoinRel(getCluster(), traitSet, left, right, conditionExpr, variablesSet,
joinType);
}
@Override public PCollection<BeamRecord> buildBeamPipeline(PCollectionTuple inputPCollections,
BeamSqlEnv sqlEnv)
throws Exception {
BeamRelNode leftRelNode = BeamSqlRelUtils.getBeamRelInput(left);
BeamRecordSqlType leftRowType = CalciteUtils.toBeamRowType(left.getRowType());
final BeamRelNode rightRelNode = BeamSqlRelUtils.getBeamRelInput(right);
if (!seekable(leftRelNode, sqlEnv) && seekable(rightRelNode, sqlEnv)) {
return joinAsLookup(leftRelNode, rightRelNode, inputPCollections, sqlEnv)
.setCoder(CalciteUtils.toBeamRowType(getRowType()).getRecordCoder());
}
PCollection<BeamRecord> leftRows = leftRelNode.buildBeamPipeline(inputPCollections, sqlEnv);
PCollection<BeamRecord> rightRows = rightRelNode.buildBeamPipeline(inputPCollections, sqlEnv);
String stageName = BeamSqlRelUtils.getStageName(this);
WindowFn leftWinFn = leftRows.getWindowingStrategy().getWindowFn();
WindowFn rightWinFn = rightRows.getWindowingStrategy().getWindowFn();
List<Pair<Integer, Integer>> pairs = extractJoinColumns(
leftRelNode.getRowType().getFieldCount());
List<String> names = new ArrayList<>(pairs.size());
List<Integer> types = new ArrayList<>(pairs.size());
for (int i = 0; i < pairs.size(); i++) {
names.add("c" + i);
types.add(leftRowType.getFieldTypeByIndex(pairs.get(i).getKey()));
}
BeamRecordSqlType extractKeyRowType = BeamRecordSqlType.create(names, types);
Coder extractKeyRowCoder = extractKeyRowType.getRecordCoder();
PCollection<KV<BeamRecord, BeamRecord>> extractedLeftRows = leftRows
.apply(stageName + "_left_ExtractJoinFields",
MapElements.via(new BeamJoinTransforms.ExtractJoinFields(true, pairs)))
.setCoder(KvCoder.of(extractKeyRowCoder, leftRows.getCoder()));
PCollection<KV<BeamRecord, BeamRecord>> extractedRightRows = rightRows
.apply(stageName + "_right_ExtractJoinFields",
MapElements.via(new BeamJoinTransforms.ExtractJoinFields(false, pairs)))
.setCoder(KvCoder.of(extractKeyRowCoder, rightRows.getCoder()));
BeamRecord leftNullRow = buildNullRow(leftRelNode);
BeamRecord rightNullRow = buildNullRow(rightRelNode);
if ((leftRows.isBounded() == PCollection.IsBounded.BOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.BOUNDED)
|| (leftRows.isBounded() == PCollection.IsBounded.UNBOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.UNBOUNDED)) {
try {
leftWinFn.verifyCompatibility(rightWinFn);
} catch (IncompatibleWindowException e) {
throw new IllegalArgumentException(
"WindowFns must match for a bounded-vs-bounded/unbounded-vs-unbounded join.", e);
}
return standardJoin(extractedLeftRows, extractedRightRows,
leftNullRow, rightNullRow, stageName);
} else if (
(leftRows.isBounded() == PCollection.IsBounded.BOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.UNBOUNDED)
|| (leftRows.isBounded() == PCollection.IsBounded.UNBOUNDED
&& rightRows.isBounded() == PCollection.IsBounded.BOUNDED)
) {
if (joinType == JoinRelType.FULL) {
throw new UnsupportedOperationException("FULL OUTER JOIN is not supported when join "
+ "a bounded table with an unbounded table.");
}
if ((joinType == JoinRelType.LEFT
&& leftRows.isBounded() == PCollection.IsBounded.BOUNDED)
|| (joinType == JoinRelType.RIGHT
&& rightRows.isBounded() == PCollection.IsBounded.BOUNDED)) {
throw new UnsupportedOperationException(
"LEFT side of an OUTER JOIN must be Unbounded table.");
}
return sideInputJoin(extractedLeftRows, extractedRightRows,
leftNullRow, rightNullRow);
} else {
throw new UnsupportedOperationException(
"The inputs to the JOIN have un-joinnable windowFns: " + leftWinFn + ", " + rightWinFn);
}
}
private PCollection<BeamRecord> standardJoin(
PCollection<KV<BeamRecord, BeamRecord>> extractedLeftRows,
PCollection<KV<BeamRecord, BeamRecord>> extractedRightRows,
BeamRecord leftNullRow, BeamRecord rightNullRow, String stageName) {
PCollection<KV<BeamRecord, KV<BeamRecord, BeamRecord>>> joinedRows = null;
switch (joinType) {
case LEFT:
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.leftOuterJoin(extractedLeftRows, extractedRightRows, rightNullRow);
break;
case RIGHT:
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.rightOuterJoin(extractedLeftRows, extractedRightRows, leftNullRow);
break;
case FULL:
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.fullOuterJoin(extractedLeftRows, extractedRightRows, leftNullRow,
rightNullRow);
break;
case INNER:
default:
joinedRows = org.apache.beam.sdk.extensions.joinlibrary.Join
.innerJoin(extractedLeftRows, extractedRightRows);
break;
}
PCollection<BeamRecord> ret = joinedRows
.apply(stageName + "_JoinParts2WholeRow",
MapElements.via(new BeamJoinTransforms.JoinParts2WholeRow()))
.setCoder(CalciteUtils.toBeamRowType(getRowType()).getRecordCoder());
return ret;
}
public PCollection<BeamRecord> sideInputJoin(
PCollection<KV<BeamRecord, BeamRecord>> extractedLeftRows,
PCollection<KV<BeamRecord, BeamRecord>> extractedRightRows,
BeamRecord leftNullRow, BeamRecord rightNullRow) {
boolean swapped = (extractedLeftRows.isBounded() == PCollection.IsBounded.BOUNDED);
JoinRelType realJoinType =
(swapped && joinType != JoinRelType.INNER) ? JoinRelType.LEFT : joinType;
PCollection<KV<BeamRecord, BeamRecord>> realLeftRows =
swapped ? extractedRightRows : extractedLeftRows;
PCollection<KV<BeamRecord, BeamRecord>> realRightRows =
swapped ? extractedLeftRows : extractedRightRows;
BeamRecord realRightNullRow = swapped ? leftNullRow : rightNullRow;
return sideInputJoinHelper(realJoinType, realLeftRows, realRightRows,
realRightNullRow, swapped);
}
private PCollection<BeamRecord> sideInputJoinHelper(
JoinRelType joinType,
PCollection<KV<BeamRecord, BeamRecord>> leftRows,
PCollection<KV<BeamRecord, BeamRecord>> rightRows,
BeamRecord rightNullRow, boolean swapped) {
final PCollectionView<Map<BeamRecord, Iterable<BeamRecord>>> rowsView = rightRows
.apply(View.<BeamRecord, BeamRecord>asMultimap());
PCollection<BeamRecord> ret = leftRows
.apply(ParDo.of(new BeamJoinTransforms.SideInputJoinDoFn(
joinType, rightNullRow, rowsView, swapped)).withSideInputs(rowsView))
.setCoder(CalciteUtils.toBeamRowType(getRowType()).getRecordCoder());
return ret;
}
private BeamRecord buildNullRow(BeamRelNode relNode) {
BeamRecordSqlType leftType = CalciteUtils.toBeamRowType(relNode.getRowType());
return new BeamRecord(leftType, Collections.nCopies(leftType.getFieldCount(), null));
}
private List<Pair<Integer, Integer>> extractJoinColumns(int leftRowColumnCount) {
if (condition instanceof RexLiteral && (Boolean) ((RexLiteral) condition).getValue()) {
throw new UnsupportedOperationException("CROSS JOIN is not supported!");
}
RexCall call = (RexCall) condition;
List<Pair<Integer, Integer>> pairs = new ArrayList<>();
if ("AND".equals(call.getOperator().getName())) {
List<RexNode> operands = call.getOperands();
for (RexNode rexNode : operands) {
Pair<Integer, Integer> pair = extractOneJoinColumn((RexCall) rexNode, leftRowColumnCount);
pairs.add(pair);
}
} else if ("=".equals(call.getOperator().getName())) {
pairs.add(extractOneJoinColumn(call, leftRowColumnCount));
} else {
throw new UnsupportedOperationException(
"Operator " + call.getOperator().getName() + " is not supported in join condition");
}
return pairs;
}
private Pair<Integer, Integer> extractOneJoinColumn(RexCall oneCondition,
int leftRowColumnCount) {
List<RexNode> operands = oneCondition.getOperands();
final int leftIndex = Math.min(((RexInputRef) operands.get(0)).getIndex(),
((RexInputRef) operands.get(1)).getIndex());
final int rightIndex1 = Math.max(((RexInputRef) operands.get(0)).getIndex(),
((RexInputRef) operands.get(1)).getIndex());
final int rightIndex = rightIndex1 - leftRowColumnCount;
return new Pair<>(leftIndex, rightIndex);
}
private PCollection<BeamRecord> joinAsLookup(BeamRelNode leftRelNode, BeamRelNode rightRelNode,
PCollectionTuple inputPCollections, BeamSqlEnv sqlEnv) throws Exception {
PCollection<BeamRecord> factStream = leftRelNode.buildBeamPipeline(inputPCollections, sqlEnv);
BeamSqlSeekableTable seekableTable = getSeekableTableFromRelNode(rightRelNode, sqlEnv);
return factStream.apply("join_as_lookup",
new BeamJoinTransforms.JoinAsLookup(condition, seekableTable,
CalciteUtils.toBeamRowType(rightRelNode.getRowType()),
CalciteUtils.toBeamRowType(leftRelNode.getRowType()).getFieldCount()));
}
/**
* check if {@code BeamRelNode} implements {@code BeamSeekableTable}.
*/
private boolean seekable(BeamRelNode relNode, BeamSqlEnv sqlEnv) {
if (relNode instanceof BeamIOSourceRel) {
BeamIOSourceRel srcRel = (BeamIOSourceRel) relNode;
String tableName = Joiner.on('.').join(srcRel.getTable().getQualifiedName());
BeamSqlTable sourceTable = sqlEnv.findTable(tableName);
if (sourceTable instanceof BeamSqlSeekableTable) {
return true;
}
}
return false;
}
} |
I am wondering whether it wouldn't be simpler to change `result.handleAsync` to `result.whenAsync` and then to add the result of this operation to the `incompleteFuturesTracker`? That way we are sure that we will have handled the result before doing any other operations (e.g. failing/completing checkpoints). | public CompletableFuture<Acknowledge> sendEvent(OperatorEvent evt) {
if (!isReady()) {
throw new FlinkRuntimeException("SubtaskGateway is not ready, task not yet running.");
}
final SerializedValue<OperatorEvent> serializedEvent;
try {
serializedEvent = new SerializedValue<>(evt);
} catch (IOException e) {
throw new FlinkRuntimeException("Cannot serialize operator event", e);
}
final Callable<CompletableFuture<Acknowledge>> sendAction =
subtaskAccess.createEventSendAction(serializedEvent);
final CompletableFuture<Acknowledge> result = new CompletableFuture<>();
FutureUtils.assertNoException(
result.handleAsync(
(success, failure) -> {
if (failure != null) {
if (subtaskAccess.isStillRunning()) {
String msg =
String.format(
EVENT_LOSS_ERROR_MESSAGE,
evt,
subtaskAccess.subtaskName());
subtaskAccess.triggerTaskFailover(
new FlinkException(msg, failure));
}
nonSuccessFuturesTrack.removeFailedFuture(result);
}
return null;
},
sendingExecutor));
sendingExecutor.execute(
() -> {
nonSuccessFuturesTrack.trackFuture(result);
sender.sendEvent(sendAction, result);
});
return result;
} | nonSuccessFuturesTrack.removeFailedFuture(result); | public CompletableFuture<Acknowledge> sendEvent(OperatorEvent evt) {
if (!isReady()) {
throw new FlinkRuntimeException("SubtaskGateway is not ready, task not yet running.");
}
final SerializedValue<OperatorEvent> serializedEvent;
try {
serializedEvent = new SerializedValue<>(evt);
} catch (IOException e) {
throw new FlinkRuntimeException("Cannot serialize operator event", e);
}
final Callable<CompletableFuture<Acknowledge>> sendAction =
subtaskAccess.createEventSendAction(serializedEvent);
final CompletableFuture<Acknowledge> sendResult = new CompletableFuture<>();
final CompletableFuture<Acknowledge> result =
sendResult.whenCompleteAsync(
(success, failure) -> {
if (failure != null && subtaskAccess.isStillRunning()) {
String msg =
String.format(
EVENT_LOSS_ERROR_MESSAGE,
evt,
subtaskAccess.subtaskName());
Runnables.assertNoException(
() ->
subtaskAccess.triggerTaskFailover(
new FlinkException(msg, failure)));
}
},
sendingExecutor);
sendingExecutor.execute(
() -> {
sender.sendEvent(sendAction, sendResult);
incompleteFuturesTracker.trackFutureWhileIncomplete(result);
});
return result;
} | class SubtaskGatewayImpl implements OperatorCoordinator.SubtaskGateway {
private static final String EVENT_LOSS_ERROR_MESSAGE =
"An OperatorEvent from an OperatorCoordinator to a task was lost. "
+ "Triggering task failover to ensure consistency. Event: '%s', targetTask: %s";
private final SubtaskAccess subtaskAccess;
private final EventSender sender;
private final Executor sendingExecutor;
private final NonSuccessFuturesTrack nonSuccessFuturesTrack;
SubtaskGatewayImpl(
SubtaskAccess subtaskAccess,
EventSender sender,
Executor sendingExecutor,
NonSuccessFuturesTrack nonSuccessFuturesTrack) {
this.subtaskAccess = subtaskAccess;
this.sender = sender;
this.sendingExecutor = sendingExecutor;
this.nonSuccessFuturesTrack = nonSuccessFuturesTrack;
}
@Override
@Override
public ExecutionAttemptID getExecution() {
return subtaskAccess.currentAttempt();
}
@Override
public int getSubtask() {
return subtaskAccess.getSubtaskIndex();
}
private boolean isReady() {
return subtaskAccess.hasSwitchedToRunning().isDone();
}
} | class SubtaskGatewayImpl implements OperatorCoordinator.SubtaskGateway {
private static final String EVENT_LOSS_ERROR_MESSAGE =
"An OperatorEvent from an OperatorCoordinator to a task was lost. "
+ "Triggering task failover to ensure consistency. Event: '%s', targetTask: %s";
private final SubtaskAccess subtaskAccess;
private final EventSender sender;
private final Executor sendingExecutor;
private final IncompleteFuturesTracker incompleteFuturesTracker;
SubtaskGatewayImpl(
SubtaskAccess subtaskAccess,
EventSender sender,
Executor sendingExecutor,
IncompleteFuturesTracker incompleteFuturesTracker) {
this.subtaskAccess = subtaskAccess;
this.sender = sender;
this.sendingExecutor = sendingExecutor;
this.incompleteFuturesTracker = incompleteFuturesTracker;
}
@Override
@Override
public ExecutionAttemptID getExecution() {
return subtaskAccess.currentAttempt();
}
@Override
public int getSubtask() {
return subtaskAccess.getSubtaskIndex();
}
private boolean isReady() {
return subtaskAccess.hasSwitchedToRunning().isDone();
}
} |
This isn't quite right - we're now invalidating all of the StreamWriters when any one of them fails. I think instead you want to just null out the one that failed and allow it to be recreated the next get. | void invalidateWriteStream() {
if (streamAppendClient != null) {
synchronized (APPEND_CLIENTS) {
runAsyncIgnoreFailure(closeWriterExecutor, streamAppendClient::unpin);
@Nullable
List<StreamAppendClient> cachedAppendClients = APPEND_CLIENTS.getIfPresent(streamName);
if (cachedAppendClients != null
&& System.identityHashCode(cachedAppendClients.get(clientNumber))
== System.identityHashCode(streamAppendClient)) {
APPEND_CLIENTS.invalidate(streamName);
}
}
streamAppendClient = null;
}
} | == System.identityHashCode(streamAppendClient)) { | void invalidateWriteStream() {
if (streamAppendClient != null) {
synchronized (APPEND_CLIENTS) {
runAsyncIgnoreFailure(closeWriterExecutor, streamAppendClient::unpin);
String cacheEntryKey = getStreamAppendClientCacheEntryKey();
@Nullable
StreamAppendClient cachedAppendClient = APPEND_CLIENTS.getIfPresent(cacheEntryKey);
if (cachedAppendClient != null
&& System.identityHashCode(cachedAppendClient)
== System.identityHashCode(streamAppendClient)) {
APPEND_CLIENTS.invalidate(cacheEntryKey);
}
}
streamAppendClient = null;
}
} | class DestinationState {
private final String tableUrn;
private final MessageConverter<ElementT> messageConverter;
private String streamName = "";
private @Nullable StreamAppendClient streamAppendClient = null;
private long currentOffset = 0;
private List<ByteString> pendingMessages;
private transient @Nullable DatasetService datasetService;
private final Counter recordsAppended =
Metrics.counter(WriteRecordsDoFn.class, "recordsAppended");
private final Counter appendFailures =
Metrics.counter(WriteRecordsDoFn.class, "appendFailures");
private final Counter schemaMismatches =
Metrics.counter(WriteRecordsDoFn.class, "schemaMismatches");
private final Distribution inflightWaitSecondsDistribution =
Metrics.distribution(WriteRecordsDoFn.class, "streamWriterWaitSeconds");
private final boolean useDefaultStream;
private DescriptorWrapper descriptorWrapper;
private Instant nextCacheTickle;
private final int streamAppendClientCount;
private final int clientNumber;
public DestinationState(
String tableUrn,
MessageConverter<ElementT> messageConverter,
DatasetService datasetService,
boolean useDefaultStream,
int streamAppendClientCount) {
this.tableUrn = tableUrn;
this.messageConverter = messageConverter;
this.pendingMessages = Lists.newArrayList();
this.datasetService = datasetService;
this.useDefaultStream = useDefaultStream;
this.descriptorWrapper = messageConverter.getSchemaDescriptor();
this.streamAppendClientCount = streamAppendClientCount;
this.clientNumber = new Random().nextInt(streamAppendClientCount);
}
void teardown() {
maybeTickleCache();
if (streamAppendClient != null) {
runAsyncIgnoreFailure(closeWriterExecutor, streamAppendClient::unpin);
streamAppendClient = null;
}
}
String getDefaultStreamName() {
return BigQueryHelpers.stripPartitionDecorator(tableUrn) + "/streams/_default";
}
String createStreamIfNeeded() {
try {
if (!useDefaultStream) {
this.streamName =
Preconditions.checkNotNull(datasetService)
.createWriteStream(tableUrn, Type.PENDING)
.getName();
} else {
this.streamName = getDefaultStreamName();
}
} catch (Exception e) {
throw new RuntimeException(e);
}
return this.streamName;
}
List<StreamAppendClient> generateClients() {
return IntStream.range(0, streamAppendClientCount)
.mapToObj(
i -> {
try {
StreamAppendClient client =
datasetService.getStreamAppendClient(
streamName, descriptorWrapper.descriptor);
return client;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
})
.collect(Collectors.toList());
}
StreamAppendClient getStreamAppendClient(boolean lookupCache) {
try {
if (streamAppendClient == null) {
createStreamIfNeeded();
synchronized (APPEND_CLIENTS) {
if (lookupCache) {
this.streamAppendClient =
APPEND_CLIENTS.get(streamName, () -> generateClients()).get(clientNumber);
} else {
APPEND_CLIENTS.put(streamName, generateClients());
this.streamAppendClient =
APPEND_CLIENTS.get(streamName, () -> generateClients()).get(clientNumber);
}
this.streamAppendClient.pin();
}
this.currentOffset = 0;
nextCacheTickle = Instant.now().plus(java.time.Duration.ofMinutes(1));
}
return streamAppendClient;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
void maybeTickleCache() {
if (streamAppendClient != null && Instant.now().isAfter(nextCacheTickle)) {
synchronized (APPEND_CLIENTS) {
APPEND_CLIENTS.getIfPresent(streamName);
}
nextCacheTickle = Instant.now().plus(java.time.Duration.ofMinutes(1));
}
}
void addMessage(StorageApiWritePayload payload) throws Exception {
maybeTickleCache();
if (payload.getSchemaHash() != descriptorWrapper.hash) {
schemaMismatches.inc();
messageConverter.refreshSchema(payload.getSchemaHash());
descriptorWrapper = messageConverter.getSchemaDescriptor();
invalidateWriteStream();
if (useDefaultStream) {
getStreamAppendClient(false);
}
DynamicMessage msg =
DynamicMessage.parseFrom(descriptorWrapper.descriptor, payload.getPayload());
if (msg.getUnknownFields() != null && !msg.getUnknownFields().asMap().isEmpty()) {
throw new RuntimeException(
"Record schema does not match table. Unknown fields: " + msg.getUnknownFields());
}
}
pendingMessages.add(ByteString.copyFrom(payload.getPayload()));
}
@SuppressWarnings({"nullness"})
void flush(RetryManager<AppendRowsResponse, Context<AppendRowsResponse>> retryManager)
throws Exception {
if (pendingMessages.isEmpty()) {
return;
}
final ProtoRows.Builder inserts = ProtoRows.newBuilder();
inserts.addAllSerializedRows(pendingMessages);
ProtoRows protoRows = inserts.build();
pendingMessages.clear();
retryManager.addOperation(
c -> {
try {
StreamAppendClient writeStream = getStreamAppendClient(true);
long offset = -1;
if (!this.useDefaultStream) {
offset = this.currentOffset;
this.currentOffset += inserts.getSerializedRowsCount();
}
ApiFuture<AppendRowsResponse> response = writeStream.appendRows(offset, protoRows);
inflightWaitSecondsDistribution.update(writeStream.getInflightWaitSeconds());
if (writeStream.getInflightWaitSeconds() > 5) {
LOG.warn(
"Storage Api write delay more than " + writeStream.getInflightWaitSeconds());
}
return response;
} catch (Exception e) {
throw new RuntimeException(e);
}
},
contexts -> {
LOG.info(
"Append to stream "
+ streamName
+ " failed with error "
+ Iterables.getFirst(contexts, null).getError());
invalidateWriteStream();
appendFailures.inc();
return RetryType.RETRY_ALL_OPERATIONS;
},
response -> {
recordsAppended.inc(protoRows.getSerializedRowsCount());
},
new Context<>());
maybeTickleCache();
}
} | class DestinationState {
private final String tableUrn;
private final MessageConverter<ElementT> messageConverter;
private String streamName = "";
private @Nullable StreamAppendClient streamAppendClient = null;
private long currentOffset = 0;
private List<ByteString> pendingMessages;
private transient @Nullable DatasetService datasetService;
private final Counter recordsAppended =
Metrics.counter(WriteRecordsDoFn.class, "recordsAppended");
private final Counter appendFailures =
Metrics.counter(WriteRecordsDoFn.class, "appendFailures");
private final Counter schemaMismatches =
Metrics.counter(WriteRecordsDoFn.class, "schemaMismatches");
private final Distribution inflightWaitSecondsDistribution =
Metrics.distribution(WriteRecordsDoFn.class, "streamWriterWaitSeconds");
private final boolean useDefaultStream;
private DescriptorWrapper descriptorWrapper;
private Instant nextCacheTickle;
private final int clientNumber;
public DestinationState(
String tableUrn,
MessageConverter<ElementT> messageConverter,
DatasetService datasetService,
boolean useDefaultStream,
int streamAppendClientCount) {
this.tableUrn = tableUrn;
this.messageConverter = messageConverter;
this.pendingMessages = Lists.newArrayList();
this.datasetService = datasetService;
this.useDefaultStream = useDefaultStream;
this.descriptorWrapper = messageConverter.getSchemaDescriptor();
this.clientNumber = new Random().nextInt(streamAppendClientCount);
}
void teardown() {
maybeTickleCache();
if (streamAppendClient != null) {
runAsyncIgnoreFailure(closeWriterExecutor, streamAppendClient::unpin);
streamAppendClient = null;
}
}
String getDefaultStreamName() {
return BigQueryHelpers.stripPartitionDecorator(tableUrn) + "/streams/_default";
}
String getStreamAppendClientCacheEntryKey() {
if (useDefaultStream) {
return getDefaultStreamName() + "-client" + clientNumber;
}
return this.streamName;
}
String createStreamIfNeeded() {
try {
if (!useDefaultStream) {
this.streamName =
Preconditions.checkNotNull(datasetService)
.createWriteStream(tableUrn, Type.PENDING)
.getName();
} else {
this.streamName = getDefaultStreamName();
}
} catch (Exception e) {
throw new RuntimeException(e);
}
return this.streamName;
}
StreamAppendClient generateClient() throws Exception {
return datasetService.getStreamAppendClient(streamName, descriptorWrapper.descriptor);
}
StreamAppendClient getStreamAppendClient(boolean lookupCache) {
try {
if (streamAppendClient == null) {
createStreamIfNeeded();
synchronized (APPEND_CLIENTS) {
if (lookupCache) {
this.streamAppendClient =
APPEND_CLIENTS.get(
getStreamAppendClientCacheEntryKey(), () -> generateClient());
} else {
this.streamAppendClient = generateClient();
APPEND_CLIENTS.put(getStreamAppendClientCacheEntryKey(), streamAppendClient);
}
this.streamAppendClient.pin();
}
this.currentOffset = 0;
nextCacheTickle = Instant.now().plus(java.time.Duration.ofMinutes(1));
}
return streamAppendClient;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
void maybeTickleCache() {
if (streamAppendClient != null && Instant.now().isAfter(nextCacheTickle)) {
synchronized (APPEND_CLIENTS) {
APPEND_CLIENTS.getIfPresent(getStreamAppendClientCacheEntryKey());
}
nextCacheTickle = Instant.now().plus(java.time.Duration.ofMinutes(1));
}
}
void addMessage(StorageApiWritePayload payload) throws Exception {
maybeTickleCache();
if (payload.getSchemaHash() != descriptorWrapper.hash) {
schemaMismatches.inc();
messageConverter.refreshSchema(payload.getSchemaHash());
descriptorWrapper = messageConverter.getSchemaDescriptor();
invalidateWriteStream();
if (useDefaultStream) {
getStreamAppendClient(false);
}
DynamicMessage msg =
DynamicMessage.parseFrom(descriptorWrapper.descriptor, payload.getPayload());
if (msg.getUnknownFields() != null && !msg.getUnknownFields().asMap().isEmpty()) {
throw new RuntimeException(
"Record schema does not match table. Unknown fields: " + msg.getUnknownFields());
}
}
pendingMessages.add(ByteString.copyFrom(payload.getPayload()));
}
@SuppressWarnings({"nullness"})
void flush(RetryManager<AppendRowsResponse, Context<AppendRowsResponse>> retryManager)
throws Exception {
if (pendingMessages.isEmpty()) {
return;
}
final ProtoRows.Builder inserts = ProtoRows.newBuilder();
inserts.addAllSerializedRows(pendingMessages);
ProtoRows protoRows = inserts.build();
pendingMessages.clear();
retryManager.addOperation(
c -> {
try {
StreamAppendClient writeStream = getStreamAppendClient(true);
long offset = -1;
if (!this.useDefaultStream) {
offset = this.currentOffset;
this.currentOffset += inserts.getSerializedRowsCount();
}
ApiFuture<AppendRowsResponse> response = writeStream.appendRows(offset, protoRows);
inflightWaitSecondsDistribution.update(writeStream.getInflightWaitSeconds());
if (writeStream.getInflightWaitSeconds() > 5) {
LOG.warn(
"Storage Api write delay more than {} seconds.",
writeStream.getInflightWaitSeconds());
}
return response;
} catch (Exception e) {
throw new RuntimeException(e);
}
},
contexts -> {
LOG.warn(
"Append to stream {} by client
streamName,
clientNumber,
retrieveErrorDetails(contexts));
invalidateWriteStream();
appendFailures.inc();
return RetryType.RETRY_ALL_OPERATIONS;
},
response -> {
recordsAppended.inc(protoRows.getSerializedRowsCount());
},
new Context<>());
maybeTickleCache();
}
String retrieveErrorDetails(Iterable<Context<AppendRowsResponse>> contexts) {
return StreamSupport.stream(contexts.spliterator(), false)
.map(ctx -> ctx.getError())
.map(
err ->
String.format(
"message: %s, stacktrace: %s",
err.toString(),
Lists.newArrayList(err.getStackTrace()).stream()
.map(se -> se.toString())
.collect(Collectors.joining("\n"))))
.collect(Collectors.joining(","));
}
} |
This change is caused by code formatting, and have nothing to do with the business logic of this PR. I have rolled it back to make this PR more focused. | public String getErrorRespWhenUnauthorized(AccessDeniedException accessDeniedException) {
if (Strings.isNullOrEmpty(accessDeniedException.getMessage())) {
ConnectContext context = ConnectContext.get();
if (context != null) {
AuthorizationMgr authorizationMgr = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
UserIdentity userIdentity = context.getCurrentUserIdentity();
List<String> activatedRoles = authorizationMgr.getRoleNamesByRoleIds(context.getCurrentRoleIds());
List<String> inactivatedRoles =
authorizationMgr.getInactivatedRoleNamesByUser(userIdentity, activatedRoles);
return "Access denied for user " + userIdentity + ". " +
String.format(ErrorCode.ERR_ACCESS_DENIED_HINT_MSG_FORMAT, activatedRoles, inactivatedRoles);
}
return "Access denied.";
} else {
return accessDeniedException.getMessage();
}
} | return "Access denied for user " + userIdentity + ". " + | public String getErrorRespWhenUnauthorized(AccessDeniedException accessDeniedException) {
if (Strings.isNullOrEmpty(accessDeniedException.getMessage())) {
ConnectContext context = ConnectContext.get();
if (context != null) {
AuthorizationMgr authorizationMgr = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
UserIdentity userIdentity = context.getCurrentUserIdentity();
List<String> activatedRoles = authorizationMgr.getRoleNamesByRoleIds(context.getCurrentRoleIds());
List<String> inactivatedRoles =
authorizationMgr.getInactivatedRoleNamesByUser(userIdentity, activatedRoles);
return "Access denied for user " + userIdentity + ". " +
String.format(ErrorCode.ERR_ACCESS_DENIED_HINT_MSG_FORMAT, activatedRoles, inactivatedRoles);
}
return "Access denied.";
} else {
return accessDeniedException.getMessage();
}
} | class RestBaseAction extends BaseAction {
private static final Logger LOG = LogManager.getLogger(RestBaseAction.class);
protected static final String CATALOG_KEY = "catalog";
protected static final String DB_KEY = "db";
protected static final String TABLE_KEY = "table";
protected static final String LABEL_KEY = "label";
protected static final String WAREHOUSE_KEY = "warehouse";
protected static final String PAGE_NUM_KEY = "page_num";
protected static final String PAGE_SIZE_KEY = "page_size";
protected static final int DEFAULT_PAGE_NUM = 0;
protected static final int DEFAULT_PAGE_SIZE = 100;
protected static final String JSON_CONTENT_TYPE = "application/json; charset=UTF-8";
protected static ObjectMapper mapper = new ObjectMapper();
public RestBaseAction(ActionController controller) {
super(controller);
}
@Override
public void handleRequest(BaseRequest request) {
BaseResponse response = new BaseResponse();
try {
execute(request, response);
} catch (AccessDeniedException accessDeniedException) {
LOG.warn("failed to process url: {}", request.getRequest().uri(), accessDeniedException);
response.updateHeader(HttpHeaderNames.WWW_AUTHENTICATE.toString(), "Basic realm=\"\"");
response.appendContent(new RestBaseResult(getErrorRespWhenUnauthorized(accessDeniedException)).toJson());
writeResponse(request, response, HttpResponseStatus.UNAUTHORIZED);
} catch (DdlException e) {
LOG.warn("fail to process url: {}", request.getRequest().uri(), e);
sendResult(request, response, new RestBaseResult(e.getMessage()));
} catch (Exception e) {
LOG.warn("fail to process url: {}", request.getRequest().uri(), e);
String msg = e.getMessage();
if (msg == null) {
msg = e.toString();
}
response.appendContent(new RestBaseResult(msg).toJson());
writeResponse(request, response, HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
}
@VisibleForTesting
@Override
public void execute(BaseRequest request, BaseResponse response) throws DdlException, AccessDeniedException {
ActionAuthorizationInfo authInfo = getAuthorizationInfo(request);
UserIdentity currentUser = checkPassword(authInfo);
HttpConnectContext ctx = request.getConnectContext();
ctx.setGlobalStateMgr(GlobalStateMgr.getCurrentState());
ctx.setNettyChannel(request.getContext());
ctx.setQualifiedUser(authInfo.fullUserName);
ctx.setQueryId(UUIDUtil.genUUID());
ctx.setRemoteIP(authInfo.remoteIp);
ctx.setCurrentUserIdentity(currentUser);
ctx.setCurrentRoleIds(currentUser);
ctx.setThreadLocalInfo();
executeWithoutPassword(request, response);
}
protected void executeWithoutPassword(BaseRequest request, BaseResponse response)
throws DdlException, AccessDeniedException {
throw new DdlException("Not implemented");
}
public void sendResult(BaseRequest request, BaseResponse response, RestBaseResult result) {
sendResult(request, response, HttpResponseStatus.OK, result);
}
public void sendResult(BaseRequest request, BaseResponse response, HttpResponseStatus status) {
sendResult(request, response, status, null);
}
public void sendResult(BaseRequest request, BaseResponse response) {
sendResult(request, response, HttpResponseStatus.OK);
}
public void sendResult(BaseRequest request,
BaseResponse response,
HttpResponseStatus status,
RestBaseResult result) {
if (null != result) {
response.setContentType(JSON_CONTENT_TYPE);
response.appendContent(result.toJson());
}
writeResponse(request, response, status);
}
public void sendResultByJson(BaseRequest request, BaseResponse response, Object obj) {
String result = "";
try {
result = mapper.writeValueAsString(obj);
} catch (Exception e) {
}
response.setContentType(JSON_CONTENT_TYPE);
response.getContent().append(result);
sendResult(request, response);
}
public void redirectTo(BaseRequest request, BaseResponse response, TNetworkAddress addr)
throws DdlException {
String urlStr = request.getRequest().uri();
URI urlObj;
URI resultUriObj;
try {
urlObj = new URI(urlStr);
resultUriObj = new URI("http", null, addr.getHostname(),
addr.getPort(), urlObj.getPath(), urlObj.getQuery(), null);
} catch (URISyntaxException e) {
LOG.warn(e.getMessage());
throw new DdlException(e.getMessage());
}
response.updateHeader(HttpHeaderNames.LOCATION.toString(), resultUriObj.toString());
writeResponse(request, response, HttpResponseStatus.TEMPORARY_REDIRECT);
}
public boolean redirectToLeader(BaseRequest request, BaseResponse response) throws DdlException {
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
if (globalStateMgr.isLeader()) {
return false;
}
Pair<String, Integer> leaderIpAndPort = globalStateMgr.getNodeMgr().getLeaderIpAndHttpPort();
redirectTo(request, response,
new TNetworkAddress(leaderIpAndPort.first, leaderIpAndPort.second));
return true;
}
/**
* Get single parameter value.
*
* @param request http request
* @param paramName parameter name
* @param typeConverter convert the String parameter value to target type
* @return parameter value, or {@code null} if missing
*/
protected static <T> T getSingleParameter(BaseRequest request,
String paramName,
Function<String, T> typeConverter) {
return getSingleParameterOrDefault(request, paramName, null, typeConverter);
}
/**
* Get single parameter value.
*
* @param request http request
* @param paramName parameter name
* @param typeConverter convert the String parameter value to target type
* @return parameter value
* @throws StarRocksHttpException if parameter is missing
*/
protected static <T> T getSingleParameterRequired(BaseRequest request,
String paramName,
Function<String, T> typeConverter) {
String value = request.getSingleParameter(paramName);
if (null == value) {
throw new StarRocksHttpException(
HttpResponseStatus.BAD_REQUEST,
String.format("Missing parameter %s", paramName)
);
}
return typeConverter.apply(value);
}
/**
* Get single parameter value.
*
* @param request http request
* @param paramName parameter name
* @param defaultValue default parameter value if missing
* @param typeConverter convert the String parameter value to target type
* @return parameter value, or {@code defaultValue} if missing
*/
protected static <T> T getSingleParameterOrDefault(BaseRequest request,
String paramName,
T defaultValue,
Function<String, T> typeConverter) {
String value = request.getSingleParameter(paramName);
return Optional.ofNullable(value).map(typeConverter).orElse(defaultValue);
}
protected static int getPageNum(BaseRequest request) {
return getSingleParameterOrDefault(request, PAGE_NUM_KEY, DEFAULT_PAGE_NUM, value -> {
int pn = NumberUtils.toInt(value, DEFAULT_PAGE_NUM);
return pn <= 0 ? DEFAULT_PAGE_NUM : pn;
});
}
protected static int getPageSize(BaseRequest request) {
return getSingleParameterOrDefault(request, PAGE_SIZE_KEY, DEFAULT_PAGE_SIZE, value -> {
int ps = NumberUtils.toInt(value, DEFAULT_PAGE_SIZE);
return ps <= 0 ? DEFAULT_PAGE_SIZE : ps;
});
}
} | class RestBaseAction extends BaseAction {
private static final Logger LOG = LogManager.getLogger(RestBaseAction.class);
protected static final String CATALOG_KEY = "catalog";
protected static final String DB_KEY = "db";
protected static final String TABLE_KEY = "table";
protected static final String LABEL_KEY = "label";
protected static final String WAREHOUSE_KEY = "warehouse";
protected static final String PAGE_NUM_KEY = "page_num";
protected static final String PAGE_SIZE_KEY = "page_size";
protected static final int DEFAULT_PAGE_NUM = 0;
protected static final int DEFAULT_PAGE_SIZE = 100;
protected static final String JSON_CONTENT_TYPE = "application/json; charset=UTF-8";
protected static ObjectMapper mapper = new ObjectMapper();
public RestBaseAction(ActionController controller) {
super(controller);
}
@Override
public void handleRequest(BaseRequest request) {
BaseResponse response = new BaseResponse();
try {
execute(request, response);
} catch (AccessDeniedException accessDeniedException) {
LOG.warn("failed to process url: {}", request.getRequest().uri(), accessDeniedException);
response.updateHeader(HttpHeaderNames.WWW_AUTHENTICATE.toString(), "Basic realm=\"\"");
response.appendContent(new RestBaseResult(getErrorRespWhenUnauthorized(accessDeniedException)).toJson());
writeResponse(request, response, HttpResponseStatus.UNAUTHORIZED);
} catch (DdlException e) {
LOG.warn("fail to process url: {}", request.getRequest().uri(), e);
sendResult(request, response, new RestBaseResult(e.getMessage()));
} catch (Exception e) {
LOG.warn("fail to process url: {}", request.getRequest().uri(), e);
String msg = e.getMessage();
if (msg == null) {
msg = e.toString();
}
response.appendContent(new RestBaseResult(msg).toJson());
writeResponse(request, response, HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
}
@VisibleForTesting
@Override
public void execute(BaseRequest request, BaseResponse response) throws DdlException, AccessDeniedException {
ActionAuthorizationInfo authInfo = getAuthorizationInfo(request);
UserIdentity currentUser = checkPassword(authInfo);
HttpConnectContext ctx = request.getConnectContext();
ctx.setGlobalStateMgr(GlobalStateMgr.getCurrentState());
ctx.setNettyChannel(request.getContext());
ctx.setQualifiedUser(authInfo.fullUserName);
ctx.setQueryId(UUIDUtil.genUUID());
ctx.setRemoteIP(authInfo.remoteIp);
ctx.setCurrentUserIdentity(currentUser);
ctx.setCurrentRoleIds(currentUser);
ctx.setThreadLocalInfo();
executeWithoutPassword(request, response);
}
protected void executeWithoutPassword(BaseRequest request, BaseResponse response)
throws DdlException, AccessDeniedException {
throw new DdlException("Not implemented");
}
public void sendResult(BaseRequest request, BaseResponse response, RestBaseResult result) {
sendResult(request, response, HttpResponseStatus.OK, result);
}
public void sendResult(BaseRequest request, BaseResponse response, HttpResponseStatus status) {
sendResult(request, response, status, null);
}
public void sendResult(BaseRequest request, BaseResponse response) {
sendResult(request, response, HttpResponseStatus.OK);
}
public void sendResult(BaseRequest request,
BaseResponse response,
HttpResponseStatus status,
RestBaseResult result) {
if (null != result) {
response.setContentType(JSON_CONTENT_TYPE);
response.appendContent(result.toJson());
}
writeResponse(request, response, status);
}
public void sendResultByJson(BaseRequest request, BaseResponse response, Object obj) {
String result = "";
try {
result = mapper.writeValueAsString(obj);
} catch (Exception e) {
}
response.setContentType(JSON_CONTENT_TYPE);
response.getContent().append(result);
sendResult(request, response);
}
public void redirectTo(BaseRequest request, BaseResponse response, TNetworkAddress addr)
throws DdlException {
String urlStr = request.getRequest().uri();
URI urlObj;
URI resultUriObj;
try {
urlObj = new URI(urlStr);
resultUriObj = new URI("http", null, addr.getHostname(),
addr.getPort(), urlObj.getPath(), urlObj.getQuery(), null);
} catch (URISyntaxException e) {
LOG.warn(e.getMessage());
throw new DdlException(e.getMessage());
}
response.updateHeader(HttpHeaderNames.LOCATION.toString(), resultUriObj.toString());
writeResponse(request, response, HttpResponseStatus.TEMPORARY_REDIRECT);
}
public boolean redirectToLeader(BaseRequest request, BaseResponse response) throws DdlException {
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
if (globalStateMgr.isLeader()) {
return false;
}
Pair<String, Integer> leaderIpAndPort = globalStateMgr.getNodeMgr().getLeaderIpAndHttpPort();
redirectTo(request, response,
new TNetworkAddress(leaderIpAndPort.first, leaderIpAndPort.second));
return true;
}
/**
* Get single parameter value.
*
* @param request http request
* @param paramName parameter name
* @param typeConverter convert the String parameter value to target type
* @return parameter value, or {@code null} if missing
*/
protected static <T> T getSingleParameter(BaseRequest request,
String paramName,
Function<String, T> typeConverter) {
return getSingleParameterOrDefault(request, paramName, null, typeConverter);
}
/**
* Get single parameter value.
*
* @param request http request
* @param paramName parameter name
* @param typeConverter convert the String parameter value to target type
* @return parameter value
* @throws StarRocksHttpException if parameter is missing
*/
protected static <T> T getSingleParameterRequired(BaseRequest request,
String paramName,
Function<String, T> typeConverter) {
String value = request.getSingleParameter(paramName);
if (null == value) {
throw new StarRocksHttpException(
HttpResponseStatus.BAD_REQUEST,
String.format("Missing parameter %s", paramName)
);
}
return typeConverter.apply(value);
}
/**
* Get single parameter value.
*
* @param request http request
* @param paramName parameter name
* @param defaultValue default parameter value if missing
* @param typeConverter convert the String parameter value to target type
* @return parameter value, or {@code defaultValue} if missing
*/
protected static <T> T getSingleParameterOrDefault(BaseRequest request,
String paramName,
T defaultValue,
Function<String, T> typeConverter) {
String value = request.getSingleParameter(paramName);
return Optional.ofNullable(value).map(typeConverter).orElse(defaultValue);
}
protected static int getPageNum(BaseRequest request) {
return getSingleParameterOrDefault(request, PAGE_NUM_KEY, DEFAULT_PAGE_NUM, value -> {
int pn = NumberUtils.toInt(value, DEFAULT_PAGE_NUM);
return pn <= 0 ? DEFAULT_PAGE_NUM : pn;
});
}
protected static int getPageSize(BaseRequest request) {
return getSingleParameterOrDefault(request, PAGE_SIZE_KEY, DEFAULT_PAGE_SIZE, value -> {
int ps = NumberUtils.toInt(value, DEFAULT_PAGE_SIZE);
return ps <= 0 ? DEFAULT_PAGE_SIZE : ps;
});
}
} |
the path could still refer to an object instead of a directory | private void deleteRecursively(Path path) throws IOException {
final FileStatus[] containingFiles =
Preconditions.checkNotNull(
listStatus(path),
"Hadoop FileSystem.listStatus should never return null based on its contract.");
if (containingFiles.length == 0) {
deleteObject(path);
return;
}
IOException exception = null;
for (FileStatus fileStatus : containingFiles) {
final Path childPath = fileStatus.getPath();
try {
if (fileStatus.isDir()) {
deleteRecursively(childPath);
} else {
deleteObject(childPath);
}
} catch (IOException e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
}
if (exception != null) {
throw exception;
}
} | IOException exception = null; | private void deleteRecursively(Path path) throws IOException {
final FileStatus[] containingFiles =
Preconditions.checkNotNull(
listStatus(path),
"Hadoop FileSystem.listStatus should never return null based on its contract.");
if (containingFiles.length == 0) {
deleteObject(path);
return;
}
IOException exception = null;
for (FileStatus fileStatus : containingFiles) {
final Path childPath = fileStatus.getPath();
try {
if (fileStatus.isDir()) {
deleteRecursively(childPath);
} else {
deleteObject(childPath);
}
} catch (IOException e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
}
if (exception != null) {
throw exception;
}
} | class FlinkS3PrestoFileSystem extends FlinkS3FileSystem {
public FlinkS3PrestoFileSystem(
FileSystem hadoopS3FileSystem,
String localTmpDirectory,
@Nullable String entropyInjectionKey,
int entropyLength,
@Nullable S3AccessHelper s3UploadHelper,
long s3uploadPartSize,
int maxConcurrentUploadsPerStream) {
super(
hadoopS3FileSystem,
localTmpDirectory,
entropyInjectionKey,
entropyLength,
s3UploadHelper,
s3uploadPartSize,
maxConcurrentUploadsPerStream);
}
@Override
public boolean delete(Path path, boolean recursive) throws IOException {
if (recursive) {
deleteRecursively(path);
} else {
deleteObject(path);
}
return true;
}
/**
* Deletes the object referenced by the passed {@code path}.
*
* @param path The path referring to the object that shall be deleted.
* @throws IOException if an error occurred while deleting the file other than the {@code path}
* referring to a non-empty directory.
*/
private void deleteObject(Path path) throws IOException {
if (!exists(path)) {
return;
}
boolean success = true;
IOException actualException = null;
try {
success = super.delete(path, false);
} catch (IOException e) {
actualException = e;
}
if (!success || actualException != null) {
if (exists(path)) {
throw Optional.ofNullable(actualException)
.orElse(
new IOException(
path.getPath()
+ " could not be deleted for unknown reasons."));
}
}
}
} | class FlinkS3PrestoFileSystem extends FlinkS3FileSystem {
public FlinkS3PrestoFileSystem(
FileSystem hadoopS3FileSystem,
String localTmpDirectory,
@Nullable String entropyInjectionKey,
int entropyLength,
@Nullable S3AccessHelper s3UploadHelper,
long s3uploadPartSize,
int maxConcurrentUploadsPerStream) {
super(
hadoopS3FileSystem,
localTmpDirectory,
entropyInjectionKey,
entropyLength,
s3UploadHelper,
s3uploadPartSize,
maxConcurrentUploadsPerStream);
}
@Override
public boolean delete(Path path, boolean recursive) throws IOException {
if (recursive) {
deleteRecursively(path);
} else {
deleteObject(path);
}
return true;
}
/**
* Deletes the object referenced by the passed {@code path}. This method is used to work around
* the fact that Presto doesn't allow us to differentiate between deleting a non-existing object
* and some other errors. Therefore, a final check for existence is necessary in case of an
* error or false return value.
*
* @param path The path referring to the object that shall be deleted.
* @throws IOException if an error occurred while deleting the file other than the {@code path}
* referring to a non-empty directory.
*/
private void deleteObject(Path path) throws IOException {
boolean success = true;
IOException actualException = null;
try {
success = super.delete(path, false);
} catch (IOException e) {
actualException = e;
}
if (!success || actualException != null) {
if (exists(path)) {
throw Optional.ofNullable(actualException)
.orElse(
new IOException(
path.getPath()
+ " could not be deleted for unknown reasons."));
}
}
}
} |
```suggestion private static final Map<Byte, TransactionState> BYTE_TO_STATE = Arrays.stream(TransactionState.values()) .collect(Collectors.toMap(e -> e.state, e -> e)); static TransactionState fromByte(byte state) { TransactionState transactionState = BYTE_TO_STATE.get(state); if (transactionState == null) { throw new IllegalArgumentException( String.format("The given state %s is not supported.", state)); } return transactionState; } ``` | private static byte readTransactionState(ByteBuffer buffer) {
buffer.getLong();
buffer.getShort();
buffer.getInt();
return buffer.get();
} | private static byte readTransactionState(ByteBuffer buffer) {
buffer.getLong();
buffer.getShort();
buffer.getInt();
return buffer.get();
} | class KafkaTransactionLog implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(KafkaTransactionLog.class);
private static final Duration CONSUMER_POLL_DURATION = Duration.ofSeconds(1);
private static final Set<TransactionState> TERMINAL_TRANSACTION_STATES =
ImmutableSet.of(CompleteCommit, CompleteAbort, Dead);
private static final String TRANSACTIONAL_ID_DELIMITER = "-";
private static final int SUPPORTED_KAFKA_SCHEMA_VERSION = 0;
private final KafkaConsumer<byte[], byte[]> consumer;
private final KafkaWriterState main;
private final int numberOfParallelSubtasks;
private final List<KafkaWriterState> combined;
private final OptionalInt minOthers;
/**
* Constructor creating a KafkaTransactionLog.
*
* @param kafkaConfig used to configure the {@link KafkaConsumer} to query the topic containing
* the transaction information
* @param main the {@link KafkaWriterState} which was previously snapshotted by this subtask
* @param others the {@link KafkaWriterState}s which are from different subtasks i.e. in case of
* a scale-in
* @param numberOfParallelSubtasks current number of parallel sink tasks
*/
KafkaTransactionLog(
Properties kafkaConfig,
KafkaWriterState main,
List<KafkaWriterState> others,
int numberOfParallelSubtasks) {
this.main = checkNotNull(main, "mainState");
checkNotNull(others, "othersState");
this.minOthers =
others.stream()
.map(KafkaWriterState::getSubtaskId)
.mapToInt(Integer::intValue)
.min();
this.combined =
new ImmutableList.Builder<KafkaWriterState>().add(main).addAll(others).build();
this.numberOfParallelSubtasks = numberOfParallelSubtasks;
final Properties copy = new Properties();
copy.putAll(checkNotNull(kafkaConfig, "kafkaConfig"));
copy.put("enable.auto.commit", false);
copy.put("key.deserializer", ByteArrayDeserializer.class.getName());
copy.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
copy.put("auto.offset.reset", "earliest");
copy.put("value.deserializer", ByteArrayDeserializer.class.getName());
copy.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
this.consumer = new KafkaConsumer<>(copy);
this.consumer.subscribe(ImmutableList.of(TRANSACTION_STATE_TOPIC_NAME));
}
/**
* This method queries Kafka's internal transaction topic and filters the transactions for the
* following rules.
* <li>transaction is in no terminal state {@link
* KafkaTransactionLog
* <li>transactionalIdPrefix equals the one from {@link
* <li>Either [transaction's subtaskId % {@link
* subtaskId] or [transaction's subtaskId == {@link
* checkpointOffset >= {@link
* {@link
*
* @return all transactionIds which must be aborted before starting new transactions.
*/
public Set<String> getTransactionsToAbort() {
ConsumerRecords<byte[], byte[]> records = consumer.poll(CONSUMER_POLL_DURATION);
final Set<String> openTransactions = new HashSet<>();
while (!records.isEmpty()) {
records.records(TRANSACTION_STATE_TOPIC_NAME)
.forEach(maybeAddTransaction(openTransactions));
records = consumer.poll(CONSUMER_POLL_DURATION);
}
return openTransactions;
}
/**
* Constructs a transactionalId with the following format {@code
* transactionalIdPrefix-subtaskId-offset}.
*
* @param transactionalIdPrefix prefix for the id
* @param subtaskId describing the subtask which is opening the transaction
* @param offset an always incrementing number usually capturing the number of checkpoints taken
* by the subtask
* @return transactionalId
*/
public static String buildTransactionalId(
String transactionalIdPrefix, int subtaskId, long offset) {
final StringBuilder sb = new StringBuilder();
return sb.append(transactionalIdPrefix)
.append(TRANSACTIONAL_ID_DELIMITER)
.append(subtaskId)
.append(TRANSACTIONAL_ID_DELIMITER)
.append(offset)
.toString();
}
private Consumer<ConsumerRecord<byte[], byte[]>> maybeAddTransaction(
Set<String> openTransactions) {
return record -> {
final ByteBuffer keyBuffer = ByteBuffer.wrap(record.key());
checkKafkaSchemaVersionMatches(keyBuffer);
keyBuffer.getShort();
final String transactionalId = StandardCharsets.US_ASCII.decode(keyBuffer).toString();
final ByteBuffer valueBuffer = ByteBuffer.wrap(record.value());
checkKafkaSchemaVersionMatches(valueBuffer);
final TransactionState state =
TransactionState.fromByte(readTransactionState(valueBuffer));
LOG.debug("Transaction {} is in state {}", transactionalId, state);
if (isTransactionInFinalState(state)) {
openTransactions.remove(transactionalId);
return;
}
final Optional<KafkaWriterState> openTransactionOpt =
fromTransactionalId(transactionalId);
if (!openTransactionOpt.isPresent()) {
return;
}
final KafkaWriterState openTransaction = openTransactionOpt.get();
if (!openTransaction
.getTransactionalIdPrefix()
.equals(main.getTransactionalIdPrefix())) {
LOG.debug(
"The transactionalId prefixes differ. Open: {}, Recovered: {}",
openTransaction.getTransactionalIdPrefix(),
main.getTransactionalIdPrefix());
return;
}
final int openSubtaskIndex = openTransaction.getSubtaskId();
final long openCheckpointOffset = openTransaction.getTransactionalIdOffset();
final boolean commonRemainder =
openSubtaskIndex % numberOfParallelSubtasks == main.getSubtaskId()
&& openSubtaskIndex != main.getSubtaskId();
final boolean sameSubtaskHigherCheckpoint =
hasSameSubtaskWithHigherCheckpoint(openSubtaskIndex, openCheckpointOffset);
final boolean higherSubtasksWithFirstCheckpoint =
minOthers.isPresent()
&& minOthers.getAsInt() < openSubtaskIndex
&& openCheckpointOffset == 0;
if (!commonRemainder
&& !sameSubtaskHigherCheckpoint
&& !higherSubtasksWithFirstCheckpoint) {
return;
}
openTransactions.add(transactionalId);
};
}
@Override
public void close() {
consumer.close();
}
private boolean hasSameSubtaskWithHigherCheckpoint(
int openSubtaskIndex, long openCheckpointOffset) {
return combined.stream()
.anyMatch(
state ->
state.getSubtaskId() == openSubtaskIndex
&& state.getTransactionalIdOffset()
<= openCheckpointOffset);
}
private static boolean isTransactionInFinalState(TransactionState state) {
return TERMINAL_TRANSACTION_STATES.contains(state);
}
private static Optional<KafkaWriterState> fromTransactionalId(String transactionalId) {
final String[] splits = transactionalId.split(TRANSACTIONAL_ID_DELIMITER);
if (splits.length < 3) {
LOG.debug("Transaction {} was not created by the Flink Kafka sink", transactionalId);
return Optional.empty();
}
try {
final long checkpointOffset = Long.parseLong(splits[splits.length - 1]);
final int subtaskId = Integer.parseInt(splits[splits.length - 2]);
return Optional.of(
new KafkaWriterState(
String.join(
TRANSACTIONAL_ID_DELIMITER,
Arrays.copyOfRange(splits, 0, splits.length - 2)),
subtaskId,
checkpointOffset));
} catch (NumberFormatException e) {
LOG.debug(
"Transaction {} was not created by the Flink Kafka sink: {}",
transactionalId,
e);
return Optional.empty();
}
}
enum TransactionState {
Empty(Byte.parseByte("0")),
Ongoing(Byte.parseByte("1")),
PrepareCommit(Byte.parseByte("2")),
PrepareAbort(Byte.parseByte("3")),
CompleteCommit(Byte.parseByte("4")),
CompleteAbort(Byte.parseByte("5")),
Dead(Byte.parseByte("6")),
PrepareEpochFence(Byte.parseByte("7"));
private final byte state;
TransactionState(byte state) {
this.state = state;
}
static TransactionState fromByte(byte state) {
return Arrays.stream(TransactionState.values())
.filter((value) -> value.state == state)
.findFirst()
.orElseThrow(
() ->
new IllegalArgumentException(
String.format(
"The given state %s is not supported.",
state)));
}
}
private static void checkKafkaSchemaVersionMatches(ByteBuffer buffer) {
final short version = buffer.getShort();
if (version != SUPPORTED_KAFKA_SCHEMA_VERSION) {
throw new IllegalStateException(
String.format(
"Kafka has changed the schema version from %s to %s",
SUPPORTED_KAFKA_SCHEMA_VERSION, version));
}
}
} | class KafkaTransactionLog implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(KafkaTransactionLog.class);
private static final Duration CONSUMER_POLL_DURATION = Duration.ofSeconds(1);
private static final Set<TransactionState> TERMINAL_TRANSACTION_STATES =
ImmutableSet.of(CompleteCommit, CompleteAbort, Dead);
private static final int SUPPORTED_KAFKA_SCHEMA_VERSION = 0;
private final KafkaConsumer<byte[], byte[]> consumer;
private final KafkaWriterState main;
private final TransactionsToAbortChecker transactionToAbortChecker;
/**
* Constructor creating a KafkaTransactionLog.
*
* @param kafkaConfig used to configure the {@link KafkaConsumer} to query the topic containing
* the transaction information
* @param main the {@link KafkaWriterState} which was previously snapshotted by this subtask
* @param others the {@link KafkaWriterState}s which are from different subtasks i.e. in case of
* a scale-in
* @param numberOfParallelSubtasks current number of parallel sink tasks
*/
KafkaTransactionLog(
Properties kafkaConfig,
KafkaWriterState main,
List<KafkaWriterState> others,
int numberOfParallelSubtasks) {
this.main = checkNotNull(main, "mainState");
checkNotNull(others, "othersState");
final Map<Integer, Long> subtaskIdCheckpointOffsetMapping =
new ImmutableList.Builder<KafkaWriterState>()
.add(main).addAll(others).build().stream()
.collect(
Collectors.toMap(
KafkaWriterState::getSubtaskId,
KafkaWriterState::getTransactionalIdOffset));
final Properties consumerConfig = new Properties();
consumerConfig.putAll(checkNotNull(kafkaConfig, "kafkaConfig"));
consumerConfig.put("enable.auto.commit", false);
consumerConfig.put("key.deserializer", ByteArrayDeserializer.class.getName());
consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
consumerConfig.put("auto.offset.reset", "earliest");
consumerConfig.put("value.deserializer", ByteArrayDeserializer.class.getName());
consumerConfig.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
this.transactionToAbortChecker =
new TransactionsToAbortChecker(
numberOfParallelSubtasks,
subtaskIdCheckpointOffsetMapping,
main.getSubtaskId());
this.consumer = new KafkaConsumer<>(consumerConfig);
this.consumer.subscribe(ImmutableList.of(TRANSACTION_STATE_TOPIC_NAME));
}
/**
* This method queries Kafka's internal transaction topic and filters the transactions for the
* following rules.
* <li>transaction is in no terminal state {@link
* KafkaTransactionLog
* <li>transactionalIdPrefix equals the one from {@link
* <li>Applies the rules from {@link TransactionsToAbortChecker}
*
* @return all transactionIds which must be aborted before starting new transactions.
*/
public List<String> getTransactionsToAbort() {
final Map<Integer, Map<Long, String>> openTransactions = new HashMap<>();
final List<TopicPartition> partitions = getAllPartitions();
final Map<TopicPartition, Long> endOffsets = consumer.endOffsets(partitions);
do {
ConsumerRecords<byte[], byte[]> records = consumer.poll(CONSUMER_POLL_DURATION);
records.records(TRANSACTION_STATE_TOPIC_NAME)
.forEach(maybeAddTransaction(openTransactions));
} while (!hasReadAllRecords(endOffsets, partitions));
return transactionToAbortChecker.getTransactionsToAbort(openTransactions);
}
private boolean hasReadAllRecords(
Map<TopicPartition, Long> endOffsets, List<TopicPartition> partitions) {
final ListIterator<TopicPartition> it = partitions.listIterator();
while (it.hasNext()) {
final TopicPartition partition = it.next();
final long endOffset = endOffsets.get(partition);
if (endOffset == 0 || consumer.position(partition) >= endOffset) {
it.remove();
continue;
}
return false;
}
return true;
}
private List<TopicPartition> getAllPartitions() {
final List<PartitionInfo> partitionInfos =
consumer.partitionsFor(TRANSACTION_STATE_TOPIC_NAME);
return partitionInfos.stream()
.filter(info -> info.topic().equals(TRANSACTION_STATE_TOPIC_NAME))
.map(info -> new TopicPartition(info.topic(), info.partition()))
.collect(Collectors.toList());
}
private Consumer<ConsumerRecord<byte[], byte[]>> maybeAddTransaction(
Map<Integer, Map<Long, String>> openTransactions) {
return record -> {
final ByteBuffer keyBuffer = ByteBuffer.wrap(record.key());
checkKafkaSchemaVersionMatches(keyBuffer);
keyBuffer.getShort();
final String transactionalId = StandardCharsets.UTF_8.decode(keyBuffer).toString();
final Optional<KafkaWriterState> openTransactionOpt =
TransactionalIdFactory.parseKafkaWriterState(transactionalId);
if (!openTransactionOpt.isPresent()) {
return;
}
final KafkaWriterState openTransaction = openTransactionOpt.get();
if (!openTransaction
.getTransactionalIdPrefix()
.equals(main.getTransactionalIdPrefix())) {
LOG.debug(
"The transactionalId prefixes differ. Open: {}, Recovered: {}",
openTransaction.getTransactionalIdPrefix(),
main.getTransactionalIdPrefix());
return;
}
final ByteBuffer valueBuffer = ByteBuffer.wrap(record.value());
checkKafkaSchemaVersionMatches(valueBuffer);
final TransactionState state =
TransactionState.fromByte(readTransactionState(valueBuffer));
LOG.debug("Transaction {} is in state {}", transactionalId, state);
final int openSubtaskIndex = openTransaction.getSubtaskId();
final long openCheckpointOffset = openTransaction.getTransactionalIdOffset();
if (isTransactionInFinalState(state)) {
openTransactions.get(openSubtaskIndex).remove(openCheckpointOffset);
return;
}
if (openTransactions.containsKey(openSubtaskIndex)) {
openTransactions.get(openSubtaskIndex).put(openCheckpointOffset, transactionalId);
} else {
final Map<Long, String> map = new HashMap<>();
map.put(openCheckpointOffset, transactionalId);
openTransactions.put(openSubtaskIndex, map);
}
};
}
@Override
public void close() {
consumer.close();
}
private static boolean isTransactionInFinalState(TransactionState state) {
return TERMINAL_TRANSACTION_STATES.contains(state);
}
enum TransactionState {
Empty(Byte.parseByte("0")),
Ongoing(Byte.parseByte("1")),
PrepareCommit(Byte.parseByte("2")),
PrepareAbort(Byte.parseByte("3")),
CompleteCommit(Byte.parseByte("4")),
CompleteAbort(Byte.parseByte("5")),
Dead(Byte.parseByte("6")),
PrepareEpochFence(Byte.parseByte("7"));
private static final Map<Byte, TransactionState> BYTE_TO_STATE =
Arrays.stream(TransactionState.values())
.collect(Collectors.toMap(e -> e.state, e -> e));
private final byte state;
TransactionState(byte state) {
this.state = state;
}
static TransactionState fromByte(byte state) {
final TransactionState transactionState = BYTE_TO_STATE.get(state);
if (transactionState == null) {
throw new IllegalArgumentException(
String.format("The given state %s is not supported.", state));
}
return transactionState;
}
}
private static void checkKafkaSchemaVersionMatches(ByteBuffer buffer) {
final short version = buffer.getShort();
if (version != SUPPORTED_KAFKA_SCHEMA_VERSION) {
throw new IllegalStateException(
String.format(
"Kafka has changed the schema version from %s to %s",
SUPPORTED_KAFKA_SCHEMA_VERSION, version));
}
}
} | |
Can we share code between WriteWithResult and WriteVoid ? This introduces a significant amount of code duplication. | public PCollection<Void> expand(PCollection<T> input) {
WriteVoid<T> spec = this;
checkArgument(
(spec.getDataSourceProviderFn() != null),
"withDataSourceConfiguration() or withDataSourceProviderFn() is required");
if (input.hasSchema() && !spec.hasStatementAndSetter()) {
checkArgument(spec.getTable() != null, "table cannot be null if statement is not provided");
List<SchemaUtil.FieldWithIndex> fields = spec.getFilteredFields(input.getSchema());
spec =
spec.toBuilder()
.setStatement(spec.generateStatement(fields))
.setPreparedStatementSetter(
new AutoGeneratedPreparedStatementSetter(fields, input.getToRowFunction()))
.build();
} else {
checkArgument(spec.getStatement() != null, "withStatement() is required");
checkArgument(
spec.getPreparedStatementSetter() != null, "withPreparedStatementSetter() is required");
}
PCollection<Iterable<T>> iterables;
if (input.isBounded() == IsBounded.UNBOUNDED
&& getAutoSharding() != null
&& getAutoSharding()) {
iterables =
input
.apply(WithKeys.<String, T>of(""))
.apply(
GroupIntoBatches.<String, T>ofSize(DEFAULT_BATCH_SIZE)
.withMaxBufferingDuration(Duration.millis(200))
.withShardedKey())
.apply(Values.create());
} else {
iterables =
input.apply(
ParDo.of(
new DoFn<T, Iterable<T>>() {
List<T> outputList;
@ProcessElement
public void process(ProcessContext c) {
if (outputList == null) {
outputList = new ArrayList<>();
}
outputList.add(c.element());
}
@FinishBundle
public void finish(FinishBundleContext c) {
c.output(outputList, Instant.now(), GlobalWindow.INSTANCE);
outputList = null;
}
}));
}
return iterables
.apply(
ParDo.of(
new WriteFn<T, Void>(
WriteFnSpec.builder()
.setRetryConfiguration(spec.getRetryConfiguration())
.setRetryStrategy(spec.getRetryStrategy())
.setPreparedStatementSetter(spec.getPreparedStatementSetter())
.setDataSourceProviderFn(spec.getDataSourceProviderFn())
.setTable(spec.getTable())
.setStatement(spec.getStatement())
.setBatchSize(spec.getBatchSize())
.setReturnResults(false)
.build())))
.setCoder(VoidCoder.of());
} | PCollection<Iterable<T>> iterables; | public PCollection<Void> expand(PCollection<T> input) {
WriteVoid<T> spec = this;
checkArgument(
(spec.getDataSourceProviderFn() != null),
"withDataSourceConfiguration() or withDataSourceProviderFn() is required");
if (input.hasSchema() && !spec.hasStatementAndSetter()) {
checkArgument(spec.getTable() != null, "table cannot be null if statement is not provided");
List<SchemaUtil.FieldWithIndex> fields = spec.getFilteredFields(input.getSchema());
spec =
spec.toBuilder()
.setStatement(spec.generateStatement(fields))
.setPreparedStatementSetter(
new AutoGeneratedPreparedStatementSetter(fields, input.getToRowFunction()))
.build();
} else {
checkArgument(spec.getStatement() != null, "withStatement() is required");
checkArgument(
spec.getPreparedStatementSetter() != null, "withPreparedStatementSetter() is required");
}
PCollection<Iterable<T>> iterables = JdbcIO.<T>batchElements(input, getAutoSharding());
return iterables
.apply(
ParDo.of(
new WriteFn<T, Void>(
WriteFnSpec.builder()
.setRetryConfiguration(spec.getRetryConfiguration())
.setRetryStrategy(spec.getRetryStrategy())
.setPreparedStatementSetter(spec.getPreparedStatementSetter())
.setDataSourceProviderFn(spec.getDataSourceProviderFn())
.setTable(spec.getTable())
.setStatement(spec.getStatement())
.setBatchSize(spec.getBatchSize())
.setReturnResults(false)
.build())))
.setCoder(VoidCoder.of());
} | class Builder<T> {
abstract Builder<T> setAutoSharding(Boolean autoSharding);
abstract Builder<T> setDataSourceProviderFn(
SerializableFunction<Void, DataSource> dataSourceProviderFn);
abstract Builder<T> setStatement(ValueProvider<String> statement);
abstract Builder<T> setBatchSize(long batchSize);
abstract Builder<T> setPreparedStatementSetter(PreparedStatementSetter<T> setter);
abstract Builder<T> setRetryStrategy(RetryStrategy deadlockPredicate);
abstract Builder<T> setRetryConfiguration(RetryConfiguration retryConfiguration);
abstract Builder<T> setTable(String table);
abstract WriteVoid<T> build();
} | class Builder<T> {
abstract Builder<T> setAutoSharding(Boolean autoSharding);
abstract Builder<T> setDataSourceProviderFn(
SerializableFunction<Void, DataSource> dataSourceProviderFn);
abstract Builder<T> setStatement(ValueProvider<String> statement);
abstract Builder<T> setBatchSize(long batchSize);
abstract Builder<T> setPreparedStatementSetter(PreparedStatementSetter<T> setter);
abstract Builder<T> setRetryStrategy(RetryStrategy deadlockPredicate);
abstract Builder<T> setRetryConfiguration(RetryConfiguration retryConfiguration);
abstract Builder<T> setTable(String table);
abstract WriteVoid<T> build();
} |
Yes, you are right. I forgot that the code is waiting on the latch there. | public void taskCachedThreadPoolAllowsForSynchronousCheckpoints() throws Exception {
final Task task = createTask(SynchronousCheckpointTestingTask.class);
try (TaskCleaner ignored = new TaskCleaner(task)) {
task.startTaskThread();
executionLatch.await();
assertEquals(ExecutionState.RUNNING, task.getExecutionState());
assertEquals(CheckpointingState.NONE, synchronousCheckpointPhase.getState());
task.triggerCheckpointBarrier(
42,
156865867234L,
new CheckpointOptions(CheckpointType.SYNC_SAVEPOINT, CheckpointStorageLocationReference.getDefault()),
true);
checkpointLatch.await();
assertNull(error.get());
assertEquals(CheckpointingState.PERFORMING_CHECKPOINT, synchronousCheckpointPhase.getState());
checkpointTriggered.await();
task.notifyCheckpointComplete(42);
notifyLatch.await();
assertNull(error.get());
assertEquals(CheckpointingState.EXECUTED_CALLBACK, synchronousCheckpointPhase.getState());
checkpointCompletionLatch.trigger();
checkpointLatch.await();
assertNull(error.get());
assertEquals(CheckpointingState.FINISHED_CHECKPOINT, synchronousCheckpointPhase.getState());
assertEquals(ExecutionState.RUNNING, task.getExecutionState());
}
} | task.startTaskThread(); | public void taskCachedThreadPoolAllowsForSynchronousCheckpoints() throws Exception {
final Task task = createTask(SynchronousCheckpointTestingTask.class);
try (TaskCleaner ignored = new TaskCleaner(task)) {
task.startTaskThread();
assertThat(eventQueue.take(), is(Event.TASK_IS_RUNNING));
assertTrue(eventQueue.isEmpty());
assertEquals(ExecutionState.RUNNING, task.getExecutionState());
task.triggerCheckpointBarrier(
42,
156865867234L,
new CheckpointOptions(CheckpointType.SYNC_SAVEPOINT, CheckpointStorageLocationReference.getDefault()),
true);
assertThat(eventQueue.take(), is(Event.PRE_TRIGGER_CHECKPOINT));
assertTrue(eventQueue.isEmpty());
checkpointTriggered.await();
task.notifyCheckpointComplete(42);
assertThat(eventQueue.take(), is(Event.PRE_NOTIFY_CHECKPOINT_COMPLETE));
assertThat(
Arrays.asList(eventQueue.take(), eventQueue.take()),
containsInAnyOrder(Event.POST_NOTIFY_CHECKPOINT_COMPLETE, Event.POST_TRIGGER_CHECKPOINT));
assertTrue(eventQueue.isEmpty());
assertEquals(ExecutionState.RUNNING, task.getExecutionState());
}
} | class SynchronousCheckpointITCase {
private static OneShotLatch executionLatch;
private static OneShotLatch cancellationLatch;
private static OneShotLatch checkpointCompletionLatch;
private static OneShotLatch notifyLatch;
private static OneShotLatch checkpointTriggered = new OneShotLatch();
private static MultiShotLatch checkpointLatch;
private static AtomicReference<Throwable> error = new AtomicReference<>();
private static volatile CheckpointingStateHolder synchronousCheckpointPhase = new CheckpointingStateHolder();
@Before
public void initializeLatchesAndError() {
executionLatch = new OneShotLatch();
cancellationLatch = new OneShotLatch();
checkpointCompletionLatch = new OneShotLatch();
notifyLatch = new OneShotLatch();
checkpointLatch = new MultiShotLatch();
synchronousCheckpointPhase.setState(CheckpointingState.NONE);
error.set(null);
}
@Rule
public final Timeout timeoutPerTest = Timeout.seconds(7);
@Test
/**
* A {@link StreamTask} which makes sure that the different phases of a synchronous checkpoint
* are reflected in the {@link SynchronousCheckpointITCase
*/
public static class SynchronousCheckpointTestingTask extends StreamTask {
public SynchronousCheckpointTestingTask(Environment environment) {
super(environment);
}
@Override
protected void performDefaultAction(ActionContext context) throws Exception {
executionLatch.trigger();
cancellationLatch.await();
context.allActionsCompleted();
}
@Override
protected void cancelTask() {
cancellationLatch.trigger();
}
@Override
public boolean triggerCheckpoint(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) throws Exception {
SynchronousCheckpointITCase.synchronousCheckpointPhase.setState(CheckpointingState.PERFORMING_CHECKPOINT);
checkpointLatch.trigger();
super.triggerCheckpoint(checkpointMetaData, checkpointOptions, advanceToEndOfEventTime);
checkpointCompletionLatch.await();
SynchronousCheckpointITCase.synchronousCheckpointPhase.setState(CheckpointingState.FINISHED_CHECKPOINT);
checkpointLatch.trigger();
return true;
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
SynchronousCheckpointITCase.synchronousCheckpointPhase.setState(CheckpointingState.EXECUTING_CALLBACK);
super.notifyCheckpointComplete(checkpointId);
SynchronousCheckpointITCase.synchronousCheckpointPhase.setState(CheckpointingState.EXECUTED_CALLBACK);
notifyLatch.trigger();
}
@Override
protected void init() {
}
@Override
public void triggerCheckpointOnBarrier(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, CheckpointMetrics checkpointMetrics) {
throw new UnsupportedOperationException("Should not be called");
}
@Override
public void abortCheckpointOnBarrier(long checkpointId, Throwable cause) {
throw new UnsupportedOperationException("Should not be called");
}
@Override
protected void cleanup() {
}
@Override
protected void advanceToEndOfEventTime() throws Exception {
checkpointTriggered.trigger();
}
}
/**
* The different state transitions during a synchronous checkpoint along with their expected previous state.
*/
private enum CheckpointingState {
NONE(null),
PERFORMING_CHECKPOINT(NONE),
EXECUTING_CALLBACK(PERFORMING_CHECKPOINT),
EXECUTED_CALLBACK(EXECUTING_CALLBACK),
FINISHED_CHECKPOINT(EXECUTED_CALLBACK);
private final CheckpointingState expectedPreviousState;
CheckpointingState(final CheckpointingState previousState) {
this.expectedPreviousState = previousState;
}
void checkValidStateTransition(final CheckpointingState actualPreviousState) {
if (this.expectedPreviousState != actualPreviousState) {
error.set(new AssertionError());
}
}
}
/**
* A container holding the current {@link CheckpointingState}.
*/
private static final class CheckpointingStateHolder {
private volatile CheckpointingState checkpointingState = null;
void setState(CheckpointingState state) {
state.checkValidStateTransition(checkpointingState);
checkpointingState = state;
}
CheckpointingState getState() {
return checkpointingState;
}
}
private Task createTask(Class<? extends AbstractInvokable> invokableClass) throws Exception {
BlobCacheService blobService =
new BlobCacheService(mock(PermanentBlobCache.class), mock(TransientBlobCache.class));
LibraryCacheManager libCache = mock(LibraryCacheManager.class);
when(libCache.getClassLoader(any(JobID.class))).thenReturn(ClassLoader.getSystemClassLoader());
ResultPartitionConsumableNotifier consumableNotifier = new NoOpResultPartitionConsumableNotifier();
PartitionProducerStateChecker partitionProducerStateChecker = mock(PartitionProducerStateChecker.class);
Executor executor = mock(Executor.class);
NetworkEnvironment networkEnvironment = new NetworkEnvironmentBuilder().build();
TaskMetricGroup taskMetricGroup = UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();
JobInformation jobInformation = new JobInformation(
new JobID(),
"Job Name",
new SerializedValue<>(new ExecutionConfig()),
new Configuration(),
Collections.emptyList(),
Collections.emptyList());
TaskInformation taskInformation = new TaskInformation(
new JobVertexID(),
"Test Task",
1,
1,
invokableClass.getName(),
new Configuration());
return new Task(
jobInformation,
taskInformation,
new ExecutionAttemptID(),
new AllocationID(),
0,
0,
Collections.<ResultPartitionDeploymentDescriptor>emptyList(),
Collections.<InputGateDeploymentDescriptor>emptyList(),
0,
mock(MemoryManager.class),
mock(IOManager.class),
networkEnvironment,
new KvStateService(new KvStateRegistry(), null, null),
mock(BroadcastVariableManager.class),
new TaskEventDispatcher(),
new TestTaskStateManager(),
mock(TaskManagerActions.class),
mock(InputSplitProvider.class),
mock(CheckpointResponder.class),
new TestGlobalAggregateManager(),
blobService,
libCache,
mock(FileCache.class),
new TestingTaskManagerRuntimeInfo(),
taskMetricGroup,
consumableNotifier,
partitionProducerStateChecker,
executor);
}
private static class TaskCleaner implements AutoCloseable {
private final Task task;
private TaskCleaner(Task task) {
this.task = task;
}
@Override
public void close() throws Exception {
task.cancelExecution();
task.getExecutingThread().join(5000);
}
}
} | class SynchronousCheckpointITCase {
private static OneShotLatch checkpointTriggered = new OneShotLatch();
private static LinkedBlockingQueue<Event> eventQueue = new LinkedBlockingQueue<>();
@Rule
public final Timeout timeoutPerTest = Timeout.seconds(10);
@Test
/**
* A {@link StreamTask} which makes sure that the different phases of a synchronous checkpoint
* are reflected in the {@link SynchronousCheckpointITCase
*/
public static class SynchronousCheckpointTestingTask extends StreamTask {
private boolean isRunning;
public SynchronousCheckpointTestingTask(Environment environment) {
super(environment);
}
@Override
protected void performDefaultAction(ActionContext context) throws Exception {
if (!isRunning) {
isRunning = true;
eventQueue.put(Event.TASK_IS_RUNNING);
}
if (isCanceled()) {
context.allActionsCompleted();
} else {
context.actionsUnavailable();
}
}
@Override
protected void cancelTask() {
}
@Override
public boolean triggerCheckpoint(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) throws Exception {
eventQueue.put(Event.PRE_TRIGGER_CHECKPOINT);
boolean result = super.triggerCheckpoint(checkpointMetaData, checkpointOptions, advanceToEndOfEventTime);
eventQueue.put(Event.POST_TRIGGER_CHECKPOINT);
return result;
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
eventQueue.put(Event.PRE_NOTIFY_CHECKPOINT_COMPLETE);
super.notifyCheckpointComplete(checkpointId);
eventQueue.put(Event.POST_NOTIFY_CHECKPOINT_COMPLETE);
}
@Override
protected void init() {
}
@Override
public void triggerCheckpointOnBarrier(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, CheckpointMetrics checkpointMetrics) {
throw new UnsupportedOperationException("Should not be called");
}
@Override
public void abortCheckpointOnBarrier(long checkpointId, Throwable cause) {
throw new UnsupportedOperationException("Should not be called");
}
@Override
protected void cleanup() {
}
@Override
protected void advanceToEndOfEventTime() throws Exception {
checkpointTriggered.trigger();
}
}
/**
* The different state transitions during a synchronous checkpoint along with their expected previous state.
*/
private enum Event {
TASK_IS_RUNNING,
PRE_TRIGGER_CHECKPOINT,
PRE_NOTIFY_CHECKPOINT_COMPLETE,
POST_NOTIFY_CHECKPOINT_COMPLETE,
POST_TRIGGER_CHECKPOINT,
}
private Task createTask(Class<? extends AbstractInvokable> invokableClass) throws Exception {
BlobCacheService blobService =
new BlobCacheService(mock(PermanentBlobCache.class), mock(TransientBlobCache.class));
LibraryCacheManager libCache = mock(LibraryCacheManager.class);
when(libCache.getClassLoader(any(JobID.class))).thenReturn(ClassLoader.getSystemClassLoader());
ResultPartitionConsumableNotifier consumableNotifier = new NoOpResultPartitionConsumableNotifier();
PartitionProducerStateChecker partitionProducerStateChecker = mock(PartitionProducerStateChecker.class);
Executor executor = mock(Executor.class);
NetworkEnvironment networkEnvironment = new NetworkEnvironmentBuilder().build();
TaskMetricGroup taskMetricGroup = UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();
JobInformation jobInformation = new JobInformation(
new JobID(),
"Job Name",
new SerializedValue<>(new ExecutionConfig()),
new Configuration(),
Collections.emptyList(),
Collections.emptyList());
TaskInformation taskInformation = new TaskInformation(
new JobVertexID(),
"Test Task",
1,
1,
invokableClass.getName(),
new Configuration());
return new Task(
jobInformation,
taskInformation,
new ExecutionAttemptID(),
new AllocationID(),
0,
0,
Collections.<ResultPartitionDeploymentDescriptor>emptyList(),
Collections.<InputGateDeploymentDescriptor>emptyList(),
0,
mock(MemoryManager.class),
mock(IOManager.class),
networkEnvironment,
new KvStateService(new KvStateRegistry(), null, null),
mock(BroadcastVariableManager.class),
new TaskEventDispatcher(),
new TestTaskStateManager(),
mock(TaskManagerActions.class),
mock(InputSplitProvider.class),
mock(CheckpointResponder.class),
new TestGlobalAggregateManager(),
blobService,
libCache,
mock(FileCache.class),
new TestingTaskManagerRuntimeInfo(),
taskMetricGroup,
consumableNotifier,
partitionProducerStateChecker,
executor);
}
private static class TaskCleaner implements AutoCloseable {
private final Task task;
private TaskCleaner(Task task) {
this.task = task;
}
@Override
public void close() throws Exception {
task.cancelExecution();
task.getExecutingThread().join(5000);
}
}
} |
It seems this is the only use for the `jpaConfig` object. Does this stricly need some lazy initialization, or could you read the field earlier and store only an immutable boolean field rather than keeping a reference to the bootstrap proxy? | public boolean validateExistingCurrentSessions() {
return jpaConfig.isValidateTenantInCurrentSessions();
} | return jpaConfig.isValidateTenantInCurrentSessions(); | public boolean validateExistingCurrentSessions() {
return false;
} | class HibernateCurrentTenantIdentifierResolver implements CurrentTenantIdentifierResolver {
private static final Logger LOG = Logger.getLogger(HibernateCurrentTenantIdentifierResolver.class);
private final JPAConfig jpaConfig;
public HibernateCurrentTenantIdentifierResolver(final JPAConfig jpaConfig) {
super();
this.jpaConfig = jpaConfig;
}
@Override
public String resolveCurrentTenantIdentifier() {
if (!Arc.container().requestContext().isActive()) {
return null;
}
TenantResolver resolver = tenantResolver();
String tenantId = resolver.resolveTenantId();
if (tenantId == null) {
throw new IllegalStateException("Method 'TenantResolver.resolveTenantId()' returned a null value. "
+ "Unfortunately Hibernate ORM does not allow null for tenant identifiers. "
+ "Please use a non-null value!");
}
LOG.debugv("resolveCurrentTenantIdentifier(): {0}", tenantId);
return tenantId;
}
@Override
/**
* Retrieves the tenant resolver or fails if it is not available.
*
* @return Current tenant resolver.
*/
private static TenantResolver tenantResolver() {
InstanceHandle<TenantResolver> resolverInstance = Arc.container().instance(TenantResolver.class);
if (!resolverInstance.isAvailable()) {
throw new IllegalStateException("No instance of " + TenantResolver.class.getName() + " was found. "
+ "You need to create an implementation for this interface to allow resolving the current tenant identifier.");
}
return resolverInstance.get();
}
} | class HibernateCurrentTenantIdentifierResolver implements CurrentTenantIdentifierResolver {
private static final Logger LOG = Logger.getLogger(HibernateCurrentTenantIdentifierResolver.class);
@Override
public String resolveCurrentTenantIdentifier() {
if (!Arc.container().requestContext().isActive()) {
return null;
}
TenantResolver resolver = tenantResolver();
String tenantId = resolver.resolveTenantId();
if (tenantId == null) {
throw new IllegalStateException("Method 'TenantResolver.resolveTenantId()' returned a null value. "
+ "Unfortunately Hibernate ORM does not allow null for tenant identifiers. "
+ "Please use a non-null value!");
}
LOG.debugv("resolveCurrentTenantIdentifier(): {0}", tenantId);
return tenantId;
}
@Override
/**
* Retrieves the tenant resolver or fails if it is not available.
*
* @return Current tenant resolver.
*/
private static TenantResolver tenantResolver() {
InstanceHandle<TenantResolver> resolverInstance = Arc.container().instance(TenantResolver.class);
if (!resolverInstance.isAvailable()) {
throw new IllegalStateException("No instance of " + TenantResolver.class.getName() + " was found. "
+ "You need to create an implementation for this interface to allow resolving the current tenant identifier.");
}
return resolverInstance.get();
}
} |
The retryRate is not used in this method, probably can move this logic to reevaluateThresholds | private void recordOperation(boolean isRetry) {
long totalSnapshot = this.totalOperationCount.incrementAndGet();
CurrentIntervalThresholds currentThresholdsSnapshot = this.currentThresholds.get();
long currentTotalCountSnapshot = currentThresholdsSnapshot.currentOperationCount.incrementAndGet();
long currentRetryCountSnapshot;
if (isRetry) {
currentRetryCountSnapshot = currentThresholdsSnapshot.currentRetriedOperationCount.incrementAndGet();
} else {
currentRetryCountSnapshot = currentThresholdsSnapshot.currentRetriedOperationCount.get();
}
double retryRate = (double)currentRetryCountSnapshot / currentTotalCountSnapshot;
Pair<Boolean, Boolean> shouldReevaluateResult =
this.shouldReevaluateThresholds(totalSnapshot, currentTotalCountSnapshot);
boolean shouldReevaluate = shouldReevaluateResult.getLeft();
if (shouldReevaluate) {
boolean onlyUpscale = shouldReevaluateResult.getRight();
if (onlyUpscale ||
this.currentThresholds.compareAndSet(currentThresholdsSnapshot, new CurrentIntervalThresholds())) {
this.reevaluateThresholds(
totalSnapshot,
currentTotalCountSnapshot,
currentRetryCountSnapshot,
retryRate,
shouldReevaluateResult.getRight());
}
}
} | double retryRate = (double)currentRetryCountSnapshot / currentTotalCountSnapshot; | private void recordOperation(boolean isRetry) {
long totalSnapshot = this.totalOperationCount.incrementAndGet();
CurrentIntervalThresholds currentThresholdsSnapshot = this.currentThresholds.get();
long currentTotalCountSnapshot = currentThresholdsSnapshot.currentOperationCount.incrementAndGet();
long currentRetryCountSnapshot;
if (isRetry) {
currentRetryCountSnapshot = currentThresholdsSnapshot.currentRetriedOperationCount.incrementAndGet();
} else {
currentRetryCountSnapshot = currentThresholdsSnapshot.currentRetriedOperationCount.get();
}
Pair<Boolean, Boolean> shouldReevaluateResult =
this.shouldReevaluateThresholds(totalSnapshot, currentTotalCountSnapshot);
boolean shouldReevaluate = shouldReevaluateResult.getLeft();
if (shouldReevaluate) {
boolean onlyUpscale = shouldReevaluateResult.getRight();
if (onlyUpscale ||
this.currentThresholds.compareAndSet(currentThresholdsSnapshot, new CurrentIntervalThresholds())) {
this.reevaluateThresholds(
totalSnapshot,
currentTotalCountSnapshot,
currentRetryCountSnapshot,
shouldReevaluateResult.getRight());
}
}
} | class PartitionScopeThresholds<TContext> {
private final static Logger logger = LoggerFactory.getLogger(PartitionScopeThresholds.class);
private final String pkRangeId;
private final BulkProcessingOptions<TContext> options;
private final AtomicInteger targetMicroBatchSize;
private final AtomicLong totalOperationCount;
private final AtomicReference<CurrentIntervalThresholds> currentThresholds;
private final String identifier = UUID.randomUUID().toString();
private final double minRetryRate;
private final double maxRetryRate;
private final double avgRetryRate;
public PartitionScopeThresholds(String pkRangeId, BulkProcessingOptions<TContext> options) {
checkNotNull(pkRangeId, "expected non-null pkRangeId");
checkNotNull(options, "expected non-null options");
this.pkRangeId = pkRangeId;
this.options = options;
this.targetMicroBatchSize = new AtomicInteger(options.getMaxMicroBatchSize());
this.totalOperationCount = new AtomicLong(0);
this.currentThresholds = new AtomicReference<>(new CurrentIntervalThresholds());
this.minRetryRate = options.getMinTargetedMicroBatchRetryRate();
this.maxRetryRate = options.getMaxTargetedMicroBatchRetryRate();
this.avgRetryRate = ((this.maxRetryRate + this.minRetryRate)/2);
}
public String getPartitionKeyRangeId() {
return this.pkRangeId;
}
private Pair<Boolean, Boolean> shouldReevaluateThresholds(long totalSnapshot, long currentSnapshot) {
if (totalSnapshot < 1_000) {
return Pair.of(currentSnapshot == 100, false);
}
if (totalSnapshot < 10_000) {
return Pair.of(currentSnapshot == 1_000, false);
}
return Pair.of(currentSnapshot % 1_000 == 0, currentSnapshot % 10_000 == 0);
}
private void reevaluateThresholds(
long totalCount,
long currentCount,
long retryCount,
double retryRate,
boolean onlyUpscale) {
int microBatchSizeBefore = this.targetMicroBatchSize.get();
int microBatchSizeAfter = microBatchSizeBefore;
if (retryRate < this.minRetryRate && microBatchSizeBefore < this.options.getMaxMicroBatchSize()) {
int targetedNewBatchSize = Math.min(
Math.min(
microBatchSizeBefore * 2,
microBatchSizeBefore + (int)(this.options.getMaxMicroBatchSize() * this.avgRetryRate)),
this.options.getMaxMicroBatchSize());
if (this.targetMicroBatchSize.compareAndSet(microBatchSizeBefore, targetedNewBatchSize)) {
microBatchSizeAfter = targetedNewBatchSize;
}
} else if (!onlyUpscale && retryRate > this.maxRetryRate && microBatchSizeBefore > 1) {
double deltaRate = retryRate - this.avgRetryRate;
int targetedNewBatchSize = Math.max(1, (int) (microBatchSizeBefore * (1 - deltaRate)));
if (this.targetMicroBatchSize.compareAndSet(microBatchSizeBefore, targetedNewBatchSize)) {
microBatchSizeAfter = targetedNewBatchSize;
}
}
logger.debug(
"Reevaluated thresholds for PKRange '{}
"CurrentRetryRate: {} - BatchSize {} -> {}, OnlyUpscale: {})",
this.pkRangeId,
this.identifier,
totalCount,
currentCount,
retryCount,
retryRate,
microBatchSizeBefore,
microBatchSizeAfter,
onlyUpscale);
}
public void recordSuccessfulOperation() {
this.recordOperation(false);
}
public void recordEnqueuedRetry() {
this.recordOperation(true);
}
public int getTargetMicroBatchSizeSnapshot() {
return this.targetMicroBatchSize.get();
}
private static class CurrentIntervalThresholds {
public final AtomicLong currentOperationCount = new AtomicLong(0);
public final AtomicLong currentRetriedOperationCount = new AtomicLong(0);
}
} | class PartitionScopeThresholds<TContext> {
private final static Logger logger = LoggerFactory.getLogger(PartitionScopeThresholds.class);
private final String pkRangeId;
private final BulkProcessingOptions<TContext> options;
private final AtomicInteger targetMicroBatchSize;
private final AtomicLong totalOperationCount;
private final AtomicReference<CurrentIntervalThresholds> currentThresholds;
private final String identifier = UUID.randomUUID().toString();
private final double minRetryRate;
private final double maxRetryRate;
private final double avgRetryRate;
public PartitionScopeThresholds(String pkRangeId, BulkProcessingOptions<TContext> options) {
checkNotNull(pkRangeId, "expected non-null pkRangeId");
checkNotNull(options, "expected non-null options");
this.pkRangeId = pkRangeId;
this.options = options;
this.targetMicroBatchSize = new AtomicInteger(options.getMaxMicroBatchSize());
this.totalOperationCount = new AtomicLong(0);
this.currentThresholds = new AtomicReference<>(new CurrentIntervalThresholds());
this.minRetryRate = options.getMinTargetedMicroBatchRetryRate();
this.maxRetryRate = options.getMaxTargetedMicroBatchRetryRate();
this.avgRetryRate = ((this.maxRetryRate + this.minRetryRate)/2);
}
public String getPartitionKeyRangeId() {
return this.pkRangeId;
}
private Pair<Boolean, Boolean> shouldReevaluateThresholds(long totalSnapshot, long currentSnapshot) {
if (totalSnapshot < 1_000) {
return Pair.of(currentSnapshot == 100, false);
}
if (totalSnapshot < 10_000) {
return Pair.of(currentSnapshot == 1_000, false);
}
return Pair.of(currentSnapshot % 1_000 == 0, currentSnapshot % 10_000 == 0);
}
private void reevaluateThresholds(
long totalCount,
long currentCount,
long retryCount,
boolean onlyUpscale) {
double retryRate = currentCount == 0 ? 0 : (double)retryCount / currentCount;
int microBatchSizeBefore = this.targetMicroBatchSize.get();
int microBatchSizeAfter = microBatchSizeBefore;
if (retryRate < this.minRetryRate && microBatchSizeBefore < this.options.getMaxMicroBatchSize()) {
int targetedNewBatchSize = Math.min(
Math.min(
microBatchSizeBefore * 2,
microBatchSizeBefore + (int)(this.options.getMaxMicroBatchSize() * this.avgRetryRate)),
this.options.getMaxMicroBatchSize());
if (this.targetMicroBatchSize.compareAndSet(microBatchSizeBefore, targetedNewBatchSize)) {
microBatchSizeAfter = targetedNewBatchSize;
}
} else if (!onlyUpscale && retryRate > this.maxRetryRate && microBatchSizeBefore > 1) {
double deltaRate = retryRate - this.avgRetryRate;
int targetedNewBatchSize = Math.max(1, (int) (microBatchSizeBefore * (1 - deltaRate)));
if (this.targetMicroBatchSize.compareAndSet(microBatchSizeBefore, targetedNewBatchSize)) {
microBatchSizeAfter = targetedNewBatchSize;
}
}
logger.debug(
"Reevaluated thresholds for PKRange '{}
"CurrentRetryRate: {} - BatchSize {} -> {}, OnlyUpscale: {})",
this.pkRangeId,
this.identifier,
totalCount,
currentCount,
retryCount,
retryRate,
microBatchSizeBefore,
microBatchSizeAfter,
onlyUpscale);
}
public void recordSuccessfulOperation() {
this.recordOperation(false);
}
public void recordEnqueuedRetry() {
this.recordOperation(true);
}
public int getTargetMicroBatchSizeSnapshot() {
return this.targetMicroBatchSize.get();
}
private static class CurrentIntervalThresholds {
public final AtomicLong currentOperationCount = new AtomicLong(0);
public final AtomicLong currentRetriedOperationCount = new AtomicLong(0);
}
} |
Shall we extract `peek()` to a separate variable and reuse it at the recovery call? | private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
switch (peek(lookahead + 1).kind) {
case IDENTIFIER_TOKEN:
SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
switch (tokenAfterIdentifier) {
case ON_KEYWORD:
case OPEN_BRACE_TOKEN:
return true;
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
case QUESTION_MARK_TOKEN:
return false;
default:
return false;
}
case ON_KEYWORD:
return true;
default:
return false;
}
}
/**
* Parse listener declaration, given the qualifier.
* <p>
* <code>
* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
* </code>
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @return Parsed node
*/
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
startContext(ParserRuleContext.LISTENER_DECL);
STNode listenerKeyword = parseListenerKeyword();
if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
STNode listenerDecl =
parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
endContext();
return listenerDecl;
}
STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
STNode variableName = parseVariableName();
STNode equalsToken = parseAssignOp();
STNode initializer = parseExpression();
STNode semicolonToken = parseSemicolon();
endContext();
return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,
equalsToken, initializer, semicolonToken);
}
/**
* Parse listener keyword.
*
* @return Parsed node
*/
private STNode parseListenerKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LISTENER_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LISTENER_KEYWORD);
return parseListenerKeyword();
}
}
/**
* Parse constant declaration, given the qualifier.
* <p>
* <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @return Parsed node
*/
private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
startContext(ParserRuleContext.CONSTANT_DECL);
STNode constKeyword = parseConstantKeyword();
return parseConstDecl(metadata, qualifier, constKeyword);
}
/**
* Parse the components that follows after the const keyword of a constant declaration.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the constant decl
* @param constKeyword Const keyword
* @return Parsed node
*/
private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
STToken nextToken = peek();
switch (nextToken.kind) {
case ANNOTATION_KEYWORD:
endContext();
return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
case IDENTIFIER_TOKEN:
STNode constantDecl =
parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
endContext();
return constantDecl;
default:
if (isTypeStartingToken(nextToken.kind)) {
break;
}
recover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);
return parseConstDecl(metadata, qualifier, constKeyword);
}
STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
STNode variableName = parseVariableName();
STNode equalsToken = parseAssignOp();
STNode initializer = parseExpression();
STNode semicolonToken = parseSemicolon();
endContext();
return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
equalsToken, initializer, semicolonToken);
}
private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
boolean isListener) {
STNode varNameOrTypeName = parseStatementStartIdentifier();
return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);
}
    /**
     * Parse the component that follows the first identifier in a const decl. The identifier
     * can be either the type-name (a user defined type) or the var-name where the type-name
     * is not present.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param keyword Keyword
     * @param typeOrVarName Identifier that follows the const-keyword
     * @param isListener Whether this is a listener declaration (otherwise a constant declaration)
     * @return Parsed node
     */
    private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                                  STNode typeOrVarName, boolean isListener) {
        if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // A qualified name (`module:Type`) can only be a type reference; the var-name follows.
            STNode type = typeOrVarName;
            STNode variableName = parseVariableName();
            return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
        }
        STNode type;
        STNode variableName;
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                // Two identifiers in a row: the first was the type, the second is the var-name.
                type = typeOrVarName;
                variableName = parseVariableName();
                break;
            case EQUAL_TOKEN:
                // `identifier =`: no type descriptor; the identifier was the var-name.
                variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
                type = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName,
                        isListener);
                return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
        }
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }
private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
STNode type, STNode variableName) {
STNode equalsToken = parseAssignOp();
STNode initializer = parseExpression();
STNode semicolonToken = parseSemicolon();
if (isListener) {
return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
equalsToken, initializer, semicolonToken);
}
return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
equalsToken, initializer, semicolonToken);
}
/**
* Parse const keyword.
*
* @return Parsed node
*/
private STNode parseConstantKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CONST_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.CONST_KEYWORD);
return parseConstantKeyword();
}
}
/**
* Parse typeof expression.
* <p>
* <code>
* typeof-expr := typeof expression
* </code>
*
* @param isRhsExpr
* @return Typeof expression node
*/
private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
STNode typeofKeyword = parseTypeofKeyword();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
}
/**
* Parse typeof-keyword.
*
* @return Typeof-keyword node
*/
private STNode parseTypeofKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TYPEOF_KEYWORD);
return parseTypeofKeyword();
}
}
/**
* Parse optional type descriptor given the type.
* <p>
* <code>optional-type-descriptor := type-descriptor `?`</code>
* </p>
*
* @param typeDescriptorNode Preceding type descriptor
* @return Parsed node
*/
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
STNode questionMarkToken = parseQuestionMark();
endContext();
typeDescriptorNode = validateForUsageOfVar(typeDescriptorNode);
return STNodeFactory.createOptionalTypeDescriptorNode(typeDescriptorNode, questionMarkToken);
}
/**
* Parse unary expression.
* <p>
* <code>
* unary-expr := + expression | - expression | ~ expression | ! expression
* </code>
*
* @param isRhsExpr
* @return Unary expression node
*/
private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
STNode unaryOperator = parseUnaryOperator();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
}
/**
* Parse unary operator.
* <code>UnaryOperator := + | - | ~ | !</code>
*
* @return Parsed node
*/
private STNode parseUnaryOperator() {
STToken token = peek();
if (isUnaryOperator(token.kind)) {
return consume();
} else {
recover(token, ParserRuleContext.UNARY_OPERATOR);
return parseUnaryOperator();
}
}
/**
* Check whether the given token kind is a unary operator.
*
* @param kind STToken kind
* @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
*/
private boolean isUnaryOperator(SyntaxKind kind) {
switch (kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case NEGATION_TOKEN:
case EXCLAMATION_MARK_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse array type descriptor.
* <p>
* <code>
* array-type-descriptor := member-type-descriptor [ [ array-length ] ]
* member-type-descriptor := type-descriptor
* array-length :=
* int-literal
* | constant-reference-expr
* | inferred-array-length
* inferred-array-length := *
* </code>
* </p>
*
* @param memberTypeDesc
* @return Parsed Node
*/
private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {
startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
STNode openBracketToken = parseOpenBracket();
STNode arrayLengthNode = parseArrayLength();
STNode closeBracketToken = parseCloseBracket();
endContext();
return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);
}
    /**
     * Create an array type descriptor node, validating the array length.
     * <p>
     * Only decimal/hex int literals, constant references, and the inferred length `*`
     * are valid lengths. Anything else is attached to the open bracket as invalid-node
     * minutiae with a diagnostic, and the length is dropped.
     *
     * @param memberTypeDesc Member type of the array
     * @param openBracketToken Open bracket
     * @param arrayLengthNode Array length node, or null when no length is present
     * @param closeBracketToken Close bracket
     * @return Array type descriptor node
     */
    private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,
                                       STNode closeBracketToken) {
        memberTypeDesc = validateForUsageOfVar(memberTypeDesc);
        if (arrayLengthNode != null) {
            switch (arrayLengthNode.kind) {
                case ASTERISK_LITERAL:
                case SIMPLE_NAME_REFERENCE:
                case QUALIFIED_NAME_REFERENCE:
                    break;
                case NUMERIC_LITERAL:
                    SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;
                    if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                            numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                        break;
                    }
                    // Non-integer numeric literal: deliberate fall-through to the invalid case.
                default:
                    openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,
                            arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                    arrayLengthNode = STNodeFactory.createEmptyNode();
            }
        }
        return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,
                closeBracketToken);
    }
/**
* Parse array length.
* <p>
* <code>
* array-length :=
* int-literal
* | constant-reference-expr
* | inferred-array-length
* constant-reference-expr := variable-reference-expr
* </code>
* </p>
*
* @return Parsed array length
*/
private STNode parseArrayLength() {
STToken token = peek();
switch (token.kind) {
case DECIMAL_INTEGER_LITERAL_TOKEN:
case HEX_INTEGER_LITERAL_TOKEN:
case ASTERISK_TOKEN:
return parseBasicLiteral();
case CLOSE_BRACKET_TOKEN:
return STNodeFactory.createEmptyNode();
case IDENTIFIER_TOKEN:
return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
default:
recover(token, ParserRuleContext.ARRAY_LENGTH);
return parseArrayLength();
}
}
/**
* Parse annotations.
* <p>
* <i>Note: In the <a href="https:
* annotations-list is specified as one-or-more annotations. And the usage is marked as
* optional annotations-list. However, for the consistency of the tree, here we make the
* annotation-list as zero-or-more annotations, and the usage is not-optional.</i>
* <p>
* <code>annots := annotation*</code>
*
* @return Parsed node
*/
private STNode parseOptionalAnnotations() {
startContext(ParserRuleContext.ANNOTATIONS);
List<STNode> annotList = new ArrayList<>();
STToken nextToken = peek();
while (nextToken.kind == SyntaxKind.AT_TOKEN) {
annotList.add(parseAnnotation());
nextToken = peek();
}
endContext();
return STNodeFactory.createNodeList(annotList);
}
/**
* Parse annotation list with at least one annotation.
*
* @return Annotation list
*/
private STNode parseAnnotations() {
startContext(ParserRuleContext.ANNOTATIONS);
List<STNode> annotList = new ArrayList<>();
annotList.add(parseAnnotation());
while (peek().kind == SyntaxKind.AT_TOKEN) {
annotList.add(parseAnnotation());
}
endContext();
return STNodeFactory.createNodeList(annotList);
}
/**
* Parse annotation attachment.
* <p>
* <code>annotation := @ annot-tag-reference annot-value</code>
*
* @return Parsed node
*/
private STNode parseAnnotation() {
STNode atToken = parseAtToken();
STNode annotReference;
if (isPredeclaredIdentifier(peek().kind)) {
annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
} else {
annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
}
STNode annotValue;
if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
annotValue = parseMappingConstructorExpr();
} else {
annotValue = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}
/**
* Parse '@' token.
*
* @return Parsed node
*/
private STNode parseAtToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.AT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.AT);
return parseAtToken();
}
}
/**
* Parse metadata. Meta data consist of optional doc string and
* an annotations list.
* <p>
* <code>metadata := [DocumentationString] annots</code>
*
* @return Parse node
*/
private STNode parseMetaData() {
STNode docString;
STNode annotations;
switch (peek().kind) {
case DOCUMENTATION_STRING:
docString = parseMarkdownDocumentation();
annotations = parseOptionalAnnotations();
break;
case AT_TOKEN:
docString = STNodeFactory.createEmptyNode();
annotations = parseOptionalAnnotations();
break;
default:
return STNodeFactory.createEmptyNode();
}
return createMetadata(docString, annotations);
}
/**
* Create metadata node.
*
* @return A metadata node
*/
private STNode createMetadata(STNode docString, STNode annotations) {
if (annotations == null && docString == null) {
return STNodeFactory.createEmptyNode();
} else {
return STNodeFactory.createMetadataNode(docString, annotations);
}
}
/**
* Parse is expression.
* <code>
* is-expr := expression is type-descriptor
* </code>
*
* @param lhsExpr Preceding expression of the is expression
* @return Is expression node
*/
private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
STNode isKeyword = parseIsKeyword();
STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);
return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, typeDescriptor);
}
/**
* Parse is-keyword.
*
* @return Is-keyword node
*/
private STNode parseIsKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.IS_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.IS_KEYWORD);
return parseIsKeyword();
}
}
/**
* Parse local type definition statement statement.
* <code>ocal-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
*
* @return local type definition statement statement
*/
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
STNode typeKeyword = parseTypeKeyword();
STNode typeName = parseTypeName();
STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
semicolon);
}
/**
* Parse statement which is only consists of an action or expression.
*
* @param annots Annotations
* @return Statement node
*/
private STNode parseExpressionStatement(STNode annots) {
startContext(ParserRuleContext.EXPRESSION_STATEMENT);
STNode expression = parseActionOrExpressionInLhs(annots);
return getExpressionAsStatement(expression);
}
/**
* Parse statements that starts with an expression.
*
* @return Statement node
*/
private STNode parseStatementStartWithExpr(STNode annots) {
startContext(ParserRuleContext.AMBIGUOUS_STMT);
STNode expr = parseActionOrExpressionInLhs(annots);
return parseStatementStartWithExprRhs(expr);
}
    /**
     * Parse the component followed by the expression, at the beginning of a statement.
     *
     * @param expression Action or expression in LHS
     * @return Statement node
     */
    private STNode parseStatementStartWithExprRhs(STNode expression) {
        SyntaxKind nextTokenKind = peek().kind;
        if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
            // Actions and `expr ;` forms are complete statements already.
            return getExpressionAsStatement(expression);
        }
        switch (nextTokenKind) {
            case EQUAL_TOKEN:
                // `expr = ...`: this is really an assignment statement.
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                return parseAssignmentStmtRhs(expression);
            case IDENTIFIER_TOKEN:
                // Deliberate fall-through: an identifier here is handled by the recovery below.
            default:
                if (isCompoundBinaryOperator(nextTokenKind)) {
                    // `expr += ...` and friends: compound assignment.
                    return parseCompoundAssignmentStmtRhs(expression);
                }
                // Pick the recovery context based on whether the LHS could itself be a
                // complete expression statement.
                ParserRuleContext context;
                if (isPossibleExpressionStatement(expression)) {
                    context = ParserRuleContext.EXPR_STMT_RHS;
                } else {
                    context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
                }
                recover(peek(), context, expression);
                return parseStatementStartWithExprRhs(expression);
        }
    }
private boolean isPossibleExpressionStatement(STNode expression) {
switch (expression.kind) {
case METHOD_CALL:
case FUNCTION_CALL:
case CHECK_EXPRESSION:
case REMOTE_METHOD_CALL_ACTION:
case CHECK_ACTION:
case BRACED_ACTION:
case START_ACTION:
case TRAP_ACTION:
case FLUSH_ACTION:
case ASYNC_SEND_ACTION:
case SYNC_SEND_ACTION:
case RECEIVE_ACTION:
case WAIT_ACTION:
case QUERY_ACTION:
case COMMIT_ACTION:
return true;
default:
return false;
}
}
    /**
     * Convert a parsed action or expression into the matching statement node.
     * Call-like expressions become call statements, actions become action statements,
     * and anything else becomes an invalid expression statement with a diagnostic.
     *
     * @param expression Action or expression to wrap
     * @return Statement node
     */
    private STNode getExpressionAsStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
            case CHECK_EXPRESSION:
                return parseCallStatement(expression);
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
                return parseActionStatement(expression);
            default:
                // Not a valid statement-expression: still consume the semicolon and close
                // the statement context, but flag the node with a diagnostic.
                STNode semicolon = parseSemicolon();
                endContext();
                STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                        expression, semicolon);
                exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
                return exprStmt;
        }
    }
    /**
     * Re-interpret an indexed expression (<code>T[x]</code>) as an array type descriptor,
     * validating the index expression as an array length.
     *
     * @param indexedExpr Indexed expression to convert
     * @return Array type descriptor node
     */
    private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
        STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
        STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
        if (lengthExprs.isEmpty()) {
            // `T[]`: array type with no explicit length.
            return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                    indexedExpr.closeBracket);
        }
        STNode lengthExpr = lengthExprs.get(0);
        switch (lengthExpr.kind) {
            case ASTERISK_LITERAL:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
                if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // Non-integer numeric literal: deliberate fall-through to the invalid case.
            default:
                // Invalid length: attach it to the open bracket as invalid minutiae and drop it.
                STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                        indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
                lengthExpr = STNodeFactory.createEmptyNode();
        }
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
    }
/**
* <p>
* Parse call statement, given the call expression.
* </p>
* <code>
* call-stmt := call-expr ;
* <br/>
* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
* </code>
*
* @param expression Call expression associated with the call statement
* @return Call statement node
*/
private STNode parseCallStatement(STNode expression) {
STNode semicolon = parseSemicolon();
endContext();
if (expression.kind == SyntaxKind.CHECK_EXPRESSION) {
expression = validateCallExpression(expression);
}
return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
}
    /**
     * Validate that a check expression ultimately wraps a function/method call, as required
     * for a call statement. If it does not, a diagnostic is attached to the checking keyword
     * and a synthetic (missing) function call is substituted so the tree stays well-formed.
     *
     * @param callExpr Check expression to validate
     * @return Validated (possibly repaired) check expression
     */
    private STNode validateCallExpression(STNode callExpr) {
        STCheckExpressionNode checkExpr = (STCheckExpressionNode) callExpr;
        STNode expr = checkExpr.expression;
        if (expr.kind == SyntaxKind.FUNCTION_CALL || expr.kind == SyntaxKind.METHOD_CALL) {
            return callExpr;
        }
        STNode checkKeyword = checkExpr.checkKeyword;
        if (expr.kind == SyntaxKind.CHECK_EXPRESSION) {
            // Nested `check check ...`: validate the inner expression recursively.
            expr = validateCallExpression(expr);
            return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkKeyword, expr);
        }
        // Not a call: attach the invalid expression to the keyword as invalid minutiae and
        // synthesize a missing function call so callers still receive a call expression.
        STNode checkingKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(checkKeyword, expr,
                DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_EXPECTED_CALL_EXPRESSION);
        STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
        STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
        STNode arguments = STNodeFactory.createEmptyNodeList();
        STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
        STNode funcCallExpr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
                closeParenToken);
        return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, funcCallExpr);
    }
private STNode parseActionStatement(STNode action) {
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);
}
/**
* Parse remote method call action, given the starting expression.
* <p>
* <code>
* remote-method-call-action := expression -> method-name ( arg-list )
* <br/>
* async-send-action := expression -> peer-worker ;
* </code>
*
* @param isRhsExpr Is this an RHS action
* @param expression LHS expression
* @return
*/
private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
STNode rightArrow = parseRightArrow();
return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
}
    /**
     * Parse the target that follows the right arrow of a remote method call or async send.
     * A `function` keyword denotes a send to the default worker; an identifier may be either
     * a remote method name or a peer worker name (disambiguated by what follows it).
     *
     * @param expression LHS expression
     * @param isRhsExpr Is this an RHS action
     * @param rightArrow Right arrow token
     * @return Parsed node
     */
    private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {
        STNode name;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case FUNCTION_KEYWORD:
                // `expr -> function`: async send to the default worker.
                STNode functionKeyword = consume();
                name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
                return parseAsyncSendAction(expression, rightArrow, name);
            case IDENTIFIER_TOKEN:
                name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                break;
            case CONTINUE_KEYWORD:
            case COMMIT_KEYWORD:
                // These keywords are also usable as names in this position.
                name = getKeywordAsSimpleNameRef();
                break;
            default:
                STToken token = peek();
                recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow);
                return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
        }
        return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }
private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_PAREN_TOKEN:
return parseRemoteMethodCallAction(expression, rightArrow, name);
case SEMICOLON_TOKEN:
return parseAsyncSendAction(expression, rightArrow, name);
default:
recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);
return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
}
}
    /**
     * Create an async-send action node: <code>expression -> peer-worker</code>.
     *
     * @param expression LHS expression
     * @param rightArrow Right arrow token
     * @param peerWorker Peer worker name
     * @return Async send action node
     */
    private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
        return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
    }
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
STNode openParenToken = parseArgListOpenParenthesis();
STNode arguments = parseArgsList();
STNode closeParenToken = parseArgListCloseParenthesis();
return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,
closeParenToken);
}
/**
* Parse right arrow (<code>-></code>) token.
*
* @return Parsed node
*/
private STNode parseRightArrow() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.RIGHT_ARROW);
return parseRightArrow();
}
}
/**
* Parse parameterized type descriptor.
* parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter
*
* @return Parsed node
*/
private STNode parseParameterizedTypeDescriptor(STNode parameterizedTypeKeyword) {
STNode typeParameter = parseTypeParameter();
return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParameter);
}
    /**
     * Parse <code> > </code> token.
     *
     * @return Parsed node
     */
    private STNode parseGTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.GT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.GT);
            return parseGTToken();
        }
    }
    /**
     * Parse <code> < </code> token.
     *
     * @return Parsed node
     */
    private STNode parseLTToken() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.LT_TOKEN) {
            return consume();
        } else {
            recover(nextToken, ParserRuleContext.LT);
            return parseLTToken();
        }
    }
/**
* Parse nil literal. Here nil literal is only referred to ( ).
*
* @return Parsed node
*/
private STNode parseNilLiteral() {
startContext(ParserRuleContext.NIL_LITERAL);
STNode openParenthesisToken = parseOpenParenthesis();
STNode closeParenthesisToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);
}
/**
* Parse annotation declaration, given the qualifier.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @param constKeyword Const keyword
* @return Parsed node
*/
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
startContext(ParserRuleContext.ANNOTATION_DECL);
STNode annotationKeyword = parseAnnotationKeyword();
STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
endContext();
return annotDecl;
}
/**
* Parse annotation keyword.
*
* @return Parsed node
*/
private STNode parseAnnotationKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ANNOTATION_KEYWORD);
return parseAnnotationKeyword();
}
}
    /**
     * Parse the components that follow after the annotation keyword of an annotation declaration.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // May be a user-defined type name or the annot-tag itself; disambiguated downstream.
                return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    // Built-in type start: break out to parse `type-desc annot-tag` below.
                    break;
                }
                recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword,
                        annotationKeyword);
                return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
        }
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
/**
* Parse annotation tag.
* <p>
* <code>annot-tag := identifier</code>
*
* @return
*/
private STNode parseAnnotationTag() {
STToken token = peek();
if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
recover(peek(), ParserRuleContext.ANNOTATION_TAG);
return parseAnnotationTag();
}
}
    /**
     * Parse an annotation declaration in which the type descriptor may be absent, starting
     * at the first identifier after the annotation keyword.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                       STNode annotationKeyword) {
        STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
        if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // A qualified name (`module:Type`) can only be a type reference; the annot-tag follows.
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag, annotTag);
        }
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
            // The identifier begins a (possibly complex) type descriptor, e.g. `T[] tag`.
            STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                    ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
        }
        // Otherwise the lone identifier may be the annot-tag itself; decide in the RHS parser.
        STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
        return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
    }
    /**
     * Parse the component that follows the first identifier in an annotation decl. The identifier
     * can be either the type-name (a user defined type) or the annot-tag, where the type-name
     * is not present.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
     * @return Parsed node
     */
    private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                          STNode annotationKeyword, STNode typeDescOrAnnotTag) {
        STToken nextToken = peek();
        STNode typeDesc;
        STNode annotTag;
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // Two identifiers in a row: the first was the type, the second is the annot-tag.
                typeDesc = typeDescOrAnnotTag;
                annotTag = parseAnnotationTag();
                break;
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
                // End of declaration or start of attach points: no type descriptor was given.
                typeDesc = STNodeFactory.createEmptyNode();
                annotTag = typeDescOrAnnotTag;
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword,
                        typeDescOrAnnotTag);
                return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
        }
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
    /**
     * Parse the optional `on attach-points` part and the closing semicolon of an annotation
     * declaration, and assemble the complete annotation declaration node.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the annotation decl
     * @param constKeyword Const keyword
     * @param annotationKeyword Annotation keyword
     * @param typeDesc Type descriptor (may be an empty node)
     * @param annotTag Annotation tag
     * @return Parsed node
     */
    private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
        STNode onKeyword;
        STNode attachPoints;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case SEMICOLON_TOKEN:
                // No attach points given.
                onKeyword = STNodeFactory.createEmptyNode();
                attachPoints = STNodeFactory.createEmptyNodeList();
                break;
            case ON_KEYWORD:
                onKeyword = parseOnKeyword();
                attachPoints = parseAnnotationAttachPoints();
                // An `on` keyword followed by an empty list is an error; flag the keyword.
                onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword,
                        annotationKeyword, typeDesc, annotTag);
                return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                        annotTag);
        }
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
                typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
    }
    /**
     * Parse annotation attach points.
     * <p>
     * <code>
     * annot-attach-points := annot-attach-point (, annot-attach-point)*
     * <br/><br/>
     * annot-attach-point := dual-attach-point | source-only-attach-point
     * <br/><br/>
     * dual-attach-point := [source] dual-attach-point-ident
     * <br/><br/>
     * dual-attach-point-ident :=
     * type
     * | class
     * | [object|service remote] function
     * | parameter
     * | return
     * | service
     * | [object|record] field
     * <br/><br/>
     * source-only-attach-point := source source-only-attach-point-ident
     * <br/><br/>
     * source-only-attach-point-ident :=
     * annotation
     * | external
     * | var
     * | const
     * | listener
     * | worker
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotationAttachPoints() {
        startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
        List<STNode> attachPoints = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndAnnotAttachPointList(nextToken.kind)) {
            // Empty list: `on` was present but no attach point follows.
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        // Parse the first attach point, then `, attach-point` pairs until the list ends.
        STNode attachPoint = parseAnnotationAttachPoint();
        attachPoints.add(attachPoint);
        nextToken = peek();
        STNode leadingComma;
        while (!isEndAnnotAttachPointList(nextToken.kind)) {
            leadingComma = parseAttachPointEnd();
            if (leadingComma == null) {
                // Reached the semicolon: the list is complete.
                break;
            }
            attachPoints.add(leadingComma);
            attachPoint = parseAnnotationAttachPoint();
            if (attachPoint == null) {
                // EOF after a comma: add a missing-token placeholder with a diagnostic.
                attachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                attachPoints.add(attachPoint);
                break;
            }
            attachPoints.add(attachPoint);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(attachPoints);
    }
/**
* Parse annotation attach point end.
*
* @return Parsed node
*/
private STNode parseAttachPointEnd() {
switch (peek().kind) {
case SEMICOLON_TOKEN:
return null;
case COMMA_TOKEN:
return consume();
default:
recover(peek(), ParserRuleContext.ATTACH_POINT_END);
return parseAttachPointEnd();
}
}
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case SEMICOLON_TOKEN:
return true;
default:
return false;
}
}
    /**
     * Parse annotation attach point.
     *
     * @return Parsed node, or null on EOF
     */
    private STNode parseAnnotationAttachPoint() {
        switch (peek().kind) {
            case EOF_TOKEN:
                return null;
            // Source-only attach-point idents. These require a preceding `source` keyword,
            // so they deliberately fall through to parseSourceKeyword(), which recovers
            // (presumably inserting a missing `source`) when the keyword is absent.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
                // fall through
            case SOURCE_KEYWORD:
                STNode sourceKeyword = parseSourceKeyword();
                return parseAttachPointIdent(sourceKeyword);
            // Dual attach-point idents: the `source` keyword is optional and absent here.
            case OBJECT_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case RECORD_KEYWORD:
            case CLASS_KEYWORD:
                sourceKeyword = STNodeFactory.createEmptyNode();
                STNode firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT);
                return parseAnnotationAttachPoint();
        }
    }
/**
* Parse source keyword.
*
* @return Parsed node
*/
private STNode parseSourceKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.SOURCE_KEYWORD);
return parseSourceKeyword();
}
}
    /**
     * Parse attach point ident, given the (possibly empty) source keyword.
     * <p>
     * <code>
     * source-only-attach-point-ident := annotation | external | var | const | listener | worker
     * <br/><br/>
     * dual-attach-point-ident := type | class | [object|service remote] function | parameter
     * | return | service | [object|record] field
     * </code>
     *
     * @param sourceKeyword Source keyword
     * @return Parsed node
     */
    private STNode parseAttachPointIdent(STNode sourceKeyword) {
        switch (peek().kind) {
            // Source-only idents are always a single keyword; build the node directly.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
                STNode firstIdent = consume();
                STNode identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            // Dual idents may be followed by a second ident (e.g. `object function`).
            case OBJECT_KEYWORD:
            case RESOURCE_KEYWORD:
            case RECORD_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case CLASS_KEYWORD:
                firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);
                return parseAttachPointIdent(sourceKeyword);
        }
    }
/**
* Parse dual-attach-point ident.
*
* @param sourceKeyword Source keyword
* @param firstIdent first part of the dual attach-point
* @return Parsed node
*/
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            // `object` must be followed by `function` or `field`.
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            // `resource` must be followed by `function`.
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            // `record` must be followed by `field`.
            secondIdent = parseFieldIdent();
            break;
        case SERVICE_KEYWORD:
            // `service` optionally continues as `service remote function`.
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
        default:
            // These stand alone: single-ident attach point.
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }
    STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
}
/**
* Parse remote ident.
*
* @return Parsed node
*/
private STNode parseRemoteIdent() {
    // Consume the `remote` ident; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.REMOTE_KEYWORD) {
        recover(nextToken, ParserRuleContext.REMOTE_IDENT);
        return parseRemoteIdent();
    }
    return consume();
}
/**
* Parse service attach point.
* <code>service-attach-point := service | service remote function</code>
*
* @return Parsed node
*/
private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
    STNode identList;
    STToken token = peek();
    switch (token.kind) {
        case REMOTE_KEYWORD:
            // `service remote function` form: three idents.
            STNode secondIdent = parseRemoteIdent();
            STNode thirdIdent = parseFunctionIdent();
            identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        case COMMA_TOKEN:
        case SEMICOLON_TOKEN:
            // Plain `service` form: attach-point list continues or ends here.
            identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        default:
            recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
    }
}
/**
* Parse the idents that are supported after object-ident.
*
* @return Parsed node
*/
private STNode parseIdentAfterObjectIdent() {
    // Only `function` and `field` may follow the `object` attach-point ident.
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.FUNCTION_KEYWORD || nextKind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    }
    recover(peek(), ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
    return parseIdentAfterObjectIdent();
}
/**
* Parse function ident.
*
* @return Parsed node
*/
private STNode parseFunctionIdent() {
    // Consume the `function` ident; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FUNCTION_KEYWORD) {
        recover(nextToken, ParserRuleContext.FUNCTION_IDENT);
        return parseFunctionIdent();
    }
    return consume();
}
/**
* Parse field ident.
*
* @return Parsed node
*/
private STNode parseFieldIdent() {
    // Consume the `field` ident; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FIELD_KEYWORD) {
        recover(nextToken, ParserRuleContext.FIELD_IDENT);
        return parseFieldIdent();
    }
    return consume();
}
/**
* Parse XML namespace declaration.
* <p>
* <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
* <br/>
* xml-namespace-uri := simple-const-expr
* <br/>
* xml-namespace-prefix := identifier
* </code>
*
 * @return Parsed XML namespace declaration node
*/
private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode namespaceUri = parseSimpleConstExpr();
    // Keep re-parsing until a valid URI expression is found; each invalid
    // expression is attached to the xmlns keyword as trailing invalid-node
    // minutiae with a diagnostic.
    while (!isValidXMLNameSpaceURI(namespaceUri)) {
        xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
        namespaceUri = parseSimpleConstExpr();
    }
    STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    endContext();
    return xmlnsDecl;
}
/**
* Parse xmlns keyword.
*
* @return Parsed node
*/
private STNode parseXMLNSKeyword() {
    // Consume the `xmlns` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XMLNS_KEYWORD) {
        recover(nextToken, ParserRuleContext.XMLNS_KEYWORD);
        return parseXMLNSKeyword();
    }
    return consume();
}
private boolean isValidXMLNameSpaceURI(STNode expr) {
    // Only string literals and (qualified/simple) name references are
    // accepted as an xmlns namespace URI; everything else is invalid.
    SyntaxKind kind = expr.kind;
    return kind == SyntaxKind.STRING_LITERAL || kind == SyntaxKind.QUALIFIED_NAME_REFERENCE
            || kind == SyntaxKind.SIMPLE_NAME_REFERENCE;
}
private STNode parseSimpleConstExpr() {
    // Parse a constant expression within its own parser context.
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STNode constExpr = parseSimpleConstExprInternal();
    endContext();
    return constExpr;
}
/**
 * Parse a simple constant expression.
*
* @return Parsed node
*/
private STNode parseSimpleConstExprInternal() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        // Literal constants.
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return parseBasicLiteral();
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Signed numeric literal, e.g. `-1`.
            return parseSignedIntOrFloat();
        case OPEN_PAREN_TOKEN:
            // `()` nil literal.
            return parseNilLiteral();
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                // A (possibly qualified) reference to a constant.
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
            return parseSimpleConstExprInternal();
    }
}
/**
 * Parse the portion after the namespace-uri of an XML declaration.
*
* @param xmlnsKeyword XMLNS keyword
* @param namespaceUri Namespace URI
* @return Parsed node
*/
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
    // The `as <prefix>` part is optional: default both to empty nodes.
    STNode asKeyword = STNodeFactory.createEmptyNode();
    STNode namespacePrefix = STNodeFactory.createEmptyNode();
    switch (peek().kind) {
        case AS_KEYWORD:
            asKeyword = parseAsKeyword();
            namespacePrefix = parseNamespacePrefix();
            break;
        case SEMICOLON_TOKEN:
            // No prefix; fall through to the terminating semicolon.
            break;
        default:
            recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar);
            return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    }
    STNode semicolon = parseSemicolon();
    // Module-level declarations get a distinct node kind from local ones.
    if (isModuleVar) {
        return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }
    return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
            semicolon);
}
/**
* Parse import prefix.
*
* @return Parsed node
*/
private STNode parseNamespacePrefix() {
    // Consume the prefix identifier; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(nextToken, ParserRuleContext.NAMESPACE_PREFIX);
        return parseNamespacePrefix();
    }
    return consume();
}
/**
* Parse named worker declaration.
* <p>
* <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
* </code>
*
* @param annots Annotations attached to the worker decl
* @param qualifiers Preceding transactional keyword in a list
* @return Parsed node
*/
private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
    // named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
    startContext(ParserRuleContext.NAMED_WORKER_DECL);
    // Only a single `transactional` qualifier survives validation.
    STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
    STNode workerKeyword = parseWorkerKeyword();
    STNode name = parseWorkerName();
    STNode returnTypeDesc = parseReturnTypeDescriptor();
    STNode body = parseBlockNode();
    endContext();
    return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, name,
            returnTypeDesc, body);
}
private STNode getTransactionalKeyword(List<STNode> qualifierList) {
    // Validate the preceding qualifiers: only a single `transactional`
    // keyword is allowed before a named worker declaration.
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate: attach to the previously accepted qualifier as invalid.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Disallowed qualifier in last position: attach to the next token.
            addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                    ((STToken) qualifier).text());
        } else {
            // Disallowed qualifier mid-list: attach to the following qualifier.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    STNode transactionalKeyword;
    if (validatedList.isEmpty()) {
        transactionalKeyword = STNodeFactory.createEmptyNode();
    } else {
        transactionalKeyword = validatedList.get(0);
    }
    return transactionalKeyword;
}
private STNode parseReturnTypeDescriptor() {
    // The return-type descriptor is optional: empty unless `returns` follows.
    if (peek().kind != SyntaxKind.RETURNS_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = consume();
    STNode annotations = parseOptionalAnnotations();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annotations, typeDesc);
}
/**
* Parse worker keyword.
*
* @return Parsed node
*/
private STNode parseWorkerKeyword() {
    // Consume the `worker` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WORKER_KEYWORD) {
        recover(nextToken, ParserRuleContext.WORKER_KEYWORD);
        return parseWorkerKeyword();
    }
    return consume();
}
/**
* Parse worker name.
* <p>
* <code>worker-name := identifier</code>
*
* @return Parsed node
*/
private STNode parseWorkerName() {
    // worker-name := identifier; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(nextToken, ParserRuleContext.WORKER_NAME);
        return parseWorkerName();
    }
    return consume();
}
/**
* Parse lock statement.
* <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
*
* @return Lock statement
*/
private STNode parseLockStatement() {
    // lock-stmt := lock block-stmt [on-fail-clause]
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKeyword = parseLockKeyword();
    STNode lockBody = parseBlockNode();
    endContext();
    // The optional on-fail clause is parsed outside the lock context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createLockStatementNode(lockKeyword, lockBody, onFailClause);
}
/**
* Parse lock-keyword.
*
* @return lock-keyword node
*/
private STNode parseLockKeyword() {
    // Consume the `lock` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LOCK_KEYWORD) {
        recover(nextToken, ParserRuleContext.LOCK_KEYWORD);
        return parseLockKeyword();
    }
    return consume();
}
/**
* Parse union type descriptor.
* union-type-descriptor := type-descriptor | type-descriptor
*
 * @param leftTypeDesc Type desc in the LHS of the union type desc.
* @param context Current context.
* @return parsed union type desc node
*/
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
        boolean isTypedBindingPattern) {
    // The caller has already verified the next token; consume the `|` directly.
    STNode pipeToken = consume();
    STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);
    return createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc);
}
private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
    // `var` is not a valid union member; validate both sides before combining.
    return STNodeFactory.createUnionTypeDescriptorNode(validateForUsageOfVar(leftTypeDesc), pipeToken,
            validateForUsageOfVar(rightTypeDesc));
}
/**
* Parse pipe token.
*
* @return parsed pipe token node
*/
private STNode parsePipeToken() {
    // Consume the `|` token; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.PIPE_TOKEN) {
        recover(nextToken, ParserRuleContext.PIPE);
        return parsePipeToken();
    }
    return consume();
}
/**
 * Check whether a token kind can start a type descriptor.
 *
 * @param nodeKind token kind to check
 * @return <code>true</code> if the token can start a type descriptor
 */
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case IDENTIFIER_TOKEN:
        case SERVICE_KEYWORD:
        case RECORD_KEYWORD:
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case OPEN_PAREN_TOKEN:
        case MAP_KEYWORD:
        case FUTURE_KEYWORD:
        case TYPEDESC_KEYWORD:
        case ERROR_KEYWORD:
        case XML_KEYWORD:
        case STREAM_KEYWORD:
        case TABLE_KEYWORD:
        case FUNCTION_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case DISTINCT_KEYWORD:
        case ISOLATED_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case TRANSACTION_KEYWORD:
            return true;
        default:
            // Singleton type descriptors (literal types) and simple built-in
            // types can also start a type descriptor.
            if (isSingletonTypeDescStart(nodeKind, true)) {
                return true;
            }
            return isSimpleType(nodeKind);
    }
}
/**
* Check if the token kind is a type descriptor in terminal expression.
* <p>
* simple-type-in-expr :=
* boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
*
* @param nodeKind token kind to check
* @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
*/
private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
    // `var` and `readonly` are simple types but are not valid in
    // expression position; everything else defers to isSimpleType.
    if (nodeKind == SyntaxKind.VAR_KEYWORD || nodeKind == SyntaxKind.READONLY_KEYWORD) {
        return false;
    }
    return isSimpleType(nodeKind);
}
/**
 * Check whether a token kind is a simple (built-in) type keyword,
 * including <code>var</code> and <code>readonly</code>.
 *
 * @param nodeKind token kind to check
 * @return <code>true</code> for a simple type keyword
 */
static boolean isSimpleType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case INT_KEYWORD:
        case FLOAT_KEYWORD:
        case DECIMAL_KEYWORD:
        case BOOLEAN_KEYWORD:
        case STRING_KEYWORD:
        case BYTE_KEYWORD:
        case JSON_KEYWORD:
        case HANDLE_KEYWORD:
        case ANY_KEYWORD:
        case ANYDATA_KEYWORD:
        case NEVER_KEYWORD:
        case VAR_KEYWORD:
        case READONLY_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Check whether a token kind is a predeclared module prefix
 * (e.g. <code>int:</code>, <code>xml:</code> in a qualified identifier).
 *
 * @param nodeKind token kind to check
 * @return <code>true</code> for a predeclared prefix keyword
 */
static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case BOOLEAN_KEYWORD:
        case DECIMAL_KEYWORD:
        case ERROR_KEYWORD:
        case FLOAT_KEYWORD:
        case FUTURE_KEYWORD:
        case INT_KEYWORD:
        case MAP_KEYWORD:
        case OBJECT_KEYWORD:
        case STREAM_KEYWORD:
        case STRING_KEYWORD:
        case TABLE_KEYWORD:
        case TRANSACTION_KEYWORD:
        case TYPEDESC_KEYWORD:
        case XML_KEYWORD:
            return true;
        default:
            return false;
    }
}
private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
    // A predeclared prefix starts a qualified identifier only when
    // the token after next is a colon.
    if (!isPredeclaredPrefix(nodeKind)) {
        return false;
    }
    return getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
}
/**
 * Map a built-in type keyword to its corresponding type-descriptor syntax kind.
 *
 * @param typeKeyword built-in type keyword kind
 * @return the matching type-descriptor kind
 */
private SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
    switch (typeKeyword) {
        case INT_KEYWORD:
            return SyntaxKind.INT_TYPE_DESC;
        case FLOAT_KEYWORD:
            return SyntaxKind.FLOAT_TYPE_DESC;
        case DECIMAL_KEYWORD:
            return SyntaxKind.DECIMAL_TYPE_DESC;
        case BOOLEAN_KEYWORD:
            return SyntaxKind.BOOLEAN_TYPE_DESC;
        case STRING_KEYWORD:
            return SyntaxKind.STRING_TYPE_DESC;
        case BYTE_KEYWORD:
            return SyntaxKind.BYTE_TYPE_DESC;
        case JSON_KEYWORD:
            return SyntaxKind.JSON_TYPE_DESC;
        case HANDLE_KEYWORD:
            return SyntaxKind.HANDLE_TYPE_DESC;
        case ANY_KEYWORD:
            return SyntaxKind.ANY_TYPE_DESC;
        case ANYDATA_KEYWORD:
            return SyntaxKind.ANYDATA_TYPE_DESC;
        case NEVER_KEYWORD:
            return SyntaxKind.NEVER_TYPE_DESC;
        case VAR_KEYWORD:
            return SyntaxKind.VAR_TYPE_DESC;
        case READONLY_KEYWORD:
            return SyntaxKind.READONLY_TYPE_DESC;
        default:
            // Should be unreachable: callers only pass built-in type keywords.
            assert false : typeKeyword + " is not a built-in type";
            return SyntaxKind.TYPE_REFERENCE;
    }
}
/**
* Parse fork-keyword.
*
* @return Fork-keyword node
*/
private STNode parseForkKeyword() {
    // Consume the `fork` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FORK_KEYWORD) {
        recover(nextToken, ParserRuleContext.FORK_KEYWORD);
        return parseForkKeyword();
    }
    return consume();
}
/**
* Parse fork statement.
* <code>fork-stmt := fork { named-worker-decl+ }</code>
*
* @return Fork statement
*/
private STNode parseForkStatement() {
    startContext(ParserRuleContext.FORK_STMT);
    STNode forkKeyword = parseForkKeyword();
    STNode openBrace = parseOpenBrace();
    // A fork body may contain only named worker declarations; every other
    // statement is reported and attached as invalid-node minutiae.
    ArrayList<STNode> workers = new ArrayList<>();
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        if (stmt.kind == SyntaxKind.LOCAL_TYPE_DEFINITION_STATEMENT) {
            addInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_LOCAL_TYPE_DEFINITION_NOT_ALLOWED);
            continue;
        }
        switch (stmt.kind) {
            case NAMED_WORKER_DECLARATION:
                workers.add(stmt);
                break;
            default:
                if (workers.isEmpty()) {
                    // No worker yet: attach the invalid statement to the open brace.
                    openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                } else {
                    // Attach the invalid statement to the last parsed worker.
                    updateLastNodeInListWithInvalidNode(workers, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                }
        }
    }
    STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
    STNode closeBrace = parseCloseBrace();
    endContext();
    STNode forkStmt =
            STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
    // The grammar requires at least one named worker declaration.
    if (isNodeListEmpty(namedWorkerDeclarations)) {
        return SyntaxErrors.addDiagnostic(forkStmt,
                DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
    }
    return forkStmt;
}
/**
* Parse trap expression.
* <p>
* <code>
* trap-expr := trap expression
* </code>
*
 * @param isRhsExpr Whether this is a RHS expression or not
 * @param allowActions Allow actions
* @return Trap expression node
*/
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    // trap-expr := trap expression. The node kind depends on whether the
    // trapped operand turned out to be an action or a plain expression.
    STNode trapKeyword = parseTrapKeyword();
    STNode operand = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
    SyntaxKind nodeKind = isAction(operand) ? SyntaxKind.TRAP_ACTION : SyntaxKind.TRAP_EXPRESSION;
    return STNodeFactory.createTrapExpressionNode(nodeKind, trapKeyword, operand);
}
/**
* Parse trap-keyword.
*
* @return Trap-keyword node
*/
private STNode parseTrapKeyword() {
    // Consume the `trap` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TRAP_KEYWORD) {
        recover(nextToken, ParserRuleContext.TRAP_KEYWORD);
        return parseTrapKeyword();
    }
    return consume();
}
/**
* Parse list constructor expression.
* <p>
* <code>
* list-constructor-expr := [ [ expr-list ] ]
* <br/>
* expr-list := expression (, expression)*
* </code>
*
* @return Parsed node
*/
private STNode parseListConstructorExpr() {
    // list-constructor-expr := [ [ expr-list ] ]
    startContext(ParserRuleContext.LIST_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode memberExprs = parseOptionalExpressionsList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createListConstructorExpressionNode(openBracket, memberExprs, closeBracket);
}
/**
* Parse optional expression list.
*
* @return Parsed node
*/
private STNode parseOptionalExpressionsList() {
    List<STNode> expressions = new ArrayList<>();
    // Empty list constructor: `[]`.
    if (isEndOfListConstructor(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // Parse the first member, then the comma-separated remainder.
    STNode expr = parseExpression();
    expressions.add(expr);
    return parseOptionalExpressionsList(expressions);
}
private STNode parseOptionalExpressionsList(List<STNode> expressions) {
    // Continue `, expression` pairs until `]`/EOF. Separators are kept in
    // the node list alongside the member expressions.
    STNode listConstructorMemberEnd;
    while (!isEndOfListConstructor(peek().kind)) {
        listConstructorMemberEnd = parseListConstructorMemberEnd();
        if (listConstructorMemberEnd == null) {
            break;
        }
        expressions.add(listConstructorMemberEnd);
        STNode expr = parseExpression();
        expressions.add(expr);
    }
    return STNodeFactory.createNodeList(expressions);
}
private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
    // A list constructor ends at `]`, or at EOF during recovery.
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN;
}
private STNode parseListConstructorMemberEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            // Separator before the next member.
            return consume();
        case CLOSE_BRACKET_TOKEN:
            // End of the list: no separator.
            return null;
        default:
            recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
            return parseListConstructorMemberEnd();
    }
}
/**
* Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
*
* @return foreach statement
*/
private STNode parseForEachStatement() {
    // foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]
    startContext(ParserRuleContext.FOREACH_STMT);
    STNode forEachKeyword = parseForEachKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
    STNode inKeyword = parseInKeyword();
    STNode iterable = parseActionOrExpression();
    STNode body = parseBlockNode();
    endContext();
    // The optional on-fail clause is parsed outside the foreach context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createForEachStatementNode(forEachKeyword, bindingPattern, inKeyword, iterable,
            body, onFailClause);
}
/**
* Parse foreach-keyword.
*
* @return ForEach-keyword node
*/
private STNode parseForEachKeyword() {
    // Consume the `foreach` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FOREACH_KEYWORD) {
        recover(nextToken, ParserRuleContext.FOREACH_KEYWORD);
        return parseForEachKeyword();
    }
    return consume();
}
/**
* Parse in-keyword.
*
* @return In-keyword node
*/
private STNode parseInKeyword() {
    // Consume the `in` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IN_KEYWORD) {
        recover(nextToken, ParserRuleContext.IN_KEYWORD);
        return parseInKeyword();
    }
    return consume();
}
/**
* Parse type cast expression.
* <p>
* <code>
* type-cast-expr := < type-cast-param > expression
* <br/>
* type-cast-param := [annots] type-descriptor | annots
* </code>
*
* @return Parsed node
*/
private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    // NOTE: the TYPE_CAST context opened here is closed by the overload below.
    startContext(ParserRuleContext.TYPE_CAST);
    STNode ltToken = parseLTToken();
    return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);
}
private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
        boolean isInConditionalExpr) {
    STNode typeCastParam = parseTypeCastParam();
    STNode gtToken = parseGTToken();
    // Closes the TYPE_CAST context opened by the caller overload.
    endContext();
    // The operand binds at expression-action precedence.
    STNode expression =
            parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
    return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
}
private STNode parseTypeCastParam() {
    // type-cast-param := [annots] type-descriptor | annots
    STNode annot;
    STNode type;
    STToken token = peek();
    switch (token.kind) {
        case AT_TOKEN:
            annot = parseOptionalAnnotations();
            token = peek();
            if (isTypeStartingToken(token.kind)) {
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
            } else {
                // Annotations-only form: no type descriptor.
                type = STNodeFactory.createEmptyNode();
            }
            break;
        default:
            // No annotations: a type descriptor is required.
            annot = STNodeFactory.createEmptyNode();
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
            break;
    }
    return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
}
/**
* Parse table constructor expression.
* <p>
* <code>
* table-constructor-expr-rhs := [ [row-list] ]
* </code>
*
* @param tableKeyword tableKeyword that precedes this rhs
* @param keySpecifier keySpecifier that precedes this rhs
* @return Parsed node
*/
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
    // table-constructor-expr-rhs := [ [row-list] ]
    switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode rows = parseRowList();
    STNode closeBracket = parseCloseBracket();
    return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rows,
            closeBracket);
}
/**
* Parse table-keyword.
*
* @return Table-keyword node
*/
private STNode parseTableKeyword() {
    // Consume the `table` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TABLE_KEYWORD) {
        recover(nextToken, ParserRuleContext.TABLE_KEYWORD);
        return parseTableKeyword();
    }
    return consume();
}
/**
* Parse table rows.
* <p>
* <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
*
* @return Parsed node
*/
private STNode parseRowList() {
    STToken nextToken = peek();
    // Empty row list: `[]`.
    if (isEndOfTableRowList(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // First mapping constructor, then `, mapping-constructor` pairs.
    // Separators are kept in the node list alongside the rows.
    List<STNode> mappings = new ArrayList<>();
    STNode mapExpr = parseMappingConstructorExpr();
    mappings.add(mapExpr);
    nextToken = peek();
    STNode rowEnd;
    while (!isEndOfTableRowList(nextToken.kind)) {
        rowEnd = parseTableRowEnd();
        if (rowEnd == null) {
            break;
        }
        mappings.add(rowEnd);
        mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(mappings);
}
private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACKET_TOKEN:
            return true;
        case COMMA_TOKEN:
        case OPEN_BRACE_TOKEN:
            // A separator or the start of another mapping continues the list.
            return false;
        default:
            // Anything else ends the row list when it would also end a
            // mapping constructor.
            return isEndOfMappingConstructor(tokenKind);
    }
}
private STNode parseTableRowEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            // Separator before the next row.
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
            // End of the row list: no separator.
            return null;
        default:
            recover(peek(), ParserRuleContext.TABLE_ROW_END);
            return parseTableRowEnd();
    }
}
/**
* Parse key specifier.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier() {
    // key-specifier := key ( [ field-name (, field-name)* ] )
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode keyKeyword = parseKeyKeyword();
    STNode openParenToken = parseOpenParenthesis();
    STNode fieldNameList = parseFieldNames();
    STNode closeParenToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeyword, openParenToken, fieldNameList, closeParenToken);
}
/**
* Parse key-keyword.
*
* @return Key-keyword node
*/
private STNode parseKeyKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.KEY_KEYWORD) {
        return consume();
    }
    // `key` is contextual: it may have been lexed as a plain identifier
    // whose text is "key". Convert it to a KEY_KEYWORD token.
    if (isKeyKeyword(token)) {
        return getKeyKeyword(consume());
    }
    recover(token, ParserRuleContext.KEY_KEYWORD);
    return parseKeyKeyword();
}
static boolean isKeyKeyword(STToken token) {
    // `key` is contextual: an identifier token whose text is "key".
    if (token.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        return false;
    }
    return LexerTerminals.KEY.equals(token.text());
}
private STNode getKeyKeyword(STToken token) {
    // Re-create the identifier token as a KEY_KEYWORD token, preserving
    // its minutiae and diagnostics.
    return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
            token.diagnostics());
}
/**
* Parse field names.
* <p>
* <code>field-name-list := [ field-name (, field-name)* ]</code>
*
* @return Parsed node
*/
private STNode parseFieldNames() {
    STToken nextToken = peek();
    // Empty field-name list: `key()`.
    if (isEndOfFieldNamesList(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // First field name, then `, field-name` pairs. Separators are kept
    // in the node list alongside the names.
    List<STNode> fieldNames = new ArrayList<>();
    STNode fieldName = parseVariableName();
    fieldNames.add(fieldName);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfFieldNamesList(nextToken.kind)) {
        leadingComma = parseComma();
        fieldNames.add(leadingComma);
        fieldName = parseVariableName();
        fieldNames.add(fieldName);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(fieldNames);
}
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    // The list continues while we see separators or identifiers;
    // any other token ends it.
    return tokenKind != SyntaxKind.COMMA_TOKEN && tokenKind != SyntaxKind.IDENTIFIER_TOKEN;
}
/**
* Parse error type descriptor.
* <p>
* error-type-descriptor := error [type-parameter]
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseErrorTypeDescriptor() {
    // Consume the leading `error` keyword, then delegate for the
    // optional type parameter.
    return parseErrorTypeDescriptor(parseErrorKeyword());
}
private STNode parseErrorTypeDescriptor(STNode errorKeywordToken) {
    // The `<type-desc>` parameter is optional: only parse it when the
    // next token opens an angle bracket.
    STNode errorTypeParamsNode = peek().kind == SyntaxKind.LT_TOKEN ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, errorTypeParamsNode);
}
/**
* Parse error-keyword.
*
* @return Parsed error-keyword node
*/
private STNode parseErrorKeyword() {
    // Consume the `error` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ERROR_KEYWORD) {
        recover(nextToken, ParserRuleContext.ERROR_KEYWORD);
        return parseErrorKeyword();
    }
    return consume();
}
/**
* Parse typedesc type descriptor.
* typedesc-type-descriptor := typedesc type-parameter
*
* @return Parsed typedesc type node
*/
private STNode parseTypedescTypeDescriptor(STNode typedescKeywordToken) {
    // The type parameter is optional: only parse it after a `<`.
    STNode typedescTypeParamsNode = peek().kind == SyntaxKind.LT_TOKEN ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typedescTypeParamsNode);
}
/**
* Parse stream type descriptor.
* <p>
* stream-type-descriptor := stream [stream-type-parameters]
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type descriptor node
*/
private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
    // The stream type parameters are optional: only parse them after a `<`.
    STNode streamTypeParamsNode = peek().kind == SyntaxKind.LT_TOKEN ? parseStreamTypeParamsNode()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
}
/**
* Parse xml type descriptor.
* xml-type-descriptor := xml type-parameter
*
* @return Parsed typedesc type node
*/
private STNode parseXmlTypeDescriptor(STNode xmlKeywordToken) {
    // The type parameter is optional: only parse it after a `<`.
    STNode typedescTypeParamsNode = peek().kind == SyntaxKind.LT_TOKEN ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, typedescTypeParamsNode);
}
/**
* Parse stream type params node.
* <p>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type params node
*/
private STNode parseStreamTypeParamsNode() {
    STNode ltToken = parseLTToken();
    startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    // The overload parses the optional `, type-descriptor` and the `>`.
    STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    endContext();
    return streamTypedesc;
}
private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
    STNode commaToken, rightTypeDescNode, gtToken;
    switch (peek().kind) {
        case COMMA_TOKEN:
            // Two-parameter form: `stream<T, C>`.
            commaToken = parseComma();
            rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
            break;
        case GT_TOKEN:
            // Single-parameter form: `stream<T>`.
            commaToken = STNodeFactory.createEmptyNode();
            rightTypeDescNode = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);
            return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    }
    gtToken = parseGTToken();
    return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
            gtToken);
}
/**
* Parse stream-keyword.
*
* @return Parsed stream-keyword node
*/
private STNode parseStreamKeyword() {
    // Consume the `stream` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.STREAM_KEYWORD) {
        recover(nextToken, ParserRuleContext.STREAM_KEYWORD);
        return parseStreamKeyword();
    }
    return consume();
}
/**
* Parse let expression.
* <p>
* <code>
* let-expr := let let-var-decl [, let-var-decl]* in expression
* </code>
*
* @return Parsed node
*/
private STNode parseLetExpression(boolean isRhsExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
    STNode inKeyword = parseInKeyword();
    // At least one let-var-decl is required: attach a diagnostic to the
    // `let` keyword if the list came back empty.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    // The in-expression binds at query precedence.
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
}
/**
* Parse let-keyword.
*
* @return Let-keyword node
*/
private STNode parseLetKeyword() {
    // Consume the `let` keyword; on a mismatch, recover and retry.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LET_KEYWORD) {
        recover(nextToken, ParserRuleContext.LET_KEYWORD);
        return parseLetKeyword();
    }
    return consume();
}
/**
* Parse let variable declarations.
* <p>
* <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
*
* @return Parsed node
*/
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
    startContext(context);
    List<STNode> varDecls = new ArrayList<>();
    STToken nextToken = peek();
    // Empty declaration list (the caller reports the missing-decl diagnostic).
    if (isEndOfLetVarDeclarations(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    // First declaration, then `, let-var-decl` pairs. Separators are kept
    // in the node list alongside the declarations.
    STNode varDec = parseLetVarDecl(isRhsExpr);
    varDecls.add(varDec);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndOfLetVarDeclarations(nextToken.kind)) {
        leadingComma = parseComma();
        varDecls.add(leadingComma);
        varDec = parseLetVarDecl(isRhsExpr);
        varDecls.add(varDec);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(varDecls);
}
private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case COMMA_TOKEN:
        case AT_TOKEN:
            // A separator or an annotation continues the list.
            return false;
        case IN_KEYWORD:
            // `in` terminates the let-var-decl list.
            return true;
        default:
            // A declaration starts with a type descriptor; anything that
            // cannot start a type ends the list.
            return !isTypeStartingToken(tokenKind);
    }
}
/**
* Parse let variable declaration.
* <p>
* <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
*
* @return Parsed node
*/
private STNode parseLetVarDecl(boolean isRhsExpr) {
STNode annot = parseOptionalAnnotations();
STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
STNode assign = parseAssignOp();
STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);
}
/**
* Parse raw backtick string template expression.
* <p>
* <code>BacktickString := `expression`</code>
*
* @return Template expression node
*/
private STNode parseTemplateExpression() {
STNode type = STNodeFactory.createEmptyNode();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
private STNode parseTemplateContent() {
List<STNode> items = new ArrayList<>();
STToken nextToken = peek();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode contentItem = parseTemplateItem();
items.add(contentItem);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
private boolean isEndOfBacktickContent(SyntaxKind kind) {
switch (kind) {
case EOF_TOKEN:
case BACKTICK_TOKEN:
return true;
default:
return false;
}
}
private STNode parseTemplateItem() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return parseInterpolation();
}
return consume();
}
/**
* Parse string template expression.
* <p>
* <code>string-template-expr := string ` expression `</code>
*
* @return String template expression node
*/
private STNode parseStringTemplateExpression() {
STNode type = parseStringKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
/**
* Parse <code>string</code> keyword.
*
* @return string keyword node
*/
private STNode parseStringKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STRING_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.STRING_KEYWORD);
return parseStringKeyword();
}
}
/**
* Parse XML template expression.
* <p>
* <code>xml-template-expr := xml BacktickString</code>
*
* @return XML template expression
*/
private STNode parseXMLTemplateExpression() {
STNode xmlKeyword = parseXMLKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContentAsXML();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
startingBackTick, content, endingBackTick);
}
/**
* Parse <code>xml</code> keyword.
*
* @return xml keyword node
*/
private STNode parseXMLKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.XML_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.XML_KEYWORD);
return parseXMLKeyword();
}
}
/**
* Parse the content of the template string as XML. This method first read the
* input in the same way as the raw-backtick-template (BacktickString). Then
* it parses the content as XML.
*
* @return XML node
*/
private STNode parseTemplateContentAsXML() {
ArrayDeque<STNode> expressions = new ArrayDeque<>();
StringBuilder xmlStringBuilder = new StringBuilder();
STToken nextToken = peek();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode contentItem = parseTemplateItem();
if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
xmlStringBuilder.append(((STToken) contentItem).text());
} else {
xmlStringBuilder.append("${}");
expressions.add(contentItem);
}
nextToken = peek();
}
CharReader charReader = CharReader.from(xmlStringBuilder.toString());
AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
XMLParser xmlParser = new XMLParser(tokenReader, expressions);
return xmlParser.parse();
}
/**
* Parse interpolation of a back-tick string.
* <p>
* <code>
* interpolation := ${ expression }
* </code>
*
* @return Interpolation node
*/
    private STNode parseInterpolation() {
        // interpolation := ${ expression }
        startContext(ParserRuleContext.INTERPOLATION);
        STNode interpolStart = parseInterpolationStart();
        STNode expr = parseExpression();
        // Any leftover tokens before the closing brace are consumed and attached
        // to the expression as invalid-token minutiae, so the tree stays lossless.
        while (!isEndOfInterpolation()) {
            STToken nextToken = consume();
            expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
        }
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
    }
private boolean isEndOfInterpolation() {
SyntaxKind nextTokenKind = peek().kind;
switch (nextTokenKind) {
case EOF_TOKEN:
case BACKTICK_TOKEN:
return true;
default:
ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&
currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
}
}
/**
* Parse interpolation start token.
* <p>
* <code>interpolation-start := ${</code>
*
* @return Interpolation start token
*/
private STNode parseInterpolationStart() {
STToken token = peek();
if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
return parseInterpolationStart();
}
}
/**
* Parse back-tick token.
*
* @return Back-tick token
*/
private STNode parseBacktickToken(ParserRuleContext ctx) {
STToken token = peek();
if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
return consume();
} else {
recover(token, ctx);
return parseBacktickToken(ctx);
}
}
/**
* Parse table type descriptor.
* <p>
* table-type-descriptor := table row-type-parameter [key-constraint]
* row-type-parameter := type-parameter
* key-constraint := key-specifier | key-type-constraint
* key-specifier := key ( [ field-name (, field-name)* ] )
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed table type desc node.
*/
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
STNode rowTypeParameterNode = parseRowTypeParameter();
STNode keyConstraintNode;
STToken nextToken = peek();
if (isKeyKeyword(nextToken)) {
STNode keyKeywordToken = getKeyKeyword(consume());
keyConstraintNode = parseKeyConstraint(keyKeywordToken);
} else {
keyConstraintNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}
/**
* Parse row type parameter node.
* <p>
* row-type-parameter := type-parameter
* </p>
*
* @return Parsed node.
*/
private STNode parseRowTypeParameter() {
startContext(ParserRuleContext.ROW_TYPE_PARAM);
STNode rowTypeParameterNode = parseTypeParameter();
endContext();
return rowTypeParameterNode;
}
/**
* Parse type parameter node.
* <p>
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseTypeParameter() {
STNode ltToken = parseLTToken();
STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
STNode gtToken = parseGTToken();
return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}
/**
* Parse key constraint.
* <p>
* key-constraint := key-specifier | key-type-constraint
* </p>
*
* @return Parsed node.
*/
private STNode parseKeyConstraint(STNode keyKeywordToken) {
switch (peek().kind) {
case OPEN_PAREN_TOKEN:
return parseKeySpecifier(keyKeywordToken);
case LT_TOKEN:
return parseKeyTypeConstraint(keyKeywordToken);
default:
recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
return parseKeyConstraint(keyKeywordToken);
}
}
/**
* Parse key specifier given parsed key keyword token.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier(STNode keyKeywordToken) {
startContext(ParserRuleContext.KEY_SPECIFIER);
STNode openParenToken = parseOpenParenthesis();
STNode fieldNamesNode = parseFieldNames();
STNode closeParenToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
}
/**
* Parse key type constraint.
* <p>
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed node
*/
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
STNode typeParameterNode = parseTypeParameter();
return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
}
/**
* Parse function type descriptor.
* <p>
* <code>function-type-descriptor := [isolated] function function-signature</code>
*
* @param qualifiers Preceding type descriptor qualifiers
* @return Function type descriptor node
*/
private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
startContext(ParserRuleContext.FUNC_TYPE_DESC);
STNode qualifierList;
STNode functionKeyword = parseFunctionKeyword();
STNode signature;
switch (peek().kind) {
case OPEN_PAREN_TOKEN:
signature = parseFuncSignature(true);
qualifierList = createFuncTypeQualNodeList(qualifiers, true);
break;
default:
signature = STNodeFactory.createEmptyNode();
qualifierList = createFuncTypeQualNodeList(qualifiers, false);
break;
}
endContext();
return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
}
    // Validates the qualifiers of a function type descriptor. Rejected
    // qualifiers are not dropped: they are attached as invalid-node minutiae
    // (to a neighbor in the list, or to the next token) so the tree stays lossless.
    private STNode createFuncTypeQualNodeList(List<STNode> qualifierList, boolean hasFuncSignature) {
        List<STNode> validatedList = new ArrayList<>();
        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;
            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Duplicate qualifier: attach it to the previously accepted one.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
                // With a signature, both `isolated` and `transactional` are allowed.
                validatedList.add(qualifier);
            } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
                // Without a signature, only `isolated` is allowed.
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Disallowed qualifier in last position: attach it to the next token.
                addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                        ((STToken) qualifier).text());
            } else {
                // Disallowed qualifier mid-list: attach it to its successor.
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }
        return STNodeFactory.createNodeList(validatedList);
    }
private boolean isRegularFuncQual(SyntaxKind tokenKind) {
switch (tokenKind) {
case ISOLATED_KEYWORD:
case TRANSACTIONAL_KEYWORD:
return true;
default:
return false;
}
}
/**
* Parse explicit anonymous function expression.
* <p>
* <code>explicit-anonymous-function-expr :=
* [annots] (isolated| transactional) function function-signature anon-func-body</code>
*
* @param annots Annotations.
* @param qualifiers Function qualifiers
* @param isRhsExpr Is expression in rhs context
* @return Anonymous function expression node
*/
private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
STNode qualifierList = createFuncTypeQualNodeList(qualifiers, true);
STNode funcKeyword = parseFunctionKeyword();
STNode funcSignature = parseFuncSignature(false);
STNode funcBody = parseAnonFuncBody(isRhsExpr);
return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,
funcSignature, funcBody);
}
/**
* Parse anonymous function body.
* <p>
* <code>anon-func-body := block-function-body | expr-function-body</code>
*
* @param isRhsExpr Is expression in rhs context
* @return Anon function body node
*/
private STNode parseAnonFuncBody(boolean isRhsExpr) {
switch (peek().kind) {
case OPEN_BRACE_TOKEN:
case EOF_TOKEN:
STNode body = parseFunctionBodyBlock(true);
endContext();
return body;
case RIGHT_DOUBLE_ARROW_TOKEN:
endContext();
return parseExpressionFuncBody(true, isRhsExpr);
default:
recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);
return parseAnonFuncBody(isRhsExpr);
}
}
/**
* Parse expression function body.
* <p>
* <code>expr-function-body := => expression</code>
*
* @param isAnon Is anonymous function.
* @param isRhsExpr Is expression in rhs context
* @return Expression function body node
*/
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
STNode rightDoubleArrow = parseDoubleRightArrow();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
STNode semiColon;
if (isAnon) {
semiColon = STNodeFactory.createEmptyNode();
} else {
semiColon = parseSemicolon();
}
return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}
/**
* Parse '=>' token.
*
* @return Double right arrow token
*/
private STNode parseDoubleRightArrow() {
STToken token = peek();
if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
return parseDoubleRightArrow();
}
}
    // Parses the `=> expression` tail of an implicit anonymous function, first
    // normalizing the already-parsed `params` node into a valid parameter form.
    private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
        switch (params.kind) {
            case SIMPLE_NAME_REFERENCE:
            case INFER_PARAM_LIST:
                break;
            case BRACED_EXPRESSION:
                // `(x)` was parsed as a braced expression; reinterpret it as a
                // single-parameter infer-param list.
                params = getAnonFuncParam((STBracedExpressionNode) params);
                break;
            default:
                // Any other expression is not a valid parameter list: substitute a
                // synthetic identifier and keep the original as invalid minutiae.
                STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                        DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
                params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
        }
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
    }
/**
* Create a new anon-func-param node from a braced expression.
*
* @param bracedExpression Braced expression
* @return Anon-func param node
*/
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
List<STNode> paramList = new ArrayList<>();
STNode innerExpression = bracedExpression.expression;
STNode openParen = bracedExpression.openParen;
if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
paramList.add(innerExpression);
} else {
openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,
DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
}
return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
}
/**
* Parse implicit anon function expression.
*
* @param openParen Open parenthesis token
* @param firstParam First parameter
* @param isRhsExpr Is expression in rhs context
* @return Implicit anon function expression node
*/
    private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
        // Continue parsing an infer-param list whose `(` and first parameter were
        // already consumed by the caller, then delegate to the single-arg overload
        // for the `=> expression` tail.
        List<STNode> paramList = new ArrayList<>();
        paramList.add(firstParam);
        STToken nextToken = peek();
        STNode paramEnd;
        STNode param;
        while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
            paramEnd = parseImplicitAnonFuncParamEnd();
            // A null separator means the closing parenthesis was reached.
            if (paramEnd == null) {
                break;
            }
            paramList.add(paramEnd);
            param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
            param = STNodeFactory.createSimpleNameReferenceNode(param);
            paramList.add(param);
            nextToken = peek();
        }
        STNode params = STNodeFactory.createNodeList(paramList);
        STNode closeParen = parseCloseParenthesis();
        endContext();
        STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        return parseImplicitAnonFunc(inferedParams, isRhsExpr);
    }
private STNode parseImplicitAnonFuncParamEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_PAREN_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
return parseImplicitAnonFuncParamEnd();
}
}
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case RETURNS_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case DO_KEYWORD:
case OPEN_BRACE_TOKEN:
case RIGHT_DOUBLE_ARROW_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse tuple type descriptor.
* <p>
* <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
* <br/><br/>
* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
* | [ tuple-rest-descriptor ]
* <br/><br/>
* tuple-rest-descriptor := type-descriptor ...
* </code>
*
     * @return Parsed tuple type descriptor node
*/
private STNode parseTupleTypeDesc() {
STNode openBracket = parseOpenBracket();
startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
STNode memberTypeDesc = parseTupleMemberTypeDescList();
STNode closeBracket = parseCloseBracket();
endContext();
openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,
DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);
return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
}
/**
* Parse tuple member type descriptors.
*
* @return Parsed node
*/
private STNode parseTupleMemberTypeDescList() {
List<STNode> typeDescList = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfTypeList(nextToken.kind)) {
return STNodeFactory.createEmptyNodeList();
}
STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
return parseTupleTypeMembers(typeDesc, typeDescList);
}
    // Parses the remaining tuple members given the first member type descriptor.
    // A rest descriptor (`T...`) must be last; anything after it is invalidated.
    private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) {
        STNode tupleMemberRhs = parseTypeDescInTupleRhs();
        if (tupleMemberRhs != null) {
            // The first member itself is a rest descriptor.
            typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
        }
        while (!isEndOfTypeList(peek().kind)) {
            tupleMemberRhs = parseTupleMemberRhs();
            if (tupleMemberRhs == null) {
                break;
            }
            if (typeDesc.kind == SyntaxKind.REST_TYPE) {
                // Members after a rest descriptor are attached as invalid minutiae.
                typeDesc = invalidateTypeDescAfterRestDesc(typeDesc);
                break;
            }
            typeDescList.add(typeDesc);
            typeDescList.add(tupleMemberRhs);
            typeDesc = parseMemberDescriptor();
        }
        typeDescList.add(typeDesc);
        return STNodeFactory.createNodeList(typeDescList);
    }
private STNode parseMemberDescriptor() {
STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
STNode tupleMemberRhs = parseTypeDescInTupleRhs();
if (tupleMemberRhs != null) {
return STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
}
return typeDesc;
}
    // Consumes any tuple members that appear after a rest descriptor and attaches
    // them (with their separators) to the rest descriptor as invalid minutiae.
    private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
        while (!isEndOfTypeList(peek().kind)) {
            STNode tupleMemberRhs = parseTupleMemberRhs();
            if (tupleMemberRhs == null) {
                break;
            }
            // The separator carries no diagnostic; the member itself gets the error.
            restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
            restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseMemberDescriptor(),
                    DiagnosticErrorCode.ERROR_TYPEDESC_AFTER_REST_DESCRIPTOR);
        }
        return restDescriptor;
    }
private STNode parseTupleMemberRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
return parseTupleMemberRhs();
}
}
private STNode parseTypeDescInTupleRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
case CLOSE_BRACKET_TOKEN:
return null;
case ELLIPSIS_TOKEN:
return parseEllipsis();
default:
recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
return parseTypeDescInTupleRhs();
}
}
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case CLOSE_BRACKET_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case EOF_TOKEN:
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse table constructor or query expression.
* <p>
* <code>
* table-constructor-or-query-expr := table-constructor-expr | query-expr
* <br/>
* table-constructor-expr := table [key-specifier] [ [row-list] ]
* <br/>
* query-expr := [query-construct-type] query-pipeline select-clause
* [query-construct-type] query-pipeline select-clause on-conflict-clause?
* <br/>
* query-construct-type := table key-specifier | stream
* </code>
*
* @return Parsed node
*/
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr);
endContext();
return tableOrQueryExpr;
}
private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {
STNode queryConstructType;
switch (peek().kind) {
case FROM_KEYWORD:
queryConstructType = STNodeFactory.createEmptyNode();
return parseQueryExprRhs(queryConstructType, isRhsExpr);
case STREAM_KEYWORD:
queryConstructType = parseQueryConstructType(parseStreamKeyword(), null);
return parseQueryExprRhs(queryConstructType, isRhsExpr);
case TABLE_KEYWORD:
STNode tableKeyword = parseTableKeyword();
return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
default:
recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
return parseTableConstructorOrQueryInternal(isRhsExpr);
}
}
    // Disambiguates between a table constructor and a table-typed query once the
    // `table` keyword has been consumed.
    private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
        STNode keySpecifier;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // `table [` must be a table constructor with no key specifier.
                keySpecifier = STNodeFactory.createEmptyNode();
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            case KEY_KEYWORD:
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            case IDENTIFIER_TOKEN:
                // `key` can be lexed as a plain identifier; check its text.
                if (isKeyKeyword(nextToken)) {
                    keySpecifier = parseKeySpecifier();
                    return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
                }
                break;
            default:
                break;
        }
        // Unexpected token after `table`: recover and retry.
        recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
        return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
    }
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
switch (peek().kind) {
case FROM_KEYWORD:
return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
case OPEN_BRACKET_TOKEN:
return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
default:
recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier,
isRhsExpr);
return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
}
}
/**
* Parse query construct type.
* <p>
* <code>query-construct-type := table key-specifier | stream</code>
*
* @return Parsed node
*/
    private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
        // query-construct-type := table key-specifier | stream
        // keySpecifier is null/empty for the `stream` form.
        return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
    }
/**
* Parse query action or expression.
* <p>
* <code>
* query-expr-rhs := query-pipeline select-clause
* query-pipeline select-clause on-conflict-clause?
* <br/>
* query-pipeline := from-clause intermediate-clause*
* </code>
*
* @param queryConstructType queryConstructType that precedes this rhs
* @return Parsed node
*/
    // Parses the query pipeline that follows an optional construct type:
    // from-clause intermediate-clause* (select-clause [on-conflict] | do-clause).
    private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
        switchContext(ParserRuleContext.QUERY_EXPRESSION);
        STNode fromClause = parseFromClause(isRhsExpr);
        List<STNode> clauses = new ArrayList<>();
        STNode intermediateClause;
        STNode selectClause = null;
        while (!isEndOfIntermediateClause(peek().kind)) {
            intermediateClause = parseIntermediateClause(isRhsExpr);
            if (intermediateClause == null) {
                break;
            }
            // Only one select clause is allowed; anything after it is attached
            // to the select clause as invalid minutiae.
            if (selectClause != null) {
                selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                        DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
                continue;
            }
            if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
                selectClause = intermediateClause;
            } else {
                clauses.add(intermediateClause);
            }
        }
        // A trailing `do` makes this a query action rather than a query expression.
        if (peek().kind == SyntaxKind.DO_KEYWORD) {
            STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
            STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
            return parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr);
        }
        // A query expression must end with a select clause; synthesize a missing
        // one and report the error on the last clause (or the from clause).
        if (selectClause == null) {
            STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
            STNode expr = STNodeFactory
                    .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);
            if (clauses.isEmpty()) {
                fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            } else {
                int lastIndex = clauses.size() - 1;
                STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                        DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
                clauses.set(lastIndex, intClauseWithDiagnostic);
            }
        }
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        STNode onConflictClause = parseOnConflictClause(isRhsExpr);
        return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,
                onConflictClause);
    }
/**
* Parse an intermediate clause.
* <p>
* <code>
* intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
* </code>
*
* @return Parsed node
*/
    // Dispatches on the next token to the matching query-clause parser.
    // Returns null when the pipeline has ended (do/on-conflict/semicolon).
    private STNode parseIntermediateClause(boolean isRhsExpr) {
        switch (peek().kind) {
            case FROM_KEYWORD:
                return parseFromClause(isRhsExpr);
            case WHERE_KEYWORD:
                return parseWhereClause(isRhsExpr);
            case LET_KEYWORD:
                return parseLetClause(isRhsExpr);
            case SELECT_KEYWORD:
                return parseSelectClause(isRhsExpr);
            case JOIN_KEYWORD:
            case OUTER_KEYWORD:
                return parseJoinClause(isRhsExpr);
            case ORDER_KEYWORD:
            case BY_KEYWORD:
            case ASCENDING_KEYWORD:
            case DESCENDING_KEYWORD:
                // Any order-by fragment starts an order-by clause; the clause
                // parser recovers the missing pieces.
                return parseOrderByClause(isRhsExpr);
            case LIMIT_KEYWORD:
                return parseLimitClause(isRhsExpr);
            case DO_KEYWORD:
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
            case CONFLICT_KEYWORD:
                // End of the intermediate clauses.
                return null;
            default:
                recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);
                return parseIntermediateClause(isRhsExpr);
        }
    }
/**
* Parse join-keyword.
*
* @return Join-keyword node
*/
private STNode parseJoinKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.JOIN_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.JOIN_KEYWORD);
return parseJoinKeyword();
}
}
/**
* Parse equals keyword.
*
* @return Parsed node
*/
private STNode parseEqualsKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.EQUALS_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.EQUALS_KEYWORD);
return parseEqualsKeyword();
}
}
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
switch (tokenKind) {
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case OPEN_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case EOF_TOKEN:
case RESOURCE_KEYWORD:
case LISTENER_KEYWORD:
case DOCUMENTATION_STRING:
case PRIVATE_KEYWORD:
case RETURNS_KEYWORD:
case SERVICE_KEYWORD:
case TYPE_KEYWORD:
case CONST_KEYWORD:
case FINAL_KEYWORD:
case DO_KEYWORD:
return true;
default:
return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
}
}
/**
* Parse from clause.
* <p>
* <code>from-clause := from typed-binding-pattern in expression</code>
*
* @return Parsed node
*/
private STNode parseFromClause(boolean isRhsExpr) {
STNode fromKeyword = parseFromKeyword();
STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
STNode inKeyword = parseInKeyword();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);
}
/**
* Parse from-keyword.
*
* @return From-keyword node
*/
private STNode parseFromKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FROM_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.FROM_KEYWORD);
return parseFromKeyword();
}
}
/**
* Parse where clause.
* <p>
* <code>where-clause := where expression</code>
*
* @return Parsed node
*/
private STNode parseWhereClause(boolean isRhsExpr) {
STNode whereKeyword = parseWhereKeyword();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
}
/**
* Parse where-keyword.
*
* @return Where-keyword node
*/
private STNode parseWhereKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.WHERE_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.WHERE_KEYWORD);
return parseWhereKeyword();
}
}
/**
* Parse limit-keyword.
*
* @return limit-keyword node
*/
private STNode parseLimitKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LIMIT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LIMIT_KEYWORD);
return parseLimitKeyword();
}
}
/**
* Parse let clause.
* <p>
* <code>let-clause := let let-var-decl [, let-var-decl]* </code>
*
* @return Parsed node
*/
private STNode parseLetClause(boolean isRhsExpr) {
STNode letKeyword = parseLetKeyword();
STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);
}
/**
* Parse order-keyword.
*
* @return Order-keyword node
*/
private STNode parseOrderKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ORDER_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ORDER_KEYWORD);
return parseOrderKeyword();
}
}
/**
* Parse by-keyword.
*
* @return By-keyword node
*/
private STNode parseByKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BY_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BY_KEYWORD);
return parseByKeyword();
}
}
/**
* Parse order by clause.
* <p>
* <code>order-by-clause := order by order-key-list
* </code>
*
* @return Parsed node
*/
private STNode parseOrderByClause(boolean isRhsExpr) {
STNode orderKeyword = parseOrderKeyword();
STNode byKeyword = parseByKeyword();
STNode orderKeys = parseOrderKeyList(isRhsExpr);
byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);
}
/**
* Parse order key.
* <p>
* <code>order-key-list := order-key [, order-key]*</code>
*
* @return Parsed node
*/
    // Parses a comma-separated list of order keys. Returns an empty node list
    // when no key follows `order by`; the caller reports that as an error.
    private STNode parseOrderKeyList(boolean isRhsExpr) {
        startContext(ParserRuleContext.ORDER_KEY_LIST);
        List<STNode> orderKeys = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfOrderKeys(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        orderKeys.add(orderKey);
        nextToken = peek();
        STNode orderKeyListMemberEnd;
        while (!isEndOfOrderKeys(nextToken.kind)) {
            orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
            // A null separator means the next query clause has started.
            if (orderKeyListMemberEnd == null) {
                break;
            }
            orderKeys.add(orderKeyListMemberEnd);
            orderKey = parseOrderKey(isRhsExpr);
            orderKeys.add(orderKey);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(orderKeys);
    }
private boolean isEndOfOrderKeys(SyntaxKind tokenKind) {
switch (tokenKind) {
case COMMA_TOKEN:
case ASCENDING_KEYWORD:
case DESCENDING_KEYWORD:
return false;
case SEMICOLON_TOKEN:
case EOF_TOKEN:
return true;
default:
return isQueryClauseStartToken(tokenKind);
}
}
private boolean isQueryClauseStartToken(SyntaxKind tokenKind) {
    // Keywords that can begin a clause inside a query expression or query action.
    return tokenKind == SyntaxKind.SELECT_KEYWORD
            || tokenKind == SyntaxKind.LET_KEYWORD
            || tokenKind == SyntaxKind.WHERE_KEYWORD
            || tokenKind == SyntaxKind.OUTER_KEYWORD
            || tokenKind == SyntaxKind.JOIN_KEYWORD
            || tokenKind == SyntaxKind.ORDER_KEYWORD
            || tokenKind == SyntaxKind.DO_KEYWORD
            || tokenKind == SyntaxKind.FROM_KEYWORD
            || tokenKind == SyntaxKind.LIMIT_KEYWORD;
}
private STNode parseOrderKeyListMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    // EOF or the start of the next query clause terminates the list without a separator.
    if (nextToken.kind == SyntaxKind.EOF_TOKEN || isQueryClauseStartToken(nextToken.kind)) {
        return null;
    }
    recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);
    return parseOrderKeyListMemberEnd();
}
/**
 * Parse order key.
 * <p>
 * <code>order-key := expression (ascending | descending)?</code>
 *
 * @param isRhsExpr Whether the key expression is parsed as a RHS expression
 * @return Order key node
 */
private STNode parseOrderKey(boolean isRhsExpr) {
    STNode keyExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STToken nextToken = peek();
    STNode direction;
    if (nextToken.kind == SyntaxKind.ASCENDING_KEYWORD || nextToken.kind == SyntaxKind.DESCENDING_KEYWORD) {
        direction = consume();
    } else {
        // The order direction is optional.
        direction = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createOrderKeyNode(keyExpr, direction);
}
/**
 * Parse select clause.
 * <p>
 * <code>select-clause := select expression</code>
 *
 * @param isRhsExpr Whether the select expression is parsed as a RHS expression
 * @return Select clause node
 */
private STNode parseSelectClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.SELECT_CLAUSE);
    STNode selectKw = parseSelectKeyword();
    STNode selectExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    return STNodeFactory.createSelectClauseNode(selectKw, selectExpr);
}
/**
 * Parse select-keyword.
 *
 * @return Select-keyword node
 */
private STNode parseSelectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SELECT_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.SELECT_KEYWORD);
        return parseSelectKeyword();
    }
    return consume();
}
/**
 * Parse on-conflict clause.
 * <p>
 * <code>onConflictClause := on conflict expression</code>
 *
 * @param isRhsExpr Whether the conflict expression is parsed as a RHS expression
 * @return On-conflict clause node, or an empty node when the clause is absent
 */
private STNode parseOnConflictClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    // The clause is optional: bail out unless it starts with `on` or `conflict`.
    if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
    STNode onKw = parseOnKeyword();
    STNode conflictKw = parseConflictKeyword();
    endContext();
    STNode conflictExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnConflictClauseNode(onKw, conflictKw, conflictExpr);
}
/**
 * Parse conflict keyword.
 *
 * @return Conflict keyword node
 */
private STNode parseConflictKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
    return consume();
}
/**
 * Parse limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @param isRhsExpr Whether the limit expression is parsed as a RHS expression
 * @return Limit clause node
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STNode limitKw = parseLimitKeyword();
    STNode limitExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKw, limitExpr);
}
/**
 * Parse join clause.
 * <p>
 * <code>
 * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
 * <br/>
 * join-var-decl := join (typeName | var) bindingPattern
 * <br/>
 * outer-join-var-decl := outer join var binding-pattern
 * </code>
 *
 * @param isRhsExpr Whether the join expression is parsed as a RHS expression
 * @return Join clause node
 */
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);
    // The `outer` qualifier is optional and precedes the join keyword.
    STNode outerKw = peek().kind == SyntaxKind.OUTER_KEYWORD ? consume() : STNodeFactory.createEmptyNode();
    STNode joinKw = parseJoinKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKw = parseInKeyword();
    STNode joinExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    // The mandatory on-clause is parsed outside the JOIN_CLAUSE context.
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKw, joinKw, bindingPattern, inKw, joinExpr, onCondition);
}
/**
 * Parse on clause.
 * <p>
 * <code>on clause := `on` expression `equals` expression</code>
 *
 * @param isRhsExpr Whether the operand expressions are parsed as RHS expressions
 * @return On clause node
 */
private STNode parseOnClause(boolean isRhsExpr) {
    // If the next query clause has already started, the on-clause is missing;
    // synthesize one made of missing tokens.
    if (isQueryClauseStartToken(peek().kind)) {
        return createMissingOnClauseNode();
    }
    startContext(ParserRuleContext.ON_CLAUSE);
    STNode onKw = parseOnKeyword();
    STNode lhs = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STNode equalsKw = parseEqualsKeyword();
    endContext();
    STNode rhs = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnClauseNode(onKw, lhs, equalsKw, rhs);
}
private STNode createMissingOnClauseNode() {
    // Fabricate `on <missing> equals <missing>` with a diagnostic on each missing token.
    STNode missingOnKw = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode missingId = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    STNode missingEqualsKw = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);
    // Both operands wrap the same missing identifier token.
    STNode missingLhs = STNodeFactory.createSimpleNameReferenceNode(missingId);
    STNode missingRhs = STNodeFactory.createSimpleNameReferenceNode(missingId);
    return STNodeFactory.createOnClauseNode(missingOnKw, missingLhs, missingEqualsKw, missingRhs);
}
/**
 * Parse start action.
 * <p>
 * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
 *
 * @param annots Annotations preceding the start keyword (may be an empty/absent node)
 * @return Start action node
 */
private STNode parseStartAction(STNode annots) {
    STNode startKeyword = parseStartKeyword();
    STNode expr = parseActionOrExpression();
    switch (expr.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
        case REMOTE_METHOD_CALL_ACTION:
            // Already one of the allowed invocation forms; keep as-is.
            break;
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
            // A bare name after `start`: treat it as a call whose parentheses are
            // missing, reporting each missing token.
            STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
            STNode arguments = STNodeFactory.createEmptyNodeList();
            STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
            break;
        default:
            // Any other expression is invalid here: attach it to the start keyword as
            // invalid-node minutiae and substitute an entirely-missing function call.
            // NOTE: openParenToken/arguments/closeParenToken below reuse the locals
            // declared in the previous case — Java switch cases share one scope.
            startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                    DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
            STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
            openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
            arguments = STNodeFactory.createEmptyNodeList();
            closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
                    closeParenToken);
            break;
    }
    return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
}
/**
 * Parse start keyword.
 *
 * @return Start keyword node
 */
private STNode parseStartKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.START_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.START_KEYWORD);
        return parseStartKeyword();
    }
    return consume();
}
/**
 * Parse flush action.
 * <p>
 * <code>flush-action := flush [peer-worker]</code>
 *
 * @return Flush action node
 */
private STNode parseFlushAction() {
    STNode flushKw = parseFlushKeyword();
    // The peer-worker name is optional after `flush`.
    STNode workerName = parseOptionalPeerWorkerName();
    return STNodeFactory.createFlushActionNode(flushKw, workerName);
}
/**
 * Parse flush keyword.
 *
 * @return Flush keyword node
 */
private STNode parseFlushKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FLUSH_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.FLUSH_KEYWORD);
        return parseFlushKeyword();
    }
    return consume();
}
/**
 * Parse optional peer worker.
 * <p>
 * <code>peer-worker := worker-name | function</code>
 *
 * @return Peer worker name node, or an empty node when absent
 */
private STNode parseOptionalPeerWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    // The name is optional, so no error recovery is attempted.
    return STNodeFactory.createEmptyNode();
}
/**
 * Parse intersection type descriptor.
 * <p>
 * <code>intersection-type-descriptor := type-descriptor &amp; type-descriptor</code>
 *
 * @param leftTypeDesc Type descriptor preceding the ampersand
 * @param context Current parser rule context
 * @param isTypedBindingPattern Whether this is part of a typed binding pattern
 * @return Intersection type descriptor node
 */
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                               boolean isTypedBindingPattern) {
    // The caller has already verified the next token is `&`.
    STNode ampersand = consume();
    STNode rhsTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);
    return createIntersectionTypeDesc(leftTypeDesc, ampersand, rhsTypeDesc);
}
private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
    // `var` is not a valid constituent of an intersection type; validate both sides.
    STNode validatedLhs = validateForUsageOfVar(leftTypeDesc);
    STNode validatedRhs = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createIntersectionTypeDescriptorNode(validatedLhs, bitwiseAndToken, validatedRhs);
}
/**
 * Parse singleton type descriptor.
 * <p>
 * <code>
 * singleton-type-descriptor := simple-const-expr
 * <br/>
 * simple-const-expr := nil-literal | boolean-literal | [Sign] int-literal
 * | [Sign] floating-point-literal | string-literal | constant-reference-expr
 * </code>
 *
 * @return Singleton type descriptor node
 */
private STNode parseSingletonTypeDesc() {
    STNode simpleConstExpr = parseSimpleConstExpr();
    return STNodeFactory.createSingletonTypeDescriptorNode(simpleConstExpr);
}
private STNode parseSignedIntOrFloat() {
    STNode signOperator = parseUnaryOperator();
    SyntaxKind nextKind = peek().kind;
    STNode literal;
    if (nextKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN
            || nextKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN
            || nextKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
        literal = parseBasicLiteral();
    } else {
        // Anything else is parsed (or recovered) as a decimal integer literal.
        literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
        literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
    }
    return STNodeFactory.createUnaryExpressionNode(signOperator, literal);
}
/**
 * Check whether the given token can start a singleton type descriptor.
 *
 * @param tokenKind Kind of the current token
 * @param inTypeDescCtx Whether the parser is already inside a type-descriptor context
 * @return <code>true</code> if a singleton type descriptor can start here
 */
private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
    STToken nextNextToken = getNextNextToken();
    switch (tokenKind) {
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            // Inside a type-desc context a literal always starts a singleton type;
            // elsewhere it only does when followed by a valid type-desc RHS token.
            // (Idiom fix: return the condition directly instead of if/return-true/return-false.)
            return inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken);
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // A sign starts a singleton type only when a numeric literal follows.
            return isIntOrFloat(nextNextToken);
        default:
            return false;
    }
}
static boolean isIntOrFloat(STToken token) {
    // True for any integer or floating-point literal token (decimal or hex).
    SyntaxKind kind = token.kind;
    return kind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN;
}
private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
    // Tokens that may legally follow a type descriptor outside a type-desc context.
    SyntaxKind kind = token.kind;
    return kind == SyntaxKind.IDENTIFIER_TOKEN
            || kind == SyntaxKind.QUESTION_MARK_TOKEN
            || kind == SyntaxKind.OPEN_PAREN_TOKEN
            || kind == SyntaxKind.OPEN_BRACKET_TOKEN
            || kind == SyntaxKind.PIPE_TOKEN
            || kind == SyntaxKind.BITWISE_AND_TOKEN
            || kind == SyntaxKind.OPEN_BRACE_TOKEN
            || kind == SyntaxKind.ERROR_KEYWORD;
}
/**
 * Check whether the parser reached to a valid expression start.
 *
 * @param nextTokenKind Kind of the next immediate token.
 * @param nextTokenIndex Index to the next token.
 * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
 */
private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
    // Advance to the token AFTER the one being classified; the peeks below
    // examine that lookahead token.
    nextTokenIndex++;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal starts an expression only if what follows can end or
            // continue an expression.
            SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
            return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                    nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                    isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case IDENTIFIER_TOKEN:
            return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case OPEN_PAREN_TOKEN:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TRAP_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case LT_TOKEN:
        case FROM_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case NEW_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case ISOLATED_KEYWORD:
            // These tokens unambiguously begin an expression.
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // A sign is an expression start iff the signed operand is one.
            return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
        case TABLE_KEYWORD:
            // Only a table-query (`table from ...`) counts here.
            return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
        case STREAM_KEYWORD:
            STToken nextNextToken = peek(nextTokenIndex);
            return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                    nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                    nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
        case ERROR_KEYWORD:
            // `error(` is an error constructor expression.
            return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
        case XML_KEYWORD:
        case STRING_KEYWORD:
            // Only template expressions (`xml`...`` / `string`...``) qualify.
            return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case WAIT_KEYWORD:
        default:
            // NOTE: start/flush/wait deliberately fall through to false — they begin
            // actions, not plain expressions.
            return false;
    }
}
/**
 * Parse sync send action.
 * <p>
 * <code>sync-send-action := expression -&gt;&gt; peer-worker</code>
 *
 * @param expression LHS expression of the sync send action
 * @return Sync send action node
 */
private STNode parseSyncSendAction(STNode expression) {
    STNode sendToken = parseSyncSendToken();
    STNode workerName = parsePeerWorkerName();
    return STNodeFactory.createSyncSendActionNode(expression, sendToken, workerName);
}
/**
 * Parse peer worker.
 * <p>
 * <code>peer-worker := worker-name | function</code>
 *
 * @return Peer worker name node
 */
private STNode parsePeerWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    // Mandatory here (unlike parseOptionalPeerWorkerName): recover and retry.
    recover(nextToken, ParserRuleContext.PEER_WORKER_NAME);
    return parsePeerWorkerName();
}
/**
 * Parse sync send token.
 * <p>
 * <code>sync-send-token := -&gt;&gt; </code>
 *
 * @return Sync send token
 */
private STNode parseSyncSendToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SYNC_SEND_TOKEN) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.SYNC_SEND_TOKEN);
        return parseSyncSendToken();
    }
    return consume();
}
/**
 * Parse receive action.
 * <p>
 * <code>receive-action := single-receive-action | multiple-receive-action</code>
 *
 * @return Receive action node
 */
private STNode parseReceiveAction() {
    STNode leftArrowToken = parseLeftArrowToken();
    STNode workers = parseReceiveWorkers();
    return STNodeFactory.createReceiveActionNode(leftArrowToken, workers);
}
private STNode parseReceiveWorkers() {
    SyntaxKind nextKind = peek().kind;
    // A single worker name (or `function`) gives a single-receive action.
    if (nextKind == SyntaxKind.FUNCTION_KEYWORD || nextKind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parsePeerWorkerName();
    }
    // An open brace starts a multiple-receive action.
    if (nextKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMultipleReceiveWorkers();
    }
    recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
    return parseReceiveWorkers();
}
/**
 * Parse multiple worker receivers.
 * <p>
 * <code>{ receive-field (, receive-field)* }</code>
 *
 * @return Multiple worker receiver node
 */
private STNode parseMultipleReceiveWorkers() {
    startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
    STNode openBraceToken = parseOpenBrace();
    STNode fields = parseReceiveFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // Report an empty field list on the opening brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(fields, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
    return STNodeFactory.createReceiveFieldsNode(openBraceToken, fields, closeBraceToken);
}
// Parse the comma-separated receive-field list inside a multiple-receive action.
// Returns a node list of fields interleaved with their comma separators.
private STNode parseReceiveFields() {
    List<STNode> receiveFields = new ArrayList<>();
    STToken nextToken = peek();
    // Empty list: the caller reports the missing-field diagnostic.
    if (isEndOfReceiveFields(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // First field, then alternating (separator, field) pairs.
    STNode receiveField = parseReceiveField();
    receiveFields.add(receiveField);
    nextToken = peek();
    STNode recieveFieldEnd;
    while (!isEndOfReceiveFields(nextToken.kind)) {
        recieveFieldEnd = parseReceiveFieldEnd();
        // A null separator means the list ended without an explicit terminator.
        if (recieveFieldEnd == null) {
            break;
        }
        receiveFields.add(recieveFieldEnd);
        receiveField = parseReceiveField();
        receiveFields.add(receiveField);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(receiveFields);
}
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
    // The receive-field list terminates at the closing brace or at EOF.
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
private STNode parseReceiveFieldEnd() {
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    // The closing brace ends the list with no separator.
    if (nextKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
    return parseReceiveFieldEnd();
}
/**
 * Parse receive field.
 * <p>
 * <code>receive-field := peer-worker | field-name : peer-worker</code>
 *
 * @return Receive field node
 */
private STNode parseReceiveField() {
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.FUNCTION_KEYWORD) {
        // `function` names the function's default worker.
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    if (nextKind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
        return createQualifiedReceiveField(fieldName);
    }
    recover(peek(), ParserRuleContext.RECEIVE_FIELD);
    return parseReceiveField();
}
private STNode createQualifiedReceiveField(STNode identifier) {
    // Without a colon the identifier alone is the receive field.
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    STNode workerName = parsePeerWorkerName();
    return STNodeFactory.createQualifiedNameReferenceNode(identifier, colonToken, workerName);
}
/**
 * Parse left arrow (&lt;-) token.
 *
 * @return Left arrow token
 */
private STNode parseLeftArrowToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LEFT_ARROW_TOKEN) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.LEFT_ARROW_TOKEN);
        return parseLeftArrowToken();
    }
    return consume();
}
/**
 * Parse signed right shift token (&gt;&gt;).
 * <p>
 * The lexer emits two separate {@code >} tokens; this fuses them into a single
 * {@code >>} token, keeping the first token's leading and the second token's
 * trailing minutiae.
 *
 * @return Parsed node
 */
private STNode parseSignedRightShiftToken() {
    STNode openGTToken = consume();
    STToken endLGToken = consume();
    STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),
            endLGToken.trailingMinutiae());
    // Whitespace between the two `>` tokens makes the operator invalid.
    if (hasTrailingMinutiae(openGTToken)) {
        doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
    }
    return doubleGTToken;
}
/**
 * Parse unsigned right shift token (&gt;&gt;&gt;).
 * <p>
 * Fuses three consecutive {@code >} tokens into one {@code >>>} token, keeping
 * the first token's leading and the last token's trailing minutiae.
 *
 * @return Parsed node
 */
private STNode parseUnsignedRightShiftToken() {
    STNode openGTToken = consume();
    STNode middleGTToken = consume();
    STNode endLGToken = consume();
    STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
            openGTToken.leadingMinutiae(), endLGToken.trailingMinutiae());
    // Whitespace after either of the first two `>` tokens makes the operator invalid.
    if (hasTrailingMinutiae(openGTToken) || hasTrailingMinutiae(middleGTToken)) {
        unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
    }
    return unsignedRightShiftToken;
}
/**
 * Parse wait action.
 * <p>
 * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
 *
 * @return Wait action node
 */
private STNode parseWaitAction() {
    STNode waitKw = parseWaitKeyword();
    // An open brace after `wait` starts a multiple-wait action; anything else
    // is a single or alternate wait.
    if (peek().kind != SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseSingleOrAlternateWaitAction(waitKw);
    }
    return parseMultiWaitAction(waitKw);
}
/**
 * Parse wait keyword.
 *
 * @return Wait keyword node
 */
private STNode parseWaitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WAIT_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.WAIT_KEYWORD);
        return parseWaitKeyword();
    }
    return consume();
}
/**
 * Parse single or alternate wait actions.
 * <p>
 * <code>
 * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
 * <br/>
 * wait-future-expr := expression but not mapping-constructor-expr
 * </code>
 *
 * @param waitKeyword wait keyword
 * @return Single or alternate wait action node
 */
private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
    STToken nextToken = peek();
    // No expression after `wait`: synthesize a missing identifier with a diagnostic.
    if (isEndOfWaitFutureExprList(nextToken.kind)) {
        endContext();
        STNode waitFutureExprs = STNodeFactory
                .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
    }
    // Collect pipe-separated wait-future expressions: first expr, then
    // alternating (pipe, expr) pairs.
    List<STNode> waitFutureExprList = new ArrayList<>();
    STNode waitField = parseWaitFutureExpr();
    waitFutureExprList.add(waitField);
    nextToken = peek();
    STNode waitFutureExprEnd;
    while (!isEndOfWaitFutureExprList(nextToken.kind)) {
        waitFutureExprEnd = parseWaitFutureExprEnd();
        if (waitFutureExprEnd == null) {
            break;
        }
        waitFutureExprList.add(waitFutureExprEnd);
        waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
    }
    endContext();
    // NOTE(review): only the first collected expression is attached to the wait
    // action; the alternates gathered above appear to be discarded here —
    // confirm whether alternate wait exprs are folded into the first expression
    // by parseWaitFutureExpr (e.g. via `|` as a binary operator) or are lost.
    return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
}
private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
    // A pipe continues an alternate wait, so it never terminates the list;
    // braces, semicolon, and EOF do.
    return nextTokenKind == SyntaxKind.EOF_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN
            || nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN;
}
private STNode parseWaitFutureExpr() {
    STNode futureExpr = parseActionOrExpression();
    // Mapping constructors and actions are syntactically reachable here but are
    // not legal wait-future expressions; attach a diagnostic instead of failing.
    if (futureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
        futureExpr = SyntaxErrors.addDiagnostic(futureExpr,
                DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
    } else if (isAction(futureExpr)) {
        futureExpr = SyntaxErrors.addDiagnostic(futureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
    }
    return futureExpr;
}
private STNode parseWaitFutureExprEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
        return parsePipeToken();
    }
    // End of the list, or the next token cannot start an expression: no separator.
    if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
        return null;
    }
    recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
    return parseWaitFutureExprEnd();
}
/**
 * Parse multiple wait action.
 * <p>
 * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
 *
 * @param waitKeyword Wait keyword
 * @return Multiple wait action node
 */
private STNode parseMultiWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
    STNode openBraceToken = parseOpenBrace();
    STNode fields = parseWaitFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // Report an empty field list on the opening brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(fields, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
    STNode fieldsList = STNodeFactory.createWaitFieldsListNode(openBraceToken, fields, closeBraceToken);
    return STNodeFactory.createWaitActionNode(waitKeyword, fieldsList);
}
// Parse the comma-separated wait-field list inside a multiple-wait action.
// Returns a node list of fields interleaved with their comma separators.
private STNode parseWaitFields() {
    List<STNode> waitFields = new ArrayList<>();
    STToken nextToken = peek();
    // Empty list: the caller reports the missing-field diagnostic.
    if (isEndOfWaitFields(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // First field, then alternating (separator, field) pairs.
    STNode waitField = parseWaitField();
    waitFields.add(waitField);
    nextToken = peek();
    STNode waitFieldEnd;
    while (!isEndOfWaitFields(nextToken.kind)) {
        waitFieldEnd = parseWaitFieldEnd();
        // A null separator means the list ended without an explicit terminator.
        if (waitFieldEnd == null) {
            break;
        }
        waitFields.add(waitFieldEnd);
        waitField = parseWaitField();
        waitFields.add(waitField);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(waitFields);
}
private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
    // The wait-field list terminates at the closing brace or at EOF.
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
private STNode parseWaitFieldEnd() {
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    // The closing brace ends the list with no separator.
    if (nextKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.WAIT_FIELD_END);
    return parseWaitFieldEnd();
}
/**
 * Parse wait field.
 * <p>
 * <code>wait-field := variable-name | field-name : wait-future-expr</code>
 *
 * @return Wait field node
 */
private STNode parseWaitField() {
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
        STNode nameRef = STNodeFactory.createSimpleNameReferenceNode(fieldName);
        return createQualifiedWaitField(nameRef);
    }
    recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
    return parseWaitField();
}
private STNode createQualifiedWaitField(STNode identifier) {
    // Without a colon the identifier alone is the wait field.
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    STNode futureExpr = parseWaitFutureExpr();
    return STNodeFactory.createWaitFieldNode(identifier, colonToken, futureExpr);
}
/**
 * Parse annot access expression.
 * <p>
 * <code>
 * annot-access-expr := expression .@ annot-tag-reference
 * <br/>
 * annot-tag-reference := qualified-identifier | identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the annot access
 * @param isInConditionalExpr Whether this is parsed within a conditional expression
 * @return Annot access expression node
 */
private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode chainingToken = parseAnnotChainingToken();
    STNode tagReference = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, chainingToken, tagReference);
}
/**
 * Parse annot-chaining-token (.@).
 *
 * @return Annot chaining token
 */
private STNode parseAnnotChainingToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ANNOT_CHAINING_TOKEN) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.ANNOT_CHAINING_TOKEN);
        return parseAnnotChainingToken();
    }
    return consume();
}
/**
 * Parse field access identifier.
 * <p>
 * <code>field-access-identifier := qualified-identifier | identifier</code>
 *
 * @param isInConditionalExpr Whether this is parsed within a conditional expression
 *        (affects qualified-name disambiguation downstream)
 * @return Parsed node
 */
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
    return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
}
/**
 * Parse query action.
 * <p>
 * <code>query-action := query-pipeline do-clause
 * <br/>
 * do-clause := do block-stmt
 * </code>
 *
 * @param queryConstructType Query construct type, used only for validation (invalid in an action)
 * @param queryPipeline Query pipeline
 * @param selectClause Select clause if any, used only for validation (invalid in an action)
 * @param isRhsExpr Unused here; kept for signature compatibility with the expression path
 * @return Query action node
 */
private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,
                                boolean isRhsExpr) {
    // A query action may not carry a construct type: fold it into the pipeline
    // as invalid leading minutiae with a diagnostic.
    if (queryConstructType != null) {
        queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
    }
    // Likewise, a select clause is invalid in a query action.
    if (selectClause != null) {
        queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
    }
    startContext(ParserRuleContext.DO_CLAUSE);
    STNode doKw = parseDoKeyword();
    STNode doBlock = parseBlockNode();
    endContext();
    return STNodeFactory.createQueryActionNode(queryPipeline, doKw, doBlock);
}
/**
 * Parse 'do' keyword.
 *
 * @return Do keyword node
 */
private STNode parseDoKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DO_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.DO_KEYWORD);
        return parseDoKeyword();
    }
    return consume();
}
/**
 * Parse optional field access or xml optional attribute access expression.
 * <p>
 * <code>
 * optional-field-access-expr := expression ?. field-name
 * <br/>
 * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
 * <br/>
 * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
 * <br/>
 * xml-qualified-name := xml-namespace-prefix : identifier
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the optional access
 * @param isInConditionalExpr Whether this is parsed within a conditional expression
 * @return Optional field access expression node
 */
private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode chainingToken = parseOptionalChainingToken();
    STNode accessedField = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, chainingToken, accessedField);
}
/**
 * Parse optional chaining token (?.).
 *
 * @return Optional chaining token
 */
private STNode parseOptionalChainingToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
        return parseOptionalChainingToken();
    }
    return consume();
}
/**
 * Parse conditional expression.
 * <p>
 * <code>conditional-expr := expression ? expression : expression</code>
 * <p>
 * The middle expression is ambiguous with a qualified name (`a ? b : c` vs
 * `a ? b:c` where `b:c` lexes as one qualified reference), so the middle
 * expression may need to be split back into middle and end parts.
 *
 * @param lhsExpr Preceding expression of the question mark
 * @return Parsed node
 */
private STNode parseConditionalExpression(STNode lhsExpr) {
    startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
    STNode questionMark = parseQuestionMark();
    STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);
    STNode nextToken = peek();
    STNode endExpr;
    STNode colon;
    if (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // No standalone colon follows and the middle parsed as `prefix:identifier`:
        // that colon WAS the conditional's colon. Split the qualified reference —
        // its prefix becomes the middle expression, its identifier the end expression.
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;
        STNode modulePrefix = qualifiedNameRef.modulePrefix;
        if (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            middleExpr = STNodeFactory.createSimpleNameReferenceNode(modulePrefix);
        } else {
            // Prefix is a built-in type name node; use it directly.
            middleExpr = modulePrefix;
        }
        colon = qualifiedNameRef.colon;
        endContext();
        endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
    } else {
        // A real colon follows: keep the middle expression intact (rewriting a
        // built-in-prefixed qualified name into a regular one), then parse the end.
        if (middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            middleExpr = generateQualifiedNameRef(middleExpr);
        }
        colon = parseColon();
        endContext();
        endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);
    }
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
}
private STNode generateQualifiedNameRef(STNode qualifiedName) {
    STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) qualifiedName;
    STNode modulePrefix = qualifiedNameRef.modulePrefix;
    // A plain identifier prefix needs no rewriting.
    if (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return qualifiedName;
    }
    // The prefix is a predeclared built-in type name: re-issue it as an identifier
    // token (preserving its minutiae) so the result is a regular qualified name.
    STBuiltinSimpleNameReferenceNode builtInType = (STBuiltinSimpleNameReferenceNode) modulePrefix;
    STToken nameToken = (STToken) builtInType.name;
    STNode preDeclaredPrefix = STNodeFactory.createIdentifierToken(nameToken.text(),
            nameToken.leadingMinutiae(), nameToken.trailingMinutiae());
    return STNodeFactory.createQualifiedNameReferenceNode(preDeclaredPrefix, qualifiedNameRef.colon,
            qualifiedNameRef.identifier);
}
/**
 * Parse enum declaration.
 * <p>
 * <code>
 * module-enum-decl := metadata [public] enum identifier { enum-member (, enum-member)* }
 * <br/>
 * enum-member := metadata identifier [= const-expr]
 * </code>
 *
 * @param metadata Metadata preceding the declaration
 * @param qualifier Visibility qualifier (e.g. public), or an empty node
 * @return Enum declaration node
 */
private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
    STNode enumKw = parseEnumKeyword();
    STNode enumName = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
    STNode openBraceToken = parseOpenBrace();
    STNode members = parseEnumMemberList();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // An empty member list is reported against the opening brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(members, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
    return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKw, enumName,
            openBraceToken, members, closeBraceToken);
}
/**
 * Parse 'enum' keyword.
 *
 * @return Enum keyword node
 */
private STNode parseEnumKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ENUM_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.ENUM_KEYWORD);
        return parseEnumKeyword();
    }
    return consume();
}
/**
 * Parse enum member list.
 * <p>
 * <code>enum-member := metadata identifier [= const-expr]</code>
 * </p>
 *
 * @return enum member list node.
 */
private STNode parseEnumMemberList() {
    startContext(ParserRuleContext.ENUM_MEMBER_LIST);
    // Empty member list, e.g. `enum Color {}`. The missing-member diagnostic is attached
    // by the caller (parseEnumDeclaration).
    if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        // Fix: the original early return skipped endContext(), leaking the
        // ENUM_MEMBER_LIST context on the parser's context stack.
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> enumMemberList = new ArrayList<>();
    STNode enumMember = parseEnumMember();
    STNode enumMemberRhs;
    // Members alternate with comma separators; separators are kept in the node list.
    while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        enumMemberRhs = parseEnumMemberEnd();
        if (enumMemberRhs == null) {
            // No separator found: the list ends with the member parsed above.
            break;
        }
        enumMemberList.add(enumMember);
        enumMemberList.add(enumMemberRhs);
        enumMember = parseEnumMember();
    }
    enumMemberList.add(enumMember);
    endContext();
    return STNodeFactory.createNodeList(enumMemberList);
}
/**
 * Parse a single enum member.
 * <p>
 * <code>enum-member := metadata identifier [= const-expr]</code>
 * </p>
 *
 * @return Parsed enum member node.
 */
private STNode parseEnumMember() {
    SyntaxKind nextKind = peek().kind;
    STNode metadata;
    // Metadata is present only when the member starts with doc string or annotation.
    if (nextKind == SyntaxKind.DOCUMENTATION_STRING || nextKind == SyntaxKind.AT_TOKEN) {
        metadata = parseMetaData();
    } else {
        metadata = STNodeFactory.createEmptyNode();
    }
    STNode memberName = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
    return parseEnumMemberRhs(metadata, memberName);
}
/**
 * Parse the right-hand side of an enum member: the optional <code>= const-expr</code> part.
 *
 * @param metadata       Already-parsed metadata of the member (possibly an empty node)
 * @param identifierNode Already-parsed member name
 * @return Parsed enum member node
 */
private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
    STNode equalToken, constExprNode;
    switch (peek().kind) {
        case EQUAL_TOKEN:
            // Explicit value: `MEMBER = const-expr`.
            equalToken = parseAssignOp();
            constExprNode = parseExpression();
            break;
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // No explicit value; both the `=` and the expression stay empty.
            equalToken = STNodeFactory.createEmptyNode();
            constExprNode = STNodeFactory.createEmptyNode();
            break;
        default:
            // Unexpected token: run recovery (passing along the parsed pieces) and retry.
            recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);
            return parseEnumMemberRhs(metadata, identifierNode);
    }
    return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
}
/**
 * Parse the token that ends an enum member: a comma separator, or nothing when the
 * closing brace of the enum body follows.
 *
 * @return Comma token, or {@code null} when the member list has ended
 */
private STNode parseEnumMemberEnd() {
    while (true) {
        SyntaxKind nextKind = peek().kind;
        if (nextKind == SyntaxKind.COMMA_TOKEN) {
            return parseComma();
        }
        if (nextKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            return null;
        }
        recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
    }
}
/**
 * Parse what follows a statement-initial `transaction` keyword: either a transaction
 * statement, or a variable declaration whose type uses the predeclared `transaction`
 * module prefix (e.g. <code>transaction:Info info;</code>).
 *
 * @param annots             Statement annotations
 * @param qualifiers         Statement qualifiers
 * @param transactionKeyword Already-consumed `transaction` keyword token
 * @return Parsed statement node
 */
private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            // `transaction { ... }` — annotations/qualifiers are not allowed here.
            reportInvalidStatementAnnots(annots, qualifiers);
            reportInvalidQualifierList(qualifiers);
            return parseTransactionStatement(transactionKeyword);
        case COLON_TOKEN:
            if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // `transaction:Ident ...` — a qualified type reference starting a var-decl.
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            // Colon not followed by an identifier: intentional fall-through to recovery.
        default:
            Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
            // When recovery keeps the token or inserts a colon, re-interpret as a
            // qualified type reference; otherwise retry the whole decision.
            if (solution.action == Action.KEEP ||
                    (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
    }
}
/**
 * Parse transaction statement.
 * <p>
 * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
 *
 * @param transactionKeyword Already-consumed `transaction` keyword
 * @return Transaction statement node
 */
private STNode parseTransactionStatement(STNode transactionKeyword) {
    startContext(ParserRuleContext.TRANSACTION_STMT);
    STNode body = parseBlockNode();
    endContext();
    STNode optionalOnFail = parseOptionalOnFailClause();
    return STNodeFactory.createTransactionStatementNode(transactionKeyword, body, optionalOnFail);
}
/**
 * Parse commit action.
 * <p>
 * <code>commit-action := "commit"</code>
 *
 * @return Commit action node
 */
private STNode parseCommitAction() {
    return STNodeFactory.createCommitActionNode(parseCommitKeyword());
}
/**
 * Parse commit keyword.
 *
 * @return parsed node
 */
private STNode parseCommitKeyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.COMMIT_KEYWORD) {
        recover(peek(), ParserRuleContext.COMMIT_KEYWORD);
    }
    return consume();
}
/**
 * Parse retry statement.
 * <p>
 * <code>
 * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
 * <br/>
 * retry-spec := [type-parameter] [ `(` arg-list `)` ]
 * </code>
 *
 * @return Retry statement node
 */
private STNode parseRetryStatement() {
    startContext(ParserRuleContext.RETRY_STMT);
    STNode retryKeyword = parseRetryKeyword();
    // The RETRY_STMT context opened above is closed inside parseRetryTypeParamRhs().
    return parseRetryKeywordRhs(retryKeyword);
}
/**
 * Parse what follows the `retry` keyword: an optional type parameter, then the
 * retry-spec args and body.
 *
 * @param retryKeyword Already-consumed `retry` keyword
 * @return Retry statement node
 */
private STNode parseRetryKeywordRhs(STNode retryKeyword) {
    while (true) {
        switch (peek().kind) {
            case LT_TOKEN:
                // `retry<RetryManager> ...`
                return parseRetryTypeParamRhs(retryKeyword, parseTypeParameter());
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // No type parameter present.
                return parseRetryTypeParamRhs(retryKeyword, STNodeFactory.createEmptyNode());
            default:
                recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);
        }
    }
}
/**
 * Parse the remainder of a retry statement after the optional type parameter:
 * the optional parenthesized arg list, the body, and the optional on-fail clause.
 *
 * @param retryKeyword Already-consumed `retry` keyword
 * @param typeParam    Parsed type parameter, or an empty node
 * @return Retry statement node
 */
private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
    STNode args;
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            args = parseParenthesizedArgList();
            break;
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            // No arg list; the body starts directly.
            args = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
    }
    STNode blockStmt = parseRetryBody();
    endContext(); // closes the RETRY_STMT context opened in parseRetryStatement()
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
}
/**
 * Parse the body of a retry statement: either a block statement, or a nested
 * transaction statement (`retry transaction { ... }`).
 *
 * @return Parsed body node
 */
private STNode parseRetryBody() {
    while (true) {
        SyntaxKind nextKind = peek().kind;
        if (nextKind == SyntaxKind.OPEN_BRACE_TOKEN) {
            return parseBlockNode();
        }
        if (nextKind == SyntaxKind.TRANSACTION_KEYWORD) {
            return parseTransactionStatement(consume());
        }
        recover(peek(), ParserRuleContext.RETRY_BODY);
    }
}
/**
 * Parse optional on fail clause.
 *
 * @return On-fail clause node, or an empty node when the clause is absent
 */
private STNode parseOptionalOnFailClause() {
    while (true) {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
            return parseOnFailClause();
        }
        // Anything that can legitimately follow a compound statement means the
        // clause was simply omitted.
        if (isEndOfRegularCompoundStmt(nextToken.kind)) {
            return STNodeFactory.createEmptyNode();
        }
        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
    }
}
/**
 * Check whether the given token kind terminates a regular compound statement,
 * i.e. whether no on-fail clause should be expected after it.
 *
 * @param nodeKind Token kind to test
 * @return {@code true} when the token ends the compound statement
 */
private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
    if (nodeKind == SyntaxKind.CLOSE_BRACE_TOKEN || nodeKind == SyntaxKind.SEMICOLON_TOKEN
            || nodeKind == SyntaxKind.AT_TOKEN || nodeKind == SyntaxKind.EOF_TOKEN) {
        return true;
    }
    // A token that can begin a new statement also ends the current one.
    return isStatementStartingToken(nodeKind);
}
/**
 * Check whether the given token kind can start a statement.
 *
 * @param nodeKind Token kind to test
 * @return {@code true} when a statement can begin with this token
 */
private boolean isStatementStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        // Keywords and tokens that begin a dedicated statement form.
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case TYPE_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case MATCH_KEYWORD:
        // Tokens that begin an expression-or-action statement.
        case FAIL_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:
        case WORKER_KEYWORD:
            return true;
        default:
            // A type-starting token can begin a local var-decl statement.
            if (isTypeStartingToken(nodeKind)) {
                return true;
            }
            // Any expression start can begin an expression statement.
            if (isValidExpressionStart(nodeKind, 1)) {
                return true;
            }
            return false;
    }
}
/**
 * Parse on fail clause.
 * <p>
 * <code>on-fail-clause := on fail typed-binding-pattern statement-block</code>
 *
 * @return On fail clause node
 */
private STNode parseOnFailClause() {
    startContext(ParserRuleContext.ON_FAIL_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode failKeyword = parseFailKeyword();
    STNode errorTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    STNode errorVarName = parseIdentifier(ParserRuleContext.VARIABLE_REF);
    STNode failBlock = parseBlockNode();
    endContext();
    return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, errorTypeDesc, errorVarName, failBlock);
}
/**
 * Parse retry keyword.
 *
 * @return parsed node
 */
private STNode parseRetryKeyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.RETRY_KEYWORD) {
        recover(peek(), ParserRuleContext.RETRY_KEYWORD);
    }
    return consume();
}
/**
 * Parse rollback statement.
 * <p>
 * <code>rollback-stmt := "rollback" [expression] ";"</code>
 *
 * @return Rollback statement node
 */
private STNode parseRollbackStatement() {
    startContext(ParserRuleContext.ROLLBACK_STMT);
    STNode rollbackKeyword = parseRollbackKeyword();
    // The expression is optional; a semicolon right after the keyword means it was omitted.
    STNode expression = peek().kind == SyntaxKind.SEMICOLON_TOKEN
            ? STNodeFactory.createEmptyNode()
            : parseExpression();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}
/**
 * Parse rollback keyword.
 *
 * @return Rollback keyword node
 */
private STNode parseRollbackKeyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.ROLLBACK_KEYWORD) {
        recover(peek(), ParserRuleContext.ROLLBACK_KEYWORD);
    }
    return consume();
}
/**
 * Parse transactional expression.
 * <p>
 * <code>transactional-expr := "transactional"</code>
 *
 * @return Transactional expression node
 */
private STNode parseTransactionalExpression() {
    return STNodeFactory.createTransactionalExpressionNode(parseTransactionalKeyword());
}
/**
 * Parse transactional keyword.
 *
 * @return Transactional keyword node
 */
private STNode parseTransactionalKeyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.TRANSACTIONAL_KEYWORD) {
        recover(peek(), ParserRuleContext.TRANSACTIONAL_KEYWORD);
    }
    return consume();
}
/**
 * Parse base16 literal.
 * <p>
 * <code>
 * byte-array-literal := Base16Literal | Base64Literal
 * <br/>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * </code>
 *
 * @return parsed node
 */
private STNode parseByteArrayLiteral() {
    STNode type;
    if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
        type = parseBase16Keyword();
    } else {
        type = parseBase64Keyword();
    }
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    // When even the opening backtick is missing, synthesize an empty literal with
    // missing backticks rather than attempting to parse any content.
    if (startingBackTick.isMissing()) {
        startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode content = STNodeFactory.createEmptyNode();
        STNode byteArrayLiteral =
                STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
        byteArrayLiteral =
                SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
        return byteArrayLiteral;
    }
    STNode content = parseByteArrayContent();
    // Delegate content validation and node assembly to the 3-arg overload.
    return parseByteArrayLiteral(type, startingBackTick, content);
}
/**
 * Parse byte array literal.
 *
 * @param typeKeyword keyword token, possible values are `base16` and `base64`
 * @param startingBackTick starting backtick token
 * @param byteArrayContent byte array literal content to be validated
 * @return parsed byte array literal node
 */
private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
    STNode content = STNodeFactory.createEmptyNode();
    STNode newStartingBackTick = startingBackTick;
    STNodeList items = (STNodeList) byteArrayContent;
    if (items.size() == 1) {
        // Exactly one content item: validate it against the declared encoding.
        // Invalid content is attached to the opening backtick as invalid minutiae,
        // with an encoding-specific diagnostic.
        STNode item = items.get(0);
        if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
            // Valid text but not a plain template string (e.g. an interpolation).
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else {
            content = item;
        }
    } else if (items.size() > 1) {
        // Multiple items (interpolations present): the whole content is invalid.
        // All items become invalid minutiae on the opening backtick, with a single
        // diagnostic added at the end.
        STNode clonedStartingBackTick = startingBackTick;
        for (int index = 0; index < items.size(); index++) {
            STNode item = items.get(index);
            clonedStartingBackTick =
                    SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
        }
        newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
    }
    // Note: items.size() == 0 leaves the content empty with no diagnostic here.
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
}
/**
 * Parse <code>base16</code> keyword.
 *
 * @return base16 keyword node
 */
private STNode parseBase16Keyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.BASE16_KEYWORD) {
        recover(peek(), ParserRuleContext.BASE16_KEYWORD);
    }
    return consume();
}
/**
 * Parse <code>base64</code> keyword.
 *
 * @return base64 keyword node
 */
private STNode parseBase64Keyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.BASE64_KEYWORD) {
        recover(peek(), ParserRuleContext.BASE64_KEYWORD);
    }
    return consume();
}
/**
 * Collect the template items between the backticks of a byte-array literal.
 * Validation of the collected content happens in the caller.
 *
 * @return Node list of template items
 */
private STNode parseByteArrayContent() {
    List<STNode> items = new ArrayList<>();
    while (!isEndOfBacktickContent(peek().kind)) {
        items.add(parseTemplateItem());
    }
    return STNodeFactory.createNodeList(items);
}
/**
 * Validate base16 literal content.
 * <p>
 * <code>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * HexGroup := WS HexDigit WS HexDigit
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase16LiteralContent(String content) {
    int hexDigitCount = 0;
    for (int i = 0; i < content.length(); i++) {
        char c = content.charAt(i);
        // Whitespace may appear freely between hex digits.
        if (c == LexerTerminals.TAB || c == LexerTerminals.NEWLINE
                || c == LexerTerminals.CARRIAGE_RETURN || c == LexerTerminals.SPACE) {
            continue;
        }
        if (!isHexDigit(c)) {
            return false;
        }
        hexDigitCount++;
    }
    // Hex digits encode whole bytes in pairs, so the count must be even.
    return hexDigitCount % 2 == 0;
}
/**
 * Validate base64 literal content.
 * <p>
 * <code>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * <br/>
 * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
 * <br/>
 * PaddedBase64Group :=
 * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
 * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
 * <br/>
 * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
 * <br/>
 * PaddingChar := =
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase64LiteralContent(String content) {
    char[] charArray = content.toCharArray();
    int base64CharCount = 0;
    int paddingCharCount = 0;
    for (char c : charArray) {
        switch (c) {
            // Whitespace may appear freely anywhere in the literal.
            case LexerTerminals.TAB:
            case LexerTerminals.NEWLINE:
            case LexerTerminals.CARRIAGE_RETURN:
            case LexerTerminals.SPACE:
                break;
            case LexerTerminals.EQUAL:
                paddingCharCount++;
                break;
            default:
                if (isBase64Char(c)) {
                    if (paddingCharCount == 0) {
                        base64CharCount++;
                    } else {
                        // A base64 char after padding has started is invalid.
                        return false;
                    }
                } else {
                    return false;
                }
                break;
        }
    }
    if (paddingCharCount > 2) {
        // At most two `=` padding chars are allowed.
        return false;
    } else if (paddingCharCount == 0) {
        // Without padding, chars must form complete groups of four.
        return true && base64CharCount % 4 == 0;
    } else {
        // With k padding chars, the final group has 4 - k data chars.
        return base64CharCount % 4 == 4 - paddingCharCount;
    }
}
/**
 * <p>
 * Check whether a given char is a base64 char.
 * </p>
 * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
 *
 * @param c character to check
 * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
 */
static boolean isBase64Char(int c) {
    // The standard base64 alphabet: letters, digits, '+' and '/'.
    return ('a' <= c && c <= 'z')
            || ('A' <= c && c <= 'Z')
            || ('0' <= c && c <= '9')
            || c == '+'
            || c == '/';
}
/**
 * Check whether a given char is a hexadecimal digit (0-9, a-f, A-F).
 *
 * @param c character to check
 * @return {@code true} when the character is a hex digit
 */
static boolean isHexDigit(int c) {
    return ('0' <= c && c <= '9')
            || ('a' <= c && c <= 'f')
            || ('A' <= c && c <= 'F');
}
/**
 * Check whether a given char is a decimal digit (0-9).
 *
 * @param c character to check
 * @return {@code true} when the character is a decimal digit
 */
static boolean isDigit(int c) {
    return c >= '0' && c <= '9';
}
/**
 * Parse xml filter expression.
 * <p>
 * <code>xml-filter-expr := expression .&lt; xml-name-pattern &gt;</code>
 *
 * @param lhsExpr Preceding expression of .&lt; token
 * @return Parsed node
 */
private STNode parseXMLFilterExpression(STNode lhsExpr) {
    return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, parseXMLFilterExpressionRhs());
}
/**
 * Parse xml filter expression rhs.
 * <p>
 * <code>filer-expression-rhs := .&lt; xml-name-pattern &gt;</code>
 *
 * @return Parsed node
 */
private STNode parseXMLFilterExpressionRhs() {
    return parseXMLNamePatternChain(parseDotLTToken());
}
/**
 * Parse xml name pattern chain.
 * <p>
 * <code>
 * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
 * <br/>
 * filer-expression-rhs := .&lt; xml-name-pattern &gt;
 * <br/>
 * xml-element-children-step := /&lt; xml-name-pattern &gt;
 * <br/>
 * xml-element-descendants-step := /**\/&lt;xml-name-pattern &gt;
 * </code>
 *
 * @param startToken Preceding token of xml name pattern
 * @return Parsed node
 */
private STNode parseXMLNamePatternChain(STNode startToken) {
    startContext(ParserRuleContext.XML_NAME_PATTERN);
    STNode namePattern = parseXMLNamePattern();
    STNode gtToken = parseGTToken();
    endContext();
    // An empty name pattern is an error; attach the diagnostic to the start token.
    startToken = cloneWithDiagnosticIfListEmpty(namePattern, startToken,
            DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
    return STNodeFactory.createXMLNamePatternChainingNode(startToken, namePattern, gtToken);
}
/**
 * Parse <code> .&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseDotLTToken() {
    // Recover repeatedly until the expected token is at the head of the stream.
    while (peek().kind != SyntaxKind.DOT_LT_TOKEN) {
        recover(peek(), ParserRuleContext.DOT_LT_TOKEN);
    }
    return consume();
}
/**
 * Parse xml name pattern.
 * <p>
 * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
 *
 * @return Parsed node list of atomic name patterns and their `|` separators
 */
private STNode parseXMLNamePattern() {
    List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
    STToken nextToken = peek();
    // Empty pattern (`>` immediately): return an empty list; the caller attaches
    // the missing-pattern diagnostic.
    if (isEndOfXMLNamePattern(nextToken.kind)) {
        return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
    }
    STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
    xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
    STNode separator;
    // Alternate atomic patterns with `|` separators; a null separator means the
    // pattern ended without a trailing `|`.
    while (!isEndOfXMLNamePattern(peek().kind)) {
        separator = parseXMLNamePatternSeparator();
        if (separator == null) {
            break;
        }
        xmlAtomicNamePatternList.add(separator);
        xmlAtomicNamePattern = parseXMLAtomicNamePattern();
        xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
    }
    return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}
/**
 * Check whether the given token kind ends an xml name pattern.
 *
 * @param tokenKind Token kind to test
 * @return {@code true} for the closing {@code >} or EOF, {@code false} otherwise
 */
private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
    // Only the closing `>` and EOF terminate the pattern; everything else
    // (identifier, `*`, `:`, ...) continues it.
    return tokenKind == SyntaxKind.GT_TOKEN || tokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the `|` separator between atomic xml name patterns.
 *
 * @return Pipe token, or {@code null} when the pattern ends (`&gt;` or EOF)
 */
private STNode parseXMLNamePatternSeparator() {
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.PIPE_TOKEN) {
            return consume();
        }
        if (token.kind == SyntaxKind.GT_TOKEN || token.kind == SyntaxKind.EOF_TOKEN) {
            return null;
        }
        recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
    }
}
/**
 * Parse xml atomic name pattern.
 * <p>
 * <code>
 * xml-atomic-name-pattern :=
 * *
 * | identifier
 * | xml-namespace-prefix : identifier
 * | xml-namespace-prefix : *
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePattern() {
    startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
    STNode pattern = parseXMLAtomicNamePatternBody();
    endContext();
    return pattern;
}
/**
 * Parse the body of an xml atomic name pattern: a bare `*`, or an identifier
 * optionally followed by `: identifier` / `: *`.
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePatternBody() {
    STToken token = peek();
    STNode identifier;
    switch (token.kind) {
        case ASTERISK_TOKEN:
            // A bare `*` is a complete atomic pattern.
            return consume();
        case IDENTIFIER_TOKEN:
            identifier = consume();
            break;
        default:
            recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
            return parseXMLAtomicNamePatternBody();
    }
    // Check for an optional `: identifier` / `: *` suffix.
    return parseXMLAtomicNameIdentifier(identifier);
}
/**
 * Given the leading identifier of an atomic name pattern, parse the optional
 * `: identifier` / `: *` suffix.
 *
 * @param identifier Already-consumed leading identifier
 * @return Atomic name pattern node when a valid suffix follows, otherwise a
 *         simple name reference over the identifier alone
 */
private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
    STToken token = peek();
    if (token.kind == SyntaxKind.COLON_TOKEN) {
        STNode colon = consume();
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
            STToken endToken = consume();
            return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
        }
        // NOTE(review): when the colon is not followed by an identifier or `*`, the
        // consumed colon token is not attached to the returned node — verify whether
        // the token is intentionally discarded here or should be added as invalid
        // minutiae.
    }
    return STNodeFactory.createSimpleNameReferenceNode(identifier);
}
/**
 * Parse xml step expression.
 * <p>
 * <code>xml-step-expr := expression xml-step-start</code>
 *
 * @param lhsExpr Preceding expression of /*, /&lt;, or /**\/&lt; token
 * @return Parsed node
 */
private STNode parseXMLStepExpression(STNode lhsExpr) {
    return STNodeFactory.createXMLStepExpressionNode(lhsExpr, parseXMLStepStart());
}
/**
 * Parse xml step start.
 * <p>
 * <code>
 * xml-step-start :=
 * xml-all-children-step
 * | xml-element-children-step
 * | xml-element-descendants-step
 * <br/>
 * xml-all-children-step := /*
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLStepStart() {
    SyntaxKind nextKind = peek().kind;
    // `/*` stands alone; no name pattern follows it.
    if (nextKind == SyntaxKind.SLASH_ASTERISK_TOKEN) {
        return consume();
    }
    STNode startToken;
    if (nextKind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
        startToken = parseDoubleSlashDoubleAsteriskLTToken();
    } else {
        // Covers `/<` and, via recovery, any unexpected token.
        startToken = parseSlashLTToken();
    }
    return parseXMLNamePatternChain(startToken);
}
/**
 * Parse <code> /&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseSlashLTToken() {
    // Recover repeatedly until the expected token is at the head of the stream.
    while (peek().kind != SyntaxKind.SLASH_LT_TOKEN) {
        recover(peek(), ParserRuleContext.SLASH_LT_TOKEN);
    }
    return consume();
}
/**
 * Parse <code> /**\/&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
    // Recover repeatedly until the expected token is at the head of the stream.
    while (peek().kind != SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
        recover(peek(), ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
    }
    return consume();
}
/**
 * Parse match statement.
 * <p>
 * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
 *
 * @return Match statement
 */
private STNode parseMatchStatement() {
    startContext(ParserRuleContext.MATCH_STMT);
    STNode matchKeyword = parseMatchKeyword();
    STNode actionOrExpr = parseActionOrExpression();
    startContext(ParserRuleContext.MATCH_BODY);
    STNode openBrace = parseOpenBrace();
    List<STNode> matchClausesList = new ArrayList<>();
    while (!isEndOfMatchClauses(peek().kind)) {
        STNode clause = parseMatchClause();
        matchClausesList.add(clause);
    }
    STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);
    // The grammar requires at least one clause; attach the diagnostic to the open brace.
    if (isNodeListEmpty(matchClauses)) {
        openBrace = SyntaxErrors.addDiagnostic(openBrace,
                DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
    }
    STNode closeBrace = parseCloseBrace();
    // Two endContext calls: first closes MATCH_BODY, second closes MATCH_STMT.
    endContext();
    endContext();
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
            onFailClause);
}
/**
 * Parse match keyword.
 *
 * @return Match keyword node
 */
private STNode parseMatchKeyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.MATCH_KEYWORD) {
        recover(peek(), ParserRuleContext.MATCH_KEYWORD);
    }
    return consume();
}
/**
 * Check whether the given token kind ends the clause list of a match body.
 *
 * @param nextTokenKind Token kind to test
 * @return {@code true} for EOF or the closing brace of the match body
 */
private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
/**
 * Parse a single match clause.
 * <p>
 * <code>
 * match-clause := match-pattern-list [match-guard] =&gt; block-stmt
 * <br/>
 * match-guard := if expression
 * </code>
 *
 * @return A match clause
 */
private STNode parseMatchClause() {
    STNode patterns = parseMatchPatternList();
    STNode guard = parseMatchGuard();
    STNode doubleArrow = parseDoubleRightArrow();
    STNode clauseBody = parseBlockNode();
    return STNodeFactory.createMatchClauseNode(patterns, guard, doubleArrow, clauseBody);
}
/**
 * Parse match guard.
 * <p>
 * <code>match-guard := if expression</code>
 *
 * @return Match guard node, or an empty node when the guard is absent
 */
private STNode parseMatchGuard() {
    while (true) {
        SyntaxKind nextKind = peek().kind;
        if (nextKind == SyntaxKind.IF_KEYWORD) {
            STNode ifKeyword = parseIfKeyword();
            STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
            return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
        }
        // `=>` directly after the patterns means there is no guard.
        if (nextKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
            return STNodeFactory.createEmptyNode();
        }
        recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);
    }
}
/**
 * Parse match patterns list.
 * <p>
 * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
 *
 * @return Match patterns list
 */
private STNode parseMatchPatternList() {
    startContext(ParserRuleContext.MATCH_PATTERN);
    List<STNode> patternList = new ArrayList<>();
    // Patterns alternate with `|` separators; separators are kept in the list.
    while (!isEndOfMatchPattern(peek().kind)) {
        STNode pattern = parseMatchPattern();
        if (pattern == null) {
            break;
        }
        patternList.add(pattern);
        STNode separator = parseMatchPatternListMemberRhs();
        if (separator == null) {
            break;
        }
        patternList.add(separator);
    }
    endContext();
    return STNodeFactory.createNodeList(patternList);
}
/**
 * Check whether the given token kind ends a match pattern.
 *
 * @param nextTokenKind Token kind to test
 * @return {@code true} when the pattern list should not continue with another pattern
 */
private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case PIPE_TOKEN:
        case IF_KEYWORD:
        // NOTE(review): this checks RIGHT_ARROW_TOKEN (`->`), while the sibling
        // parseMatchPatternListMemberRhs terminates on RIGHT_DOUBLE_ARROW_TOKEN (`=>`)
        // and match clauses use `=>` (parseDoubleRightArrow). Verify whether `->` is
        // intended here or whether `=>` should (also) be treated as an end token.
        case RIGHT_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}
/**
 * Parse match pattern.
 * <p>
 * <code>
 * match-pattern := var binding-pattern
 * | wildcard-match-pattern
 * | const-pattern
 * | list-match-pattern
 * | mapping-match-pattern
 * | error-match-pattern
 * </code>
 *
 * @return Match pattern
 */
private STNode parseMatchPattern() {
    switch (peek().kind) {
        // Tokens that begin a simple constant expression (const-pattern).
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
            return parseSimpleConstExpr();
        case IDENTIFIER_TOKEN:
            // Could be an error-match-pattern type reference or a constant reference;
            // disambiguated after reading the (possibly qualified) identifier.
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        case VAR_KEYWORD:
            return parseVarTypedBindingPattern();
        case OPEN_BRACKET_TOKEN:
            return parseListMatchPattern();
        case OPEN_BRACE_TOKEN:
            return parseMappingMatchPattern();
        case ERROR_KEYWORD:
            return parseErrorMatchPattern();
        default:
            recover(peek(), ParserRuleContext.MATCH_PATTERN_START);
            return parseMatchPattern();
    }
}
/**
 * Parse the token following a match pattern in a pattern list: a `|` separator,
 * or nothing when the clause's guard or `=&gt;` follows.
 *
 * @return Pipe token, or {@code null} when the pattern list has ended
 */
private STNode parseMatchPatternListMemberRhs() {
    while (true) {
        SyntaxKind nextKind = peek().kind;
        if (nextKind == SyntaxKind.PIPE_TOKEN) {
            return parsePipeToken();
        }
        if (nextKind == SyntaxKind.IF_KEYWORD || nextKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
            return null;
        }
        recover(peek(), ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
    }
}
/**
 * Parse var typed binding pattern.
 * <p>
 * <code>var binding-pattern</code>
 * </p>
 *
 * @return Parsed typed binding pattern node
 */
private STNode parseVarTypedBindingPattern() {
    STNode varKeyword = parseVarKeyword();
    // The `var` keyword acts as the type descriptor of the binding pattern.
    STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
    return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, parseBindingPattern());
}
/**
 * Parse var keyword.
 *
 * @return Var keyword node
 */
private STNode parseVarKeyword() {
    // Recover repeatedly until the expected keyword is at the head of the stream.
    while (peek().kind != SyntaxKind.VAR_KEYWORD) {
        recover(peek(), ParserRuleContext.VAR_KEYWORD);
    }
    return consume();
}
/**
 * Parse list match pattern.
 * <p>
 * <code>
 * list-match-pattern := [ list-member-match-patterns ]
 * list-member-match-patterns :=
 * match-pattern (, match-pattern)* [, rest-match-pattern]
 * | [ rest-match-pattern ]
 * </code>
 * </p>
 *
 * @return Parsed list match pattern node
 */
private STNode parseListMatchPattern() {
    startContext(ParserRuleContext.LIST_MATCH_PATTERN);
    STNode openBracketToken = parseOpenBracket();
    List<STNode> matchPatternList = new ArrayList<>();
    STNode listMatchPatternMemberRhs = null;
    boolean isEndOfFields = false;
    // Main loop: collect members and their comma separators. A rest pattern
    // (`...var x`) must be last, so it flips isEndOfFields and exits the loop.
    while (!isEndOfListMatchPattern()) {
        STNode listMatchPatternMember = parseListMatchPatternMember();
        matchPatternList.add(listMatchPatternMember);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            isEndOfFields = true;
            break;
        }
        if (listMatchPatternMemberRhs != null) {
            matchPatternList.add(listMatchPatternMemberRhs);
        } else {
            break;
        }
    }
    // Error path: members found after the rest pattern. Each trailing separator and
    // member is folded into the previous node as invalid minutiae, flagging patterns
    // that follow the rest pattern.
    while (isEndOfFields && listMatchPatternMemberRhs != null) {
        updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
        if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
            break;
        }
        STNode invalidField = parseListMatchPatternMember();
        updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
    }
    STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
}
/**
 * Check whether the next token ends a list match pattern.
 *
 * @return {@code true} for the closing bracket or EOF
 */
public boolean isEndOfListMatchPattern() {
    SyntaxKind nextKind = peek().kind;
    return nextKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse a single member of a list match pattern: either a rest match pattern
 * (starting with `...`) or a regular match pattern.
 *
 * @return Parsed member node
 */
private STNode parseListMatchPatternMember() {
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestMatchPattern();
    }
    return parseMatchPattern();
}
/**
 * Parse rest match pattern.
 * <p>
 * <code>rest-match-pattern := ... var variable-name</code>
 * </p>
 *
 * @return Parsed rest match pattern node
 */
private STNode parseRestMatchPattern() {
    startContext(ParserRuleContext.REST_MATCH_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varKeyword = parseVarKeyword();
    STNode varName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode varRef =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestMatchPatternNode(ellipsis, varKeyword, varRef);
}
/**
 * Parse the token following a list match pattern member: a comma separator,
 * or nothing at the end of the list.
 *
 * @return Comma token, or {@code null} when the member list has ended
 */
private STNode parseListMatchPatternMemberRhs() {
    while (true) {
        SyntaxKind nextKind = peek().kind;
        if (nextKind == SyntaxKind.COMMA_TOKEN) {
            return parseComma();
        }
        if (nextKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextKind == SyntaxKind.EOF_TOKEN) {
            return null;
        }
        recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
    }
}
/**
 * Parse mapping match pattern.
 * <p>
 * mapping-match-pattern := { field-match-patterns }
 * <br/>
 * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
 * | [ rest-match-pattern ]
 * <br/>
 * field-match-pattern := field-name : match-pattern
 * <br/>
 * rest-match-pattern := ... var variable-name
 * </p>
 *
 * @return Parsed Node.
 */
private STNode parseMappingMatchPattern() {
    startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
    STNode openBraceToken = parseOpenBrace();
    List<STNode> fieldMatchPatternList = new ArrayList<>();
    STNode fieldMatchPatternRhs = null;
    boolean isEndOfFields = false;
    // Main loop: collect fields and their comma separators. A rest pattern
    // (`...var x`) must be last, so it flips isEndOfFields and exits the loop.
    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        fieldMatchPatternList.add(fieldMatchPatternMember);
        fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            isEndOfFields = true;
            break;
        }
        if (fieldMatchPatternRhs != null) {
            fieldMatchPatternList.add(fieldMatchPatternRhs);
        } else {
            break;
        }
    }
    // Error path: fields found after the rest pattern. Trailing separators and fields
    // are folded into the previous node as invalid minutiae, flagging patterns that
    // follow the rest pattern.
    while (isEndOfFields && fieldMatchPatternRhs != null) {
        updateLastNodeInListWithInvalidNode(fieldMatchPatternList, fieldMatchPatternRhs, null);
        if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
            break;
        }
        STNode invalidField = parseFieldMatchPatternMember();
        updateLastNodeInListWithInvalidNode(fieldMatchPatternList, invalidField,
                DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        fieldMatchPatternRhs = parseFieldMatchPatternRhs();
    }
    STNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
}
/**
 * Parses one member of a mapping match pattern: either a
 * <code>field-name : match-pattern</code> entry or a rest match pattern.
 * Recovers and retries on any other token.
 *
 * @return Parsed field match pattern member
 */
private STNode parseFieldMatchPatternMember() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseFieldMatchPattern();
    }
    if (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestMatchPattern();
    }
    recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);
    return parseFieldMatchPatternMember();
}
/**
 * Parse field match pattern.
 * <p>
 * field-match-pattern := field-name : match-pattern
 * </p>
 *
 * @return Parsed field match pattern node
 */
public STNode parseFieldMatchPattern() {
    STNode fieldNameNode = parseVariableName();
    STNode colonToken = parseColon();
    STNode matchPattern = parseMatchPattern();
    return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
}
/**
 * Checks whether the token at the cursor terminates a mapping match pattern.
 *
 * @return {@code true} if the next token is a close brace or EOF; {@code false} otherwise
 */
public boolean isEndOfMappingMatchPattern() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parses what follows a field match pattern: a comma separator, or nothing
 * when the mapping pattern is closing. Recovers and retries on any other token.
 *
 * @return Comma token, or {@code null} at the end of the field list
 */
private STNode parseFieldMatchPatternRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN) {
        // End of the field list; there is no trailing separator.
        return null;
    }
    recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
    return parseFieldMatchPatternRhs();
}
// Disambiguates a parsed type-reference/const-expression: if an open paren follows,
// it was actually an error match pattern missing its `error` keyword; otherwise, if a
// match-pattern terminator follows, keep it as a const pattern. Recovers on anything else.
private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // Treat as an error match pattern with a missing `error` keyword; the
            // synthesized token carries the diagnostic.
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
            return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
        default:
            if (isMatchPatternEnd(peek().kind)) {
                // Nothing else follows: the node stands on its own as a const pattern.
                return typeRefOrConstExpr;
            }
            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
    }
}
/**
 * Checks whether the given token kind can immediately follow a complete match pattern.
 *
 * @param tokenKind Kind of the token to check
 * @return {@code true} if the token terminates a match pattern; {@code false} otherwise
 */
private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN || tokenKind == SyntaxKind.COMMA_TOKEN
            || tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || tokenKind == SyntaxKind.PIPE_TOKEN
            || tokenKind == SyntaxKind.IF_KEYWORD || tokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse error match pattern.
 * <p>
 * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
 * error-arg-list-match-pattern :=
 * error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
 * | [error-field-match-patterns]
 * error-message-match-pattern := simple-match-pattern
 * error-cause-match-pattern := simple-match-pattern | error-match-pattern
 * simple-match-pattern :=
 * wildcard-match-pattern
 * | const-pattern
 * | var variable-name
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 * </p>
 *
 * @return Parsed error match pattern node
 */
private STNode parseErrorMatchPattern() {
    startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
    // The `error` keyword is already at the cursor; take it and continue.
    return parseErrorMatchPattern(consume());
}
/**
 * Parses the part of an error match pattern after the {@code error} keyword:
 * an optional type reference followed by the parenthesized arg list.
 *
 * @param errorKeyword Already-parsed {@code error} keyword
 * @return Parsed error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // No type reference between `error` and the open paren.
        typeRef = STNodeFactory.createEmptyNode();
    } else if (isPredeclaredIdentifier(nextToken.kind)) {
        typeRef = parseTypeReference();
    } else {
        recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
        return parseErrorMatchPattern(errorKeyword);
    }
    return parseErrorMatchPattern(errorKeyword, typeRef);
}
/**
 * Parses the parenthesized arg list of an error match pattern and assembles the node.
 *
 * @param errorKeyword Already-parsed {@code error} keyword
 * @param typeRef Optional error type reference (possibly an empty node)
 * @return Parsed error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParen = parseOpenParenthesis();
    STNode argList = parseErrorArgListMatchPatterns();
    STNode closeParen = parseCloseParenthesis();
    endContext(); // closes ERROR_MATCH_PATTERN
    return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParen, argList, closeParen);
}
// Parses the arg list of an error match pattern. The first arg may be a simple
// match pattern (the error message); subsequent args must be the cause, named args,
// or a rest pattern. Invalid nodes are attached to neighbors as diagnostics.
private STNode parseErrorArgListMatchPatterns() {
    List<STNode> argListMatchPatterns = new ArrayList<>();
    if (isEndOfErrorFieldMatchPatterns()) {
        // Empty arg list: error( ).
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }
    startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
    STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
    endContext();
    if (firstArg == null) {
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }
    if (isSimpleMatchPattern(firstArg.kind)) {
        // First arg is the error-message match pattern; a second arg (cause or field) may follow.
        argListMatchPatterns.add(firstArg);
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
        if (argEnd != null) {
            // NOTE(review): secondArg may be null here for a trailing comma (e.g. `error(x,)`),
            // which would NPE on secondArg.kind — confirm whether recovery prevents that input.
            STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
            if (isValidSecondArgMatchPattern(secondArg.kind)) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(secondArg);
            } else {
                // Invalid second arg: fold the separator and the arg into the previous node.
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            }
        }
    } else {
        // Without a simple first arg, only named-arg or rest patterns are allowed up front.
        if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
        } else {
            argListMatchPatterns.add(firstArg);
        }
    }
    parseErrorFieldMatchPatterns(argListMatchPatterns);
    return STNodeFactory.createNodeList(argListMatchPatterns);
}
/**
 * Checks whether the given kind is one of the simple match pattern kinds
 * (wildcard/const patterns, literals, or a {@code var x} typed binding pattern).
 *
 * @param matchPatternKind Kind of the parsed match pattern
 * @return {@code true} for a simple match pattern kind; {@code false} otherwise
 */
private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
    switch (matchPatternKind) {
        case IDENTIFIER_TOKEN:
        case SIMPLE_NAME_REFERENCE:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case NULL_LITERAL:
        case NIL_LITERAL:
        case BOOLEAN_LITERAL:
        case TYPED_BINDING_PATTERN:
        case UNARY_EXPRESSION:
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether the given kind is valid as the second arg of an error match pattern:
 * an error match pattern, a named-arg pattern, a rest pattern, or any simple match pattern.
 *
 * @param syntaxKind Kind of the parsed second arg
 * @return {@code true} if valid as a second arg; {@code false} otherwise
 */
private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case ERROR_MATCH_PATTERN:
        case NAMED_ARG_MATCH_PATTERN:
        case REST_MATCH_PATTERN:
            return true;
        default:
            // Was: `if (isSimpleMatchPattern(...)) return true; return false;` — return directly.
            return isSimpleMatchPattern(syntaxKind);
    }
}
/**
 * Parse error field match patterns.
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 * @param argListMatchPatterns List collecting the parsed args; valid args and their
 *                             separators are appended, invalid ones are attached
 *                             to neighbors as diagnostics
 */
private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
    // Seed with NAMED_ARG so that the first field is validated as following a named arg.
    SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
    while (!isEndOfErrorFieldMatchPatterns()) {
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
        if (argEnd == null) {
            // Close paren reached: no more fields.
            break;
        }
        STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
        DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListMatchPatterns.add(argEnd);
            argListMatchPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListMatchPatterns.size() == 0) {
            // Nothing valid collected yet: hang the invalid nodes off the next token instead.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
        }
    }
}
// Error field match patterns end exactly where error field binding patterns do,
// so the check is shared with the binding-pattern parser.
private boolean isEndOfErrorFieldMatchPatterns() {
    return isEndOfErrorFieldBindingPatterns();
}
/**
 * Parses the separator after an error arg-list match pattern: a comma, or nothing
 * when the close paren is reached. Recovers and retries on any other token.
 *
 * @param currentCtx Parser rule context used for recovery
 * @return Comma token, or {@code null} at the end of the arg list
 */
private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(peek(), currentCtx);
    return parseErrorArgListMatchPatternEnd(currentCtx);
}
// Parses a single arg of an error match pattern's arg list. Dispatches on the
// leading token: rest pattern, named/simple pattern, a general match pattern for
// literal-like starts, or a `var x` typed binding pattern. Returns null at `)`.
private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        case IDENTIFIER_TOKEN:
            // Could be `name = pattern` (named arg) or a bare identifier (simple pattern).
            return parseNamedOrSimpleMatchPattern();
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // All of these start an ordinary match pattern.
            return parseMatchPattern();
        case VAR_KEYWORD:
            // `var x` — build a typed binding pattern with the `var` type.
            STNode varType = createBuiltinSimpleNameReference(consume());
            STNode variableName = createCaptureOrWildcardBP(parseVariableName());
            return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
        case CLOSE_PAREN_TOKEN:
            // Empty arg position: caller interprets null as "no arg".
            return null;
        default:
            recover(nextToken, context);
            return parseErrorArgListMatchPattern(context);
    }
}
/**
 * Parses an arg that starts with an identifier: {@code name = pattern} becomes a
 * named-arg match pattern; otherwise the bare identifier token is returned as-is
 * (a simple match pattern, e.g. before {@code ,} or {@code )}).
 *
 * @return Named-arg match pattern node, or the identifier token itself
 */
private STNode parseNamedOrSimpleMatchPattern() {
    STNode identifier = consume();
    // Was a switch whose COMMA/CLOSE_PAREN labels just fell into default — only the
    // EQUAL_TOKEN case matters, so a single condition expresses the same dispatch.
    if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
        return parseNamedArgMatchPattern(identifier);
    }
    return identifier;
}
/**
 * Parses the next named arg match pattern.
 * <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 * <br/>
 * <br/>
 *
 * @param identifier Already-consumed arg-name identifier token
 * @return Parsed named-arg match pattern node
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equalToken = parseAssignOp();
    STNode matchPattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}
/**
 * Validates the ordering of error field match patterns: only named-arg and rest
 * patterns are allowed, and nothing may follow a rest pattern.
 *
 * @param prevArgKind Kind of the previous valid arg
 * @param currentArgKind Kind of the arg just parsed
 * @return {@code null} if the ordering is valid; otherwise the diagnostic to attach
 */
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN
            && currentArgKind != SyntaxKind.REST_MATCH_PATTERN) {
        return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
    }
    if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
        // A rest match pattern must be the last arg.
        return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
    }
    return null;
}
/**
 * Parse markdown documentation.
 *
 * @return markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> docLines = new ArrayList<>();
    // Consecutive documentation-string tokens merge into one markdown documentation node.
    while (peek().kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken docStringToken = consume();
        appendParsedDocumentationLines(docLines, parseDocumentationString(docStringToken));
    }
    return STNodeFactory.createMarkdownDocumentationNode(STNodeFactory.createNodeList(docLines));
}
/**
 * Parse documentation string.
 *
 * @param documentationStringToken Documentation-string token to re-lex and parse
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Re-lex the raw token text with a dedicated documentation lexer, carrying over
    // the token's leading trivia and any diagnostics it already accumulated.
    List<STNode> leadingTrivia = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> docDiagnostics = new ArrayList<>((documentationStringToken.diagnostics()));
    CharReader reader = CharReader.from(documentationStringToken.text());
    DocumentationLexer docLexer = new DocumentationLexer(reader, leadingTrivia, docDiagnostics);
    AbstractTokenReader docTokenReader = new TokenReader(docLexer);
    return new DocumentationParser(docTokenReader).parse();
}
// Copies the children of a leading-minutiae node into a plain list.
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    int count = leadingMinutiaeNode.bucketCount();
    List<STNode> triviaList = new ArrayList<>(count);
    for (int bucket = 0; bucket < count; bucket++) {
        triviaList.add(leadingMinutiaeNode.childInBucket(bucket));
    }
    return triviaList;
}
// Flattens the children of a parsed doc-lines node into the accumulating list.
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    int count = parsedDocLines.bucketCount();
    for (int bucket = 0; bucket < count; bucket++) {
        markdownDocLineList.add(parsedDocLines.childInBucket(bucket));
    }
}
/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
 *
 * @param annots Annotations
 * @param qualifiers Qualifiers preceding the ambiguous token (e.g. type-desc qualifiers)
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
    // Context stays open here; it is switched/closed once the ambiguity is resolved
    // inside parseStmtStartsWithTypedBPOrExprRhs.
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
}
/**
 * Completes a statement whose start was ambiguous between a typed binding pattern
 * and an expression, once the ambiguous prefix has been parsed.
 *
 * @param annots Annotations attached to the statement
 * @param typedBindingPatternOrExpr Resolved prefix node
 * @return Statement node
 */
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // Resolved to a typed binding pattern: continue as a var-decl statement
        // with no declaration qualifiers.
        List<STNode> varDeclQualifiers = new ArrayList<>();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);
    }
    // Resolved to an expression: finish the expression and the statement around it.
    STNode lhsExpr = getExpression(typedBindingPatternOrExpr);
    lhsExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, lhsExpr, false, true);
    return parseStatementStartWithExprRhs(lhsExpr);
}
// Convenience overload: parse a typed-binding-pattern-or-expression with no qualifiers.
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    return parseTypedBindingPatternOrExpr(new ArrayList<>(), allowAssignment);
}
// Parses a construct that may be either a typed binding pattern or an expression,
// dispatching on the first significant token. Qualifiers that cannot apply to the
// chosen interpretation are reported as invalid.
private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // Identifier start: could be a type name or a variable name; resolve via the RHS.
        reportInvalidQualifierList(qualifiers);
        typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
        return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // Nil literal/type, parenthesized type/expr, or anon-func params.
            reportInvalidQualifierList(qualifiers);
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            // Anon function expression vs. function-type descriptor.
            return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
        case OPEN_BRACKET_TOKEN:
            // Tuple type vs. list constructor.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // Literal start: could be a singleton type-desc or an expression.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                // Unambiguously an expression/action start.
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Otherwise it must be a type-desc leading a typed binding pattern.
            return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
    }
}
/**
 * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 *            valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            // `|` followed by `=` means the ambiguous node stands alone (e.g. `a | b = ...`
            // is handled elsewhere); keep it unresolved.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // RHS resolved to a binding pattern: the whole thing is `T1|T2 bp`.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // `T1|T2 = ...`: a var-decl whose variable name is missing.
                return createCaptureBPWithMissingVarName(typeOrExpr, pipe, rhsTypedBPOrExpr);
            }
            // Otherwise it is a bitwise-or binary expression.
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                    rhsTypedBPOrExpr);
        case BITWISE_AND_TOKEN:
            // Mirrors the PIPE_TOKEN case with intersection types / bitwise-and.
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                return createCaptureBPWithMissingVarName(typeOrExpr, ampersand, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                    rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            // Statement ends here: prefer expression if definite, else type-desc binding pattern.
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A following identifier/`?` indicates the ambiguous node was a type-desc.
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            // Assignment: treat the ambiguous node as the lvalue expression.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // Array-type binding pattern vs. member access.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // Mapping/error binding pattern follows: the node must be a type-desc.
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            STToken token = peek();
            recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}
/**
 * Builds a typed binding pattern whose variable name is missing, for inputs like
 * {@code T1|T2 = expr}. The two operands are combined into a union or intersection
 * type-desc according to the separator, and a missing identifier carries the diagnostic.
 *
 * @param lhsType Left operand (expression form; converted to a type-desc)
 * @param separatorToken Pipe or bitwise-and token
 * @param rhsType Right operand (expression form; converted to a type-desc)
 * @return Typed binding pattern with a missing capture variable name
 */
private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
    STNode lhsTypeDesc = getTypeDescFromExpr(lhsType);
    STNode rhsTypeDesc = getTypeDescFromExpr(rhsType);
    STNode combinedTypeDesc = separatorToken.kind == SyntaxKind.PIPE_TOKEN
            ? createUnionTypeDesc(lhsTypeDesc, separatorToken, rhsTypeDesc)
            : createIntersectionTypeDesc(lhsTypeDesc, separatorToken, rhsTypeDesc);
    STNode missingVarName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.VARIABLE_NAME);
    STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(missingVarName);
    return STNodeFactory.createTypedBindingPatternNode(combinedTypeDesc, captureBP);
}
// Completes an ambiguous node that has been resolved to a type-desc: finish any
// complex type-desc suffixes, then parse the binding pattern that follows.
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    STNode completeTypeDesc =
            parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
    return parseTypedBindingPatternTypeRhs(completeTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}
// Handles `( ...` at a typed-binding-pattern-or-expression position: if the
// parenthesized construct is definitely a type-desc, continue as a binding pattern;
// otherwise keep resolving the ambiguity on its RHS.
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode parsed = parseTypedDescOrExprStartsWithOpenParenthesis();
    return isDefiniteTypeDesc(parsed.kind)
            ? parseTypeBindingPatternStartsWithAmbiguousNode(parsed)
            : parseTypedBindingPatternOrExprRhs(parsed, false);
}
// True when the kind lies in the SyntaxKind range reserved for type descriptors.
// Relies on type-desc kinds being declared contiguously between RECORD_TYPE_DESC
// and SINGLETON_TYPE_DESC.
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return SyntaxKind.RECORD_TYPE_DESC.compareTo(kind) <= 0
            && SyntaxKind.SINGLETON_TYPE_DESC.compareTo(kind) >= 0;
}
// True when the kind lies in the SyntaxKind range reserved for expressions,
// excluding name references (which remain ambiguous with type references).
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    return SyntaxKind.BINARY_EXPRESSION.compareTo(kind) <= 0
            && SyntaxKind.XML_ATOMIC_NAME_PATTERN.compareTo(kind) >= 0;
}
/**
 * Parse type or expression that starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis();
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // `()` — nil literal/type or empty anon-func param list; disambiguated by lookahead.
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        // Actions can only appear parenthesized as a braced action.
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        // Could still be a braced expression or the first anon-func parameter.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    // Otherwise it is a parenthesized type descriptor.
    STNode closeParen = parseCloseParenthesis();
    STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);
}
/**
 * Parse type-desc or expression. This method does not handle binding patterns.
 *
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    // Delegate with an empty qualifier list.
    return parseTypeDescOrExpr(new ArrayList<>());
}
// Parses a construct that may be a type-desc or an expression (no binding patterns),
// dispatching on the first significant token. Qualifiers that cannot apply to the
// chosen interpretation are reported as invalid.
private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // Nil literal/type, parenthesized type/expr, or anon-func params.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            break;
        case IDENTIFIER_TOKEN:
            // Identifier start: type name or variable reference; resolve via the RHS.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
            break;
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // Literal start: could be a singleton type-desc or an expression.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                // Unambiguously an expression/action start.
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        // Already a definite type-desc: only type-desc suffixes can follow.
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}
// True when the kind is one of the literal kinds listed below, or lies in the
// SyntaxKind range reserved for expressions (relies on expression kinds being
// declared contiguously between BINARY_EXPRESSION and XML_ATOMIC_NAME_PATTERN).
private boolean isExpression(SyntaxKind kind) {
    if (kind == SyntaxKind.NUMERIC_LITERAL || kind == SyntaxKind.STRING_LITERAL_TOKEN
            || kind == SyntaxKind.NIL_LITERAL || kind == SyntaxKind.NULL_LITERAL
            || kind == SyntaxKind.BOOLEAN_LITERAL) {
        return true;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}
/**
 * Parse statement that starts with an empty parenthesis. Empty parenthesis can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    if (peek().kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // `() => ...` — an implicit anon function with an empty parameter list.
        STNode emptyParams = STNodeFactory.createEmptyNodeList();
        STNode anonFuncParams =
                STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, emptyParams, closeParen);
        endContext();
        return anonFuncParams;
    }
    // Otherwise `()` is the nil literal (or nil type, resolved later by the caller).
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
// Handles `function ...` at a typed-binding-pattern-or-expression position:
// an anon function expression/action stands alone; a function-type descriptor
// must be followed by a binding pattern.
private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
    STNode parsed = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
    if (isAction(parsed) || isExpression(parsed.kind)) {
        return parsed;
    }
    return parseTypedBindingPatternTypeRhs(parsed, ParserRuleContext.VAR_DECL_STMT);
}
/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @param qualifiers Preceding qualifiers
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature;
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // A signature follows: still ambiguous; resolve after parsing it.
        funcSignature = parseFuncSignature(true);
        qualifierList = createFuncTypeQualNodeList(qualifiers, true);
        endContext();
        return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);
    }
    // No signature: this can only be a function-type descriptor.
    funcSignature = STNodeFactory.createEmptyNode();
    qualifierList = createFuncTypeQualNodeList(qualifiers, false);
    STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
            funcSignature);
    if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {
        // At statement level this starts a var-decl.
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    // Inside a bracketed list the type-desc is a tuple member.
    return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
// Resolves the anon-func vs. function-type ambiguity after the signature has been
// parsed: a following `{` or `=>` means an anon function expression; anything else
// (including an identifier, i.e. a variable name) means a function-type descriptor.
private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {
    ParserRuleContext currentCtx = getCurrentContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // Function body follows: this is an explicit anon function expression.
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            }
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            // Signature was parsed leniently; re-validate it as anon-func params.
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,
                    functionKeyword, funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
        default:
            // IDENTIFIER_TOKEN deliberately shares the default path: a variable name after
            // the signature means this was a function-type descriptor in a var-decl.
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                    funcSignature);
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                        true);
            }
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
}
// Parses the continuation of an ambiguous type-desc-or-expression node, resolving
// the ambiguity where the following token makes one interpretation definite.
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            // `|` followed by `=` leaves the node unresolved for the caller.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                // RHS is an expression: the whole thing is a bitwise-or expression.
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                        rhsTypeDescOrExpr);
            }
            // Otherwise both sides are type-descs: build a union type.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);
        case BITWISE_AND_TOKEN:
            // Mirrors the PIPE_TOKEN case with intersection types / bitwise-and.
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                        rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A following identifier/`?` means the node was a type-desc.
            typeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    false);
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            // The surrounding construct ends here; leave the node as-is for the caller.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            // `T...` — a rest descriptor (e.g. tuple rest member).
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                // Continue as an expression.
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}
// True when the node could still be read as either a type-desc or an expression
// (name references, literals, bracketed lists, and pipe-combinations thereof).
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the `|| ... == BITWISE_AND_TOKEN` clause is unreachable — if the
            // operator is BITWISE_AND the first clause is already true. Possibly intended
            // `&& ... != BITWISE_AND_TOKEN` so that `T1 & T2` also counts as ambiguous; confirm.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // `a[b]` stays ambiguous only when both the container and every key are ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
// True when the node is composed entirely of basic literals (possibly grouped,
// pipe-combined, bracketed, or signed numerics).
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): same suspect condition as in isAmbiguous — the second clause is
            // unreachable; likely intended `&& ... != BITWISE_AND_TOKEN`. Confirm.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            // NOTE(review): recursing into isAmbiguous (not isAllBasicLiterals) for the
            // operands looks like a copy-paste from isAmbiguous — verify intent.
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            // NOTE(review): also recurses into isAmbiguous rather than isAllBasicLiterals.
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            // Every non-comma member must itself be all basic literals.
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only signed numeric literals (+x / -x) count.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}
/**
 * Checks whether the given node is a numeric literal.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is a numeric literal
 */
private boolean isNumericLiteral(STNode node) {
    return node.kind == SyntaxKind.NUMERIC_LITERAL;
}
/**
 * Parses a construct starting with `[` as a tuple type descriptor, where each member is
 * parsed as an ambiguous type-desc-or-expression.
 *
 * @return Tuple type descriptor node
 */
private STNode parseTupleTypeDescOrExprStartsWithOpenBracket() {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    while (!isEndOfListConstructor(peek().kind)) {
        STNode member = parseTypeDescOrExpr();
        // `T...` is a rest descriptor, but only when T is definitely a type-desc.
        if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN && isDefiniteTypeDesc(member.kind)) {
            member = STNodeFactory.createRestDescriptorNode(member, consume());
        }
        memberList.add(member);
        STNode separator = parseBracketedListMemberEnd();
        if (separator == null) {
            break;
        }
        memberList.add(separator);
    }
    STNode members = STNodeFactory.createNodeList(memberList);
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, members, closeBracket);
}
/**
* Parse binding-patterns.
* <p>
* <code>
* binding-pattern := capture-binding-pattern
* | wildcard-binding-pattern
* | list-binding-pattern
* | mapping-binding-pattern
* | functional-binding-pattern
* <br/><br/>
* <p>
* capture-binding-pattern := variable-name
* variable-name := identifier
* <br/><br/>
* <p>
* wildcard-binding-pattern := _
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/><br/>
* <p>
* mapping-binding-pattern := { field-binding-patterns }
* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/>
* field-binding-pattern := field-name : binding-pattern | variable-name
* <br/>
* rest-binding-pattern := ... variable-name
* <p>
* <br/><br/>
* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
* <br/>
* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
* | other-arg-binding-patterns
* <br/>
* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
* <br/>
* positional-arg-binding-pattern := binding-pattern
* <br/>
* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
* | [rest-binding-pattern]
* <br/>
* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
* <br/>
* named-arg-binding-pattern := arg-name = binding-pattern
* </code>
*
* @return binding-pattern node
*/
private STNode parseBindingPattern() {
    // Dispatch on the token that begins the binding pattern.
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseListBindingPattern();
    } else if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseBindingPatternStartsWithIdentifier();
    } else if (nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMappingBindingPattern();
    } else if (nextTokenKind == SyntaxKind.ERROR_KEYWORD) {
        return parseErrorBindingPattern();
    }
    // Unexpected token: recover and retry.
    recover(peek(), ParserRuleContext.BINDING_PATTERN);
    return parseBindingPattern();
}
/**
 * Parses a binding pattern that begins with an identifier. This is either a
 * capture/wildcard binding pattern, or an error binding pattern whose `error`
 * keyword is missing (detected by a following open paren).
 *
 * @return Binding pattern node
 */
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `identifier (` — treat as an error binding pattern with a missing `error` keyword.
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode missingErrorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                ParserRuleContext.ERROR_KEYWORD);
        return parseErrorBindingPattern(missingErrorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
    }
    // A qualified name cannot be captured: substitute a missing identifier, keeping the
    // original node as invalid minutiae.
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    missingIdentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(missingIdentifier, argNameOrBindingPattern);
    return createCaptureOrWildcardBP(missingIdentifier);
}
/**
 * Creates a wildcard binding pattern for `_`, or a capture binding pattern otherwise.
 *
 * @param varName Variable name token
 * @return Capture or wildcard binding pattern node
 */
private STNode createCaptureOrWildcardBP(STNode varName) {
    return isWildcardBP(varName)
            ? getWildcardBindingPattern(varName)
            : STNodeFactory.createCaptureBindingPatternNode(varName);
}
/**
* Parse list-binding-patterns.
* <p>
* <code>
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* </code>
*
* @return list-binding-pattern node
*/
private STNode parseListBindingPattern() {
    // `[ ... ]` — open the list-binding-pattern context for the whole construct.
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    STNode listBindingPattern = parseListBindingPattern(openBracket, new ArrayList<>());
    endContext();
    return listBindingPattern;
}
/**
 * Parses the members and close bracket of a list binding pattern.
 *
 * @param openBracket Already-parsed open bracket
 * @param bindingPatternsList List to collect member nodes into
 * @return List binding pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    // Empty list binding pattern: `[ ]`.
    if (bindingPatternsList.isEmpty() && isEndOfListBindingPattern(peek().kind)) {
        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }
    STNode firstMember = parseListBindingPatternMember();
    bindingPatternsList.add(firstMember);
    return parseListBindingPattern(openBracket, firstMember, bindingPatternsList);
}
/**
 * Continues a list binding pattern after the first member: consumes `, member` pairs
 * until the close bracket, EOF, or a rest-binding-pattern (which must be last).
 *
 * @param openBracket Already-parsed open bracket
 * @param firstMember First member, already added to the list
 * @param bindingPatterns Collected members and separators
 * @return List binding pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    while (member.kind != SyntaxKind.REST_BINDING_PATTERN && !isEndOfListBindingPattern(peek().kind)) {
        STNode separator = parseListBindingPatternMemberRhs();
        if (separator == null) {
            break; // close bracket reached
        }
        bindingPatterns.add(separator);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}
private STNode parseListBindingPatternMemberRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null; // end of the list binding pattern
    }
    recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
    return parseListBindingPatternMemberRhs();
}
/**
 * Checks whether the next token terminates a list binding pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> at `]` or at EOF (truncated input)
 */
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
* Parse list-binding-pattern member.
* <p>
* <code>
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* </code>
*
* @return List binding pattern member
*/
private STNode parseListBindingPatternMember() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    // Any other binding-pattern start token.
    if (nextTokenKind == SyntaxKind.OPEN_BRACKET_TOKEN || nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN ||
            nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN || nextTokenKind == SyntaxKind.ERROR_KEYWORD) {
        return parseBindingPattern();
    }
    recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
    return parseListBindingPatternMember();
}
/**
* Parse rest binding pattern.
* <p>
* <code>
* rest-binding-pattern := ... variable-name
* </code>
*
* @return Rest binding pattern node
*/
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();
    // Wrap the variable name in a simple-name-reference, as the factory expects.
    STNode nameRef = STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, (STSimpleNameReferenceNode) nameRef);
}
/**
* Parse Typed-binding-pattern.
* <p>
* <code>
* typed-binding-pattern := inferable-type-descriptor binding-pattern
* <br/><br/>
* inferable-type-descriptor := type-descriptor | var
* </code>
*
* @return Typed binding pattern node
*/
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    // No type-desc qualifiers have been consumed yet; start with an empty list.
    return parseTypedBindingPattern(new ArrayList<>(), context);
}
/**
 * Parses a typed binding pattern: a type descriptor followed by a binding pattern.
 *
 * @param qualifiers Type-desc qualifiers already consumed
 * @param context Enclosing parser context
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(qualifiers,
            ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    return parseTypedBindingPatternTypeRhs(typeDesc, context);
}
/**
* Parse mapping-binding-patterns.
* <p>
* <code>
* mapping-binding-pattern := { field-binding-patterns }
* <br/><br/>
* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/><br/>
* field-binding-pattern := field-name : binding-pattern | variable-name
* </code>
*
* @return mapping-binding-pattern node
*/
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    if (isEndOfMappingBindingPattern(peek().kind)) {
        // Empty mapping binding pattern: `{ }`.
        STNode closeBrace = parseCloseBrace();
        STNode fields = STNodeFactory.createEmptyNodeList();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, fields, closeBrace);
    }
    List<STNode> bindingPatterns = new ArrayList<>();
    STNode firstMember = parseMappingBindingPatternMember();
    // A rest-binding-pattern is appended later, by the continuation method.
    if (firstMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(firstMember);
    }
    return parseMappingBindingPattern(openBrace, bindingPatterns, firstMember);
}
/**
 * Continues a mapping binding pattern after the first member has been parsed: consumes
 * `, member` pairs until the closing brace. A rest-binding-pattern terminates the loop
 * and is appended last (the caller deliberately did not add it to the list).
 *
 * @param openBrace Already-parsed open brace
 * @param bindingPatterns Members collected so far (rest pattern, if any, not yet added)
 * @param prevMember Most recently parsed member
 * @return Mapping binding pattern node
 */
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
    STToken token = peek();
    STNode mappingBindingPatternRhs = null;
    while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        mappingBindingPatternRhs = parseMappingBindingPatternEnd();
        if (mappingBindingPatternRhs == null) {
            break; // close brace reached
        }
        bindingPatterns.add(mappingBindingPatternRhs);
        prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            break; // appended after the loop, so it always ends up last
        }
        bindingPatterns.add(prevMember);
        token = peek();
    }
    if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(prevMember);
    }
    STNode closeBrace = parseCloseBrace();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    endContext();
    return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
}
/**
* Parse mapping-binding-pattern entry.
* <p>
* <code>
* mapping-binding-pattern := { field-binding-patterns }
* <br/><br/>
* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/><br/>
* field-binding-pattern := field-name : binding-pattern
* | variable-name
* </code>
*
* @return mapping-binding-pattern node
*/
private STNode parseMappingBindingPatternMember() {
    // A member is either a rest-binding-pattern (`...r`) or a field-binding-pattern.
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    return parseFieldBindingPattern();
}
private STNode parseMappingBindingPatternEnd() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null; // no more fields
    }
    recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);
    return parseMappingBindingPatternEnd();
}
/**
* Parse field-binding-pattern.
* <code>field-binding-pattern := field-name : binding-pattern | varname</code>
*
* @return field-binding-pattern node
*/
private STNode parseFieldBindingPattern() {
    if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
        // A field binding pattern must start with a field name.
        recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
        return parseFieldBindingPattern();
    }
    STNode fieldName = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
    return parseFieldBindingPattern(fieldName);
}
/**
 * Parses the remainder of a field binding pattern after the field name.
 *
 * @param identifier Field name identifier
 * @return Shorthand varname node when no colon follows; full `name : pattern` node otherwise
 */
private STNode parseFieldBindingPattern(STNode identifier) {
    STNode fieldNameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        // Shorthand form: `varname`.
        return STNodeFactory.createFieldBindingPatternVarnameNode(fieldNameRef);
    }
    STNode colon = parseColon();
    STNode bindingPattern = parseBindingPattern();
    return STNodeFactory.createFieldBindingPatternFullNode(fieldNameRef, colon, bindingPattern);
}
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    // Ends at `}`, or wherever the enclosing module-level node ends.
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return true;
    }
    return endOfModuleLevelNode(1);
}
/**
 * Decides between an `error` type descriptor and an `error` binding pattern by looking
 * ahead past the `error` keyword.
 *
 * @param annots Annotations attached to the construct
 * @return Parsed error binding pattern or error type-desc based declaration
 */
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
    STToken nextNextToken = peek(2);
    switch (nextNextToken.kind) {
        case OPEN_PAREN_TOKEN: // `error(` — must be a binding pattern
            return parseAsErrorBindingPattern();
        case LT_TOKEN: // `error<` — must be a type descriptor
            return parseAsErrorTypeDesc(annots);
        case IDENTIFIER_TOKEN:
            // `error id:` or `error id(` — a binding pattern with a type reference.
            SyntaxKind nextNextNextTokenKind = peek(3).kind;
            if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                    nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                return parseAsErrorBindingPattern();
            }
            // Intentional fall-through: anything else is treated as a type descriptor.
        default:
            return parseAsErrorTypeDesc(annots);
    }
}
private STNode parseAsErrorBindingPattern() {
    // An error binding pattern in this position is the LHS of an assignment statement.
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    STNode errorBindingPattern = parseErrorBindingPattern();
    return parseAssignmentStmtRhs(errorBindingPattern);
}
private STNode parseAsErrorTypeDesc(STNode annots) {
    // There is no `final` keyword at this position.
    STNode emptyFinalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), emptyFinalKeyword);
}
/**
* Parse error binding pattern node.
* <p>
* <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
* <br/><br/>
* error-arg-list-binding-pattern :=
* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
* | [error-field-binding-patterns]
* <br/><br/>
* error-message-binding-pattern := simple-binding-pattern
* <br/><br/>
* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
* <br/><br/>
* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
* <br/><br/>
* error-field-binding-patterns :=
* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
* | rest-binding-pattern
* <br/><br/>
* named-arg-binding-pattern := arg-name = binding-pattern
*
* @return Error binding pattern node.
*/
private STNode parseErrorBindingPattern() {
    startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
    return parseErrorBindingPattern(parseErrorKeyword());
}
/**
 * Parses the optional type reference and the arg list of an error binding pattern.
 *
 * @param errorKeyword Already-parsed (or synthesized) `error` keyword
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `error( ... )` — no type reference present.
        return parseErrorBindingPattern(errorKeyword, STNodeFactory.createEmptyNode());
    }
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // `error TypeRef ( ... )`.
        return parseErrorBindingPattern(errorKeyword, parseTypeReference());
    }
    recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
    return parseErrorBindingPattern(errorKeyword);
}
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParen = parseOpenParenthesis();
    STNode argList = parseErrorArgListBindingPatterns();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParen, argList, closeParen);
}
/**
* Parse error arg list binding pattern.
* <p>
* <code>
* error-arg-list-binding-pattern :=
* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
* | [error-field-binding-patterns]
* <br/><br/>
* <p>
* error-message-binding-pattern := simple-binding-pattern
* <br/><br/>
* <p>
* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
* <br/><br/>
* <p>
* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
* <br/><br/>
* <p>
* error-field-binding-patterns :=
* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
* | rest-binding-pattern
* <br/><br/>
* <p>
* named-arg-binding-pattern := arg-name = binding-pattern
* </code>
*
* @return Error arg list binding patterns.
*/
private STNode parseErrorArgListBindingPatterns() {
    List<STNode> argListBindingPatterns = new ArrayList<>();
    // `error()` with no args yields an empty node list.
    return isEndOfErrorFieldBindingPatterns()
            ? STNodeFactory.createNodeList(argListBindingPatterns)
            : parseErrorArgListBindingPatterns(argListBindingPatterns);
}
/**
 * Parses the args of an error arg-list binding pattern, validating the first arg as the
 * error-message binding pattern. An error-BP first arg can only be the cause, so a
 * missing error-message BP plus comma are synthesized in front of it.
 *
 * @param argListBindingPatterns List to collect parsed args into
 * @return Node list of arg binding patterns
 */
private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
    STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
    if (firstArg == null) {
        // Close paren seen immediately: empty arg list.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    switch (firstArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Valid error-message binding pattern.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        case ERROR_BINDING_PATTERN:
            // Error-BP can only be the cause: insert missing message BP and comma before it.
            STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
            missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                    DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
            STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
            argListBindingPatterns.add(missingErrorMsgBP);
            argListBindingPatterns.add(missingComma);
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            // Error-field section starts right away; no message or cause present.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        default:
            // Not a valid arg: attach to the next token as invalid minutiae and retry.
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }
}
/**
 * Parses the arg that may follow the error-message binding pattern — the optional cause
 * or the start of the error-field binding patterns.
 *
 * @param argListBindingPatterns Args collected so far (message BP already added)
 * @return Node list of arg binding patterns
 */
private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
    STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
    if (argEnd == null) {
        // Close paren reached: only the error message was present.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
    assert secondArg != null; // non-first args never return null
    switch (secondArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(secondArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
        default:
            // Invalid arg: fold separator and arg into the last valid node, then retry.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                    DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
    }
}
/**
 * Parses the remaining error-field binding patterns (named args, optionally terminated
 * by a rest-binding-pattern), validating each arg's kind and ordering. Invalid args are
 * attached as invalid node minutiae rather than added to the list.
 *
 * @param argListBindingPatterns Args collected so far
 * @param lastValidArgKind Kind of the most recent valid arg, used for ordering checks
 * @return Node list of arg binding patterns
 */
private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                          SyntaxKind lastValidArgKind) {
    while (!isEndOfErrorFieldBindingPatterns()) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
        if (argEnd == null) {
            break; // close paren reached
        }
        STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
        assert currentArg != null; // non-first args never return null
        DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListBindingPatterns.size() == 0) {
            // Nothing valid collected yet: attach the invalid nodes to the next token.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            // Attach the invalid separator and arg to the last valid node.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
        }
    }
    return STNodeFactory.createNodeList(argListBindingPatterns);
}
private boolean isEndOfErrorFieldBindingPatterns() {
    // The arg list ends at `)`, or at EOF when input is truncated.
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null; // no more args
    }
    recover(peek(), currentCtx);
    return parseErrorArgsBindingPatternEnd(currentCtx);
}
/**
 * Parses a single arg of an error arg-list binding pattern.
 *
 * @param context Recovery context to use on an unexpected token
 * @param isFirstArg Whether this is the first arg of the list
 * @return Parsed arg node, or <code>null</code> for an empty list (first arg position, `)` seen)
 */
private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case IDENTIFIER_TOKEN:
            // Either a named arg (`name = bp`) or a capture/wildcard binding pattern.
            STNode argNameOrSimpleBindingPattern = consume();
            return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        case CLOSE_PAREN_TOKEN:
            if (isFirstArg) {
                // `error()` — empty arg list.
                return null;
            }
            // Intentional fall-through: `)` after a comma is an error; recover below.
        default:
            recover(peek(), context);
            return parseErrorArgListBindingPattern(context, isFirstArg);
    }
}
/**
 * Disambiguates an identifier arg into a named-arg binding pattern (`name = bp`) or a
 * capture/wildcard binding pattern.
 *
 * @param argNameOrSimpleBindingPattern Already-consumed identifier token
 * @return Named-arg or capture/wildcard binding pattern node
 */
private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
    if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
        STNode equal = consume();
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern, equal,
                bindingPattern);
    }
    // Anything else (comma, close paren, ...) treats the identifier as a simple BP.
    return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
}
/**
 * Validates the ordering of error-field binding patterns.
 *
 * @param prevArgKind Kind of the previous valid arg
 * @param currentArgKind Kind of the arg being validated
 * @return <code>null</code> when valid, otherwise the diagnostic code to attach
 */
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                  SyntaxKind currentArgKind) {
    // Only named args and a trailing rest arg may appear in the error-field section.
    if (currentArgKind == SyntaxKind.NAMED_ARG_BINDING_PATTERN ||
            currentArgKind == SyntaxKind.REST_BINDING_PATTERN) {
        // Nothing may follow a rest-binding-pattern.
        if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
            return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
        }
        return null;
    }
    // Capture/wildcard/error/list/mapping patterns are never valid here.
    return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
}
/*
* This parses Typed binding patterns and deals with ambiguity between types,
* and binding patterns. An example is 'T[a]'.
* The ambiguity lies in between:
* 1) Array Type
* 2) List binding pattern
* 3) Member access expression.
*/
/**
* Parse the component after the type-desc, of a typed-binding-pattern.
*
* @param typeDesc Starting type-desc of the typed-binding-pattern
* @return Typed-binding pattern
*/
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
    // This overload always parses at the root of the typed-binding-pattern.
    return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
}
/**
 * Parses what follows the type-desc of a typed-binding-pattern.
 *
 * @param typeDesc Type descriptor parsed so far
 * @param context Enclosing parser context
 * @param isRoot Whether this is the root of the typed-binding-pattern
 * @return Typed-binding-pattern node, or the bare type-desc for a non-root that ends here
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN: // capture binding pattern
        case OPEN_BRACE_TOKEN: // mapping binding pattern
        case ERROR_KEYWORD: // error binding pattern
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case OPEN_BRACKET_TOKEN:
            // `T[..` — resolve the array-type / list-BP / member-access ambiguity.
            STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
            assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
            return typedBindingPattern;
        case CLOSE_PAREN_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // A nested type-desc may legitimately end here; at the root this is an
            // error and falls through to recovery.
            if (!isRoot) {
                return typeDesc;
            }
            // Intentional fall-through.
        default:
            recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);
            return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
    }
}
/**
* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
*
* @param typeDescOrExpr Type desc or the expression at the start
* @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous
* @return Parsed node
*/
private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                      boolean allowAssignment, ParserRuleContext context) {
    // The BRACKETED_LIST context opened here is closed by whichever path parses the `]`.
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    // `T[]` — empty brackets can only be an array type descriptor.
    if (isBracketedListEnd(peek().kind)) {
        return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
    }
    STNode member = parseBracketedListMember(isTypedBindingPattern);
    SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
    switch (currentNodeType) {
        case ARRAY_TYPE_DESC:
            STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
            return typedBindingPattern;
        case LIST_BINDING_PATTERN:
            // The member decides it: parse the rest as a list binding pattern.
            STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case INDEXED_EXPRESSION:
            return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
        case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
            // Still ambiguous: fall out of the switch and decide after the `]`.
            break;
        case NONE:
        default:
            // A following comma proves this is a list binding pattern with >1 member.
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd != null) {
                List<STNode> memberList = new ArrayList<>();
                memberList.add(getBindingPattern(member));
                memberList.add(memberEnd);
                bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            }
    }
    STNode closeBracket = parseCloseBracket();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
/**
 * Continues the ambiguous bracketed construct as a member-access expression.
 *
 * @param typeNameOrExpr Container expression preceding the open bracket
 * @param openBracket Already-parsed open bracket
 * @param member First member inside the brackets
 * @return Member access expression, including any trailing expression rhs
 */
private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
    // Finish the key expression (inside brackets: allow further rhs).
    STNode keyMember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
    STNode closeBracket = parseCloseBracket();
    endContext();
    STNode keyExpr = STNodeFactory.createNodeList(keyMember);
    STNode memberAccessExpr =
            STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
    // Then continue parsing whatever follows the whole member access.
    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
}
private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
    // A bracketed list ends at `]`, or at EOF when input is truncated.
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
* Parse a member of an ambiguous bracketed list. This member could be:
* 1) Array length
* 2) Key expression of a member-access-expr
* 3) A member-binding pattern of a list-binding-pattern.
*
* @param isTypedBindingPattern Is this in a definite typed-binding pattern
* @return Parsed member node
*/
private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
        case STRING_LITERAL_TOKEN:
            // Possible array length (`T[5]`, `T[*]`) or a literal key expression.
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            // `[]` — no member at all.
            return STNodeFactory.createEmptyNode();
        case OPEN_BRACE_TOKEN: // mapping-binding-pattern
        case ERROR_KEYWORD: // error-binding-pattern
        case ELLIPSIS_TOKEN: // rest binding pattern
        case OPEN_BRACKET_TOKEN: // nested list-binding-pattern
            return parseStatementStartBracketedListMember();
        case IDENTIFIER_TOKEN:
            if (isTypedBindingPattern) {
                // In a definite typed-BP an identifier is a variable reference.
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            // Still ambiguous: break out and parse as an expression below.
            break;
        default:
            if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                    isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                // Expression start: fall out to the expression path below.
                break;
            }
            ParserRuleContext recoverContext =
                    isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                            : ParserRuleContext.BRACKETED_LIST_MEMBER;
            recover(peek(), recoverContext, isTypedBindingPattern);
            return parseBracketedListMember(isTypedBindingPattern);
    }
    // Reached via the `break`s above: parse as an expression, mapping `_` to a wildcard BP.
    STNode expr = parseExpression();
    if (isWildcardBP(expr)) {
        return getWildcardBindingPattern(expr);
    }
    return expr;
}
/**
* Treat the current node as an array, and parse the remainder of the binding pattern.
*
* @param typeDesc Type-desc
* @param openBracket Open bracket
* @param member Member
* @return Parsed node
*/
private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {
    typeDesc = getTypeDescFromExpr(typeDesc);
    // Replace the ambiguous BRACKETED_LIST context with type-desc contexts, so the `]`
    // is parsed inside ARRAY_TYPE_DESCRIPTOR; both are popped before continuing.
    switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode closeBracket = parseCloseBracket();
    endContext();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
            context);
}
private STNode parseBracketedListMemberEnd() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null; // end of the bracketed list
    }
    recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
    return parseBracketedListMemberEnd();
}
/**
* We reach here to break ambiguity of T[a]. This could be:
* 1) Array Type Desc
* 2) Member access on LHS
* 3) Typed-binding-pattern
*
* @param typeDescOrExpr Type name or the expr that precede the open-bracket.
* @param openBracket Open bracket
* @param member Member
* @param closeBracket Open bracket
* @param isTypedBindingPattern Is this is a typed-binding-pattern.
* @return Specific node that matches to T[a], after solving ambiguity.
*/
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, boolean isTypedBindingPattern,
                                                         boolean allowAssignment, ParserRuleContext context) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN: // capture binding pattern follows — array type desc
        case OPEN_BRACE_TOKEN: // mapping binding pattern follows
        case ERROR_KEYWORD: // error binding pattern follows
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
        case OPEN_BRACKET_TOKEN:
            // `T[a][` — another dimension or another key expression.
            if (isTypedBindingPattern) {
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            }
            // Still ambiguous: keep it as a member access and recurse on the next bracket.
            STNode keyExpr = getKeyExpr(member);
            STNode expr =
                    STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
        case QUESTION_MARK_TOKEN:
            // `T[a]?` — optional array type desc; continue with complex type-desc parsing.
            typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            return parseTypedBindingPatternTypeRhs(typeDesc, context);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // `T[a] | ...` / `T[a] & ...` — union/intersection type-desc or binary expr.
            return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                    isTypedBindingPattern);
        case IN_KEYWORD:
            // `in` only resolves the ambiguity inside foreach/from clauses.
            if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {
                break; // fall out to recovery
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case EQUAL_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                break; // `=` is not valid in these contexts; fall out to recovery
            }
            if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            }
            // Assignment to a member access lvalue.
            keyExpr = getKeyExpr(member);
            typeDescOrExpr = getExpression(typeDescOrExpr);
            return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
        case SEMICOLON_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                break; // `;` cannot end a foreach/from binding; fall out to recovery
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case CLOSE_BRACE_TOKEN:
        case COMMA_TOKEN:
            // Inside an ambiguous statement these close the construct as a member access.
            if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                keyExpr = getKeyExpr(member);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            // Intentional fall-through.
        default:
            // Any other expression-rhs start resolves the ambiguity as member access.
            if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            break;
    }
    // Recovery: pick the context matching how much is already resolved.
    ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
    if (isTypedBindingPattern) {
        recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
    }
    recover(peek(), recoveryCtx, typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
/**
 * Wrap the member of a member-access-expr as a key-expr node-list, synthesizing a
 * missing identifier (with a diagnostic) when no member is present.
 *
 * @param member Member node, or {@code null} when the key expr is absent
 * @return Node list holding the key expression
 */
private STNode getKeyExpr(STNode member) {
    if (member != null) {
        return STNodeFactory.createNodeList(member);
    }
    // No key expr between the brackets: create a missing identifier so the
    // member-access node stays well-formed, and attach the diagnostic to it.
    STToken missingKey = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
    return STNodeFactory.createNodeList(STNodeFactory.createSimpleNameReferenceNode(missingKey));
}
/**
 * Create a typed-binding-pattern from an ambiguous bracketed construct
 * <code>typeDescOrExpr [member]</code>: the brackets become either an array
 * dimension or a list-binding-pattern depending on the member's kind.
 *
 * @param typeDescOrExpr Type-desc or expression preceding the brackets
 * @param openBracket    Open bracket
 * @param member         Member between the brackets (may be empty)
 * @param closeBracket   Close bracket
 * @return Typed-binding-pattern node
 */
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
        STNode closeBracket) {
    STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
    if (!isEmpty(member)) {
        SyntaxKind memberKind = member.kind;
        if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
            // `T[n]` / `T[*]`: the brackets are an array dimension, so the variable
            // name is missing; synthesize it with a diagnostic.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
            STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
            return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
        }
        if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // A field binding pattern is not allowed inside a list BP: drop the member
            // into the open bracket's minutiae with a diagnostic.
            openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,
                    DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
        } else {
            STNode bindingPattern = getBindingPattern(member);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
    }
    // Brackets form a list-binding-pattern over the (possibly empty) member list.
    STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
    STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}
/**
 * Parse a union or intersection type-desc/binary-expression that involves ambiguous
 * bracketed list in lhs.
 * <p>
 * e.g: <code>(T[a] &amp; R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/&amp; R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/&amp; R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/&amp; R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr Type desc or the expression
 * @param openBracket Open bracket
 * @param member Member
 * @param closeBracket Close bracket
 * @param isTypedBindingPattern Whether this is known to be a typed-binding-pattern
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
        STNode closeBracket, boolean isTypedBindingPattern) {
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // RHS resolved to a typed-BP, so the LHS `T[a]` is an array type-desc; join
        // the two type-descs with the operator and keep the RHS binding pattern.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
        STNode newTypeDesc;
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            newTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);
        } else {
            newTypeDesc =
                    createIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);
        }
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
    }
    if (isTypedBindingPattern) {
        // A typed-BP is required here, but the RHS parsed as an expression:
        // recover by creating a capture-BP with a missing variable name.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
    }
    // Otherwise the whole construct is a binary expression whose LHS is a member-access.
    STNode keyExpr = getExpression(member);
    STNode containerExpr = getExpression(typeDescOrExpr);
    STNode lhsExpr =
            STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
            typedBindingPatternOrExpr);
}
/**
 * Attach an array dimension <code>[member]</code> to a type-desc. For union and
 * intersection type-descs the dimension binds to the right-most component, so recurse
 * down the right-hand side and rebuild the composite on the way back up.
 *
 * @param openBracket  Open bracket token
 * @param member       Array-length member (may be empty)
 * @param closeBracket Close bracket token
 * @param lhsTypeDesc  Type-desc the dimension is attached to
 * @return Resulting type-desc with the array dimension applied
 */
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    switch (lhsTypeDesc.kind) {
        case UNION_TYPE_DESC:
            STUnionTypeDescriptorNode union = (STUnionTypeDescriptorNode) lhsTypeDesc;
            return createUnionTypeDesc(union.leftTypeDesc, union.pipeToken,
                    getArrayTypeDesc(openBracket, member, closeBracket, union.rightTypeDesc));
        case INTERSECTION_TYPE_DESC:
            STIntersectionTypeDescriptorNode intersection = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
            return createIntersectionTypeDesc(intersection.leftTypeDesc, intersection.bitwiseAndToken,
                    getArrayTypeDesc(openBracket, member, closeBracket, intersection.rightTypeDesc));
        default:
            return createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
    }
}
/**
 * Parse the union (<code>|</code>) or intersection (<code>&amp;</code>) type operator.
 *
 * @return Pipe or bitwise-and token
 */
private STNode parseUnionOrIntersectionToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.PIPE_TOKEN && nextToken.kind != SyntaxKind.BITWISE_AND_TOKEN) {
        recover(nextToken, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
        return parseUnionOrIntersectionToken();
    }
    return consume();
}
/**
 * Infer the type of the ambiguous bracketed list, based on the type of the member.
 *
 * @param memberNode            Member node
 * @param isTypedBindingPattern Whether the enclosing construct is known to be a typed-BP
 * @return Inferred {@link SyntaxKind} of the bracketed list, or {@code NONE} when still ambiguous
 */
private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
    if (isEmpty(memberNode)) {
        // Empty brackets keep the construct ambiguous.
        return SyntaxKind.NONE;
    }
    if (isDefiniteTypeDesc(memberNode.kind)) {
        return SyntaxKind.TUPLE_TYPE_DESC;
    }
    switch (memberNode.kind) {
        case ASTERISK_LITERAL:
            // `[*]` is an inferred array length.
            return SyntaxKind.ARRAY_TYPE_DESC;
        case NUMERIC_LITERAL:
            // `[n]` is an array length in a typed-BP, else could also be member access.
            return isTypedBindingPattern ? SyntaxKind.ARRAY_TYPE_DESC
                    : SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE:
        case REST_TYPE:
            return SyntaxKind.TUPLE_TYPE_DESC;
        case SIMPLE_NAME_REFERENCE:
        case BRACKETED_LIST:
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            // These member kinds are themselves ambiguous.
            return SyntaxKind.NONE;
        case ERROR_CONSTRUCTOR:
            return isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)
                    ? SyntaxKind.NONE
                    : SyntaxKind.INDEXED_EXPRESSION;
        default:
            return isTypedBindingPattern ? SyntaxKind.NONE : SyntaxKind.INDEXED_EXPRESSION;
    }
}
/*
* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
* The ambiguity lies in between:
* 1) Assignment that starts with list binding pattern
* 2) Var-decl statement that starts with tuple type
* 3) Statement that starts with list constructor, such as sync-send, etc.
*/
/**
 * Parse any statement that starts with an open-bracket.
 *
 * @param annots               Annotations attached to the statement
 * @param possibleMappingField Whether this construct may be a mapping field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
    startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
    // Statement position: this is the root of the (possibly nested) bracketed list.
    return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
}
// Parse a bracketed list occurring as a member (i.e. not at statement start):
// no annotations, not the root of the list.
private STNode parseMemberBracketedList(boolean possibleMappingField) {
    return parseStatementStartsWithOpenBracket(STNodeFactory.createEmptyNodeList(), false, possibleMappingField);
}
/**
 * The bracketed list at the start of a statement can be one of the following.
 * 1) List binding pattern
 * 2) Tuple type
 * 3) List constructor
 *
 * @param annots Annotations attached to the statement
 * @param isRoot Is this the root of the list
 * @param possibleMappingField Whether this construct may be a mapping field — passed through to the RHS parser
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
    startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    // Collect members until one of them disambiguates the construct; at that point
    // hand everything collected so far to the construct-specific parser.
    while (!isBracketedListEnd(peek().kind)) {
        STNode member = parseStatementStartBracketedListMember();
        SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
        switch (currentNodeType) {
            case TUPLE_TYPE_DESC:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case LIST_BINDING_PATTERN:
                return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
            case LIST_CONSTRUCTOR:
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case LIST_BP_OR_LIST_CONSTRUCTOR:
                return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
                return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
            case NONE:
            default:
                // Member is itself ambiguous: keep collecting.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
    }
    // Every member stayed ambiguous: decide from what follows the close bracket.
    STNode closeBracket = parseCloseBracket();
    STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
            isRoot, possibleMappingField);
    return bracketedList;
}
/**
 * Parse a member of a list-binding-pattern, tuple-type-desc, or
 * list-constructor-expr, when the parent is ambiguous.
 *
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember() {
    // Start with no type-desc qualifiers collected.
    return parseStatementStartBracketedListMember(new ArrayList<>());
}
/**
 * Parse a single member of an ambiguous statement-start bracketed list, keeping the
 * result as specific as the lookahead allows (binding pattern, type-desc, expression,
 * or a still-ambiguous node).
 *
 * @param qualifiers Type-desc qualifiers collected so far (also threaded through recovery)
 * @return Parsed member node
 */
private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested bracketed list: recurse, staying ambiguous.
            reportInvalidQualifierList(qualifiers);
            return parseMemberBracketedList(false);
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (isWildcardBP(identifier)) {
                // `_` member: wildcard binding pattern.
                STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                return getWildcardBindingPattern(varName);
            }
            nextToken = peek();
            if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // `T...`: rest descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
                // e.g. `T?`, `T|R`: must be a type-desc.
                return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
            }
            // Otherwise continue as an expression; may be re-interpreted later.
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
        case OPEN_BRACE_TOKEN:
            // `{`: mapping BP or mapping constructor; still ambiguous.
            reportInvalidQualifierList(qualifiers);
            return parseMappingBindingPatterOrMappingConstructor();
        case ERROR_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // `error(...)` / `error E ...`: error BP or error constructor.
                return parseErrorBindingPatternOrErrorConstructor();
            }
            // Bare `error` is the error type-desc.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case ELLIPSIS_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseListBindingPatternMember();
        case XML_KEYWORD:
        case STRING_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                // Template expression, e.g. xml`...` / string`...`.
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                // `table<...>` / `stream<...>`: type-desc.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr(qualifiers);
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER, qualifiers);
            return parseStatementStartBracketedListMember(qualifiers);
    }
}
// Continue parsing a bracketed list that has been narrowed down to either a
// tuple-type-desc or a list-constructor (but not yet decided between the two).
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
        STNode member, boolean isRoot) {
    memberList.add(member);
    STNode memberEnd = parseBracketedListMemberEnd();
    if (memberEnd != null) {
        // More members follow: keep collecting under the narrowed grammar.
        memberList.add(memberEnd);
        return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
    }
    // List ends here: resolve the ambiguity from what follows the close bracket.
    STNode closeBracket = parseCloseBracket();
    return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}
/**
 * Parse a construct that is either a tuple type-desc or a list constructor,
 * starting at the open bracket.
 *
 * @param annots Annotations
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    return parseTupleTypeDescOrListConstructor(annots, parseOpenBracket(), new ArrayList<>(), false);
}
// Parse members of the still-ambiguous tuple-type-desc/list-constructor until a member
// disambiguates the construct or the bracketed list ends.
private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
        boolean isRoot) {
    STToken nextToken = peek();
    while (!isBracketedListEnd(nextToken.kind)) {
        STNode member = parseTupleTypeDescOrListConstructorMember(annots);
        SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
        switch (currentNodeType) {
            case LIST_CONSTRUCTOR:
                // Member is definitely an expression: whole list is a list constructor.
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC:
                // Member is definitely a type-desc: whole list is a tuple type.
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
            default:
                // Still ambiguous: keep collecting.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
        nextToken = peek();
    }
    // Ambiguity unresolved at the close bracket: decide from the following token.
    STNode closeBracket = parseCloseBracket();
    return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}
// Parse one member of an ambiguous tuple-type-desc/list-constructor construct,
// keeping the result as specific as the lookahead allows.
private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested bracketed construct: recurse, staying ambiguous.
            return parseTupleTypeDescOrListConstructor(annots);
        case IDENTIFIER_TOKEN:
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // `T...`: rest descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
        case OPEN_BRACE_TOKEN:
            return parseMappingConstructorExpr();
        case ERROR_KEYWORD:
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // `error(...)`: error constructor expression.
                return parseErrorConstructorExpr(false);
            }
            // Bare `error` is the error type-desc.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case XML_KEYWORD:
        case STRING_KEYWORD:
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                // Template expression, e.g. xml`...` / string`...`.
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                // `table<...>` / `stream<...>`: type-desc.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);
            return parseTupleTypeDescOrListConstructorMember(annots);
    }
}
// Ambiguity resolution for tuple-type vs list-constructor members follows the same
// rules as statement-start bracketed lists, so delegate to that classifier.
private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
    return getStmtStartBracketedListType(memberNode);
}
private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket,
boolean isRoot) {
STNode tupleTypeOrListConst;
switch (peek().kind) {
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_BRACKET_TOKEN:
if (!isRoot) {
endContext();
return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,
closeBracket);
}
default:
if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
(isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
members = getExpressionList(members);
STNode memberExpressions = STNodeFactory.createNodeList(members);
tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
memberExpressions, closeBracket);
break;
}
STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
STNode tupleTypeDesc =
STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
tupleTypeOrListConst =
parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
endContext();
if (!isRoot) {
return tupleTypeOrListConst;
}
STNode annots = STNodeFactory.createEmptyNodeList();
return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
} | switch (peek().kind) { | private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
switch (peek(lookahead + 1).kind) {
case IDENTIFIER_TOKEN:
SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
switch (tokenAfterIdentifier) {
case ON_KEYWORD:
case OPEN_BRACE_TOKEN:
return true;
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
case QUESTION_MARK_TOKEN:
return false;
default:
return false;
}
case ON_KEYWORD:
return true;
default:
return false;
}
}
/**
 * Parse listener declaration, given the qualifier.
 * <p>
 * <code>
 * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
 * </code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @return Parsed node
 */
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.LISTENER_DECL);
    STNode listenerKeyword = parseListenerKeyword();
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        // The type-descriptor is optional: an identifier here may be either the type
        // name or the variable name, so defer the decision to the common routine.
        STNode listenerDecl =
                parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
        endContext();
        return listenerDecl;
    }
    // Non-identifier start: an explicit type-descriptor followed by `name = expr ;`.
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse the <code>listener</code> keyword, recovering if it is not next.
 *
 * @return Listener-keyword token
 */
private STNode parseListenerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LISTENER_KEYWORD) {
        recover(nextToken, ParserRuleContext.LISTENER_KEYWORD);
        return parseListenerKeyword();
    }
    return consume();
}
/**
 * Parse constant declaration, given the qualifier.
 * <p>
 * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
 *
 * @param metadata  Metadata
 * @param qualifier Qualifier that precedes the constant declaration
 * @return Parsed node
 */
private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.CONSTANT_DECL);
    return parseConstDecl(metadata, qualifier, parseConstantKeyword());
}
/**
 * Parse the components that follow the const keyword of a constant declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ANNOTATION_KEYWORD:
            // `const annotation ...` is an annotation declaration, not a constant.
            endContext();
            return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
        case IDENTIFIER_TOKEN:
            // Identifier may be the (optional) type name or the constant name itself.
            STNode constantDecl =
                    parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
            endContext();
            return constantDecl;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                break;
            }
            recover(peek(), ParserRuleContext.CONST_DECL_TYPE, metadata, qualifier, constKeyword);
            return parseConstDecl(metadata, qualifier, constKeyword);
    }
    // Explicit (non-identifier) type-descriptor followed by `name = expr ;`.
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
            equalsToken, initializer, semicolonToken);
}
// Parse a const/listener decl whose type-descriptor may be absent: read the first
// identifier and let the RHS parser decide whether it is the type or the var-name.
private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
        boolean isListener) {
    STNode typeOrVarName = parseStatementStartIdentifier();
    return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, typeOrVarName, isListener);
}
/**
 * Parse the component that follows the first identifier in a const/listener decl.
 * The identifier can be either the type-name (a user defined type) or the var-name
 * where the type-name is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param keyword Keyword (const/listener)
 * @param typeOrVarName Identifier that follows the keyword
 * @param isListener Whether this is a listener declaration
 * @return Parsed node
 */
private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
        STNode typeOrVarName, boolean isListener) {
    if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // A qualified name (`mod:T`) can only be the type; the var-name must follow.
        STNode type = typeOrVarName;
        STNode variableName = parseVariableName();
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }
    STNode type;
    STNode variableName;
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            // Two identifiers: the first is the type, the second the var-name.
            type = typeOrVarName;
            variableName = parseVariableName();
            break;
        case EQUAL_TOKEN:
            // Single identifier followed by `=`: no type; identifier is the var-name.
            variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
            type = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.CONST_DECL_RHS, metadata, qualifier, keyword, typeOrVarName,
                    isListener);
            return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
    }
    return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
}
// Parse `= expression ;` and assemble either a listener or a constant declaration node.
private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
        STNode type, STNode variableName) {
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    if (!isListener) {
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
                equalsToken, initializer, semicolonToken);
    }
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse the <code>const</code> keyword, recovering if it is not next.
 *
 * @return Const-keyword token
 */
private STNode parseConstantKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONST_KEYWORD) {
        recover(nextToken, ParserRuleContext.CONST_KEYWORD);
        return parseConstantKeyword();
    }
    return consume();
}
/**
 * Parse typeof expression.
 * <p>
 * <code>typeof-expr := typeof expression</code>
 *
 * @param isRhsExpr          Whether the expression occurs in RHS position (passed through)
 * @param isInConditionalExpr Whether we are inside a conditional expression (passed through)
 * @return Typeof expression node
 */
private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode typeofKeyword = parseTypeofKeyword();
    // The operand binds with unary precedence.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createTypeofExpressionNode(typeofKeyword, operand);
}
/**
 * Parse the <code>typeof</code> keyword, recovering if it is not next.
 *
 * @return Typeof-keyword token
 */
private STNode parseTypeofKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TYPEOF_KEYWORD) {
        recover(nextToken, ParserRuleContext.TYPEOF_KEYWORD);
        return parseTypeofKeyword();
    }
    return consume();
}
/**
 * Parse optional type descriptor given the preceding type.
 * <p>
 * <code>optional-type-descriptor := type-descriptor `?`</code>
 *
 * @param typeDescriptorNode Preceding type descriptor
 * @return Optional type-desc node
 */
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
    STNode questionMark = parseQuestionMark();
    endContext();
    // `var?` is invalid; the validation rewrites/flags it before building the node.
    return STNodeFactory.createOptionalTypeDescriptorNode(validateForUsageOfVar(typeDescriptorNode), questionMark);
}
/**
 * Parse unary expression.
 * <p>
 * <code>unary-expr := + expression | - expression | ~ expression | ! expression</code>
 *
 * @param isRhsExpr           Whether the expression occurs in RHS position (passed through)
 * @param isInConditionalExpr Whether we are inside a conditional expression (passed through)
 * @return Unary expression node
 */
private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode operator = parseUnaryOperator();
    // The operand binds with unary precedence.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createUnaryExpressionNode(operator, operand);
}
/**
 * Parse a unary operator.
 * <code>UnaryOperator := + | - | ~ | !</code>
 *
 * @return Unary operator token
 */
private STNode parseUnaryOperator() {
    STToken nextToken = peek();
    if (!isUnaryOperator(nextToken.kind)) {
        recover(nextToken, ParserRuleContext.UNARY_OPERATOR);
        return parseUnaryOperator();
    }
    return consume();
}
/**
 * Check whether the given token kind is a unary operator.
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
 */
private boolean isUnaryOperator(SyntaxKind kind) {
    return kind == SyntaxKind.PLUS_TOKEN
            || kind == SyntaxKind.MINUS_TOKEN
            || kind == SyntaxKind.NEGATION_TOKEN
            || kind == SyntaxKind.EXCLAMATION_MARK_TOKEN;
}
/**
 * Parse array type descriptor.
 * <p>
 * <code>
 * array-type-descriptor := member-type-descriptor [ [ array-length ] ]
 * member-type-descriptor := type-descriptor
 * array-length := int-literal | constant-reference-expr | inferred-array-length
 * inferred-array-length := *
 * </code>
 *
 * @param memberTypeDesc Member type-desc the dimension attaches to
 * @return Array type-desc node
 */
private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracket = parseOpenBracket();
    STNode arrayLength = parseArrayLength();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return createArrayTypeDesc(memberTypeDesc, openBracket, arrayLength, closeBracket);
}
/**
 * Assemble an array-type-descriptor node, validating the array-length member. An
 * invalid length is attached to the open bracket as invalid-node minutiae with a
 * diagnostic, and dropped from the tree.
 *
 * @param memberTypeDesc    Member type-desc of the array
 * @param openBracketToken  Open bracket
 * @param arrayLengthNode   Array-length node, or {@code null} when absent
 * @param closeBracketToken Close bracket
 * @return Array type-desc node
 */
private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,
        STNode closeBracketToken) {
    memberTypeDesc = validateForUsageOfVar(memberTypeDesc);
    if (arrayLengthNode != null) {
        switch (arrayLengthNode.kind) {
            case ASTERISK_LITERAL:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                // Only integer literals are valid lengths; any other numeric literal
                // intentionally falls through to the invalid-length handling below.
                SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;
                if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // fall through
            default:
                openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,
                        arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                arrayLengthNode = STNodeFactory.createEmptyNode();
        }
    }
    return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, openBracketToken, arrayLengthNode,
            closeBracketToken);
}
/**
 * Parse array length.
 * <p>
 * <code>
 * array-length := int-literal | constant-reference-expr | inferred-array-length
 * constant-reference-expr := variable-reference-expr
 * </code>
 *
 * @return Parsed array length, or an empty node when the brackets are empty
 */
private STNode parseArrayLength() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case CLOSE_BRACKET_TOKEN:
            // `[]` — no explicit length.
            return STNodeFactory.createEmptyNode();
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
            return parseBasicLiteral();
        case IDENTIFIER_TOKEN:
            // Constant reference as the length.
            return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
        default:
            recover(nextToken, ParserRuleContext.ARRAY_LENGTH);
            return parseArrayLength();
    }
}
/**
 * Parse a possibly-empty annotation list.
 * <p>
 * <i>Note: the spec defines an annotations-list as one-or-more annotations whose usage
 * is optional; for consistency of the tree this parser models it as zero-or-more
 * annotations that are always present.</i>
 * <p>
 * <code>annots := annotation*</code>
 *
 * @return Node list of annotations (possibly empty)
 */
private STNode parseOptionalAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    while (peek().kind == SyntaxKind.AT_TOKEN) {
        annotations.add(parseAnnotation());
    }
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
 * Parse an annotation list with at least one annotation.
 *
 * @return Node list of annotations
 */
private STNode parseAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    // At least one annotation is required here; keep going while `@` follows.
    do {
        annotations.add(parseAnnotation());
    } while (peek().kind == SyntaxKind.AT_TOKEN);
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
 * Parse a single annotation attachment.
 * <p>
 * <code>annotation := @ annot-tag-reference annot-value</code>
 *
 * @return Annotation node
 */
private STNode parseAnnotation() {
    STNode atToken = parseAtToken();
    // The tag reference must be an identifier; otherwise record it as missing.
    STNode annotReference = isPredeclaredIdentifier(peek().kind)
            ? parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE)
            : STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    // The annot-value (a mapping constructor) is optional.
    STNode annotValue = peek().kind == SyntaxKind.OPEN_BRACE_TOKEN
            ? parseMappingConstructorExpr()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}
/**
 * Parse the <code>@</code> token, recovering if it is not next.
 *
 * @return At-token
 */
private STNode parseAtToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.AT_TOKEN) {
        recover(nextToken, ParserRuleContext.AT);
        return parseAtToken();
    }
    return consume();
}
/**
 * Parse metadata: an optional documentation string followed by an annotation list.
 * <p>
 * <code>metadata := [DocumentationString] annots</code>
 *
 * @return Metadata node, or an empty node when neither part is present
 */
private STNode parseMetaData() {
    STNode docString = STNodeFactory.createEmptyNode();
    switch (peek().kind) {
        case DOCUMENTATION_STRING:
            docString = parseMarkdownDocumentation();
            break;
        case AT_TOKEN:
            // No doc string; annotations follow.
            break;
        default:
            // Neither doc string nor annotations: metadata is entirely absent.
            return STNodeFactory.createEmptyNode();
    }
    return createMetadata(docString, parseOptionalAnnotations());
}
/**
 * Create a metadata node; when both the doc string and the annotations are absent,
 * an empty node is returned instead.
 *
 * @return A metadata node
 */
private STNode createMetadata(STNode docString, STNode annotations) {
    if (docString == null && annotations == null) {
        return STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createMetadataNode(docString, annotations);
}
/**
 * Parse is expression.
 * <code>is-expr := expression is type-descriptor</code>
 *
 * @param lhsExpr             Preceding expression of the is expression
 * @param isInConditionalExpr Whether we are inside a conditional expression (passed through)
 * @return Type-test expression node
 */
private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode isKeyword = parseIsKeyword();
    STNode rhsTypeDesc = parseTypeDescriptorInExpression(isInConditionalExpr);
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isKeyword, rhsTypeDesc);
}
/**
 * Parse the <code>is</code> keyword, recovering if it is not next.
 *
 * @return Is-keyword token
 */
private STNode parseIsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IS_KEYWORD) {
        recover(nextToken, ParserRuleContext.IS_KEYWORD);
        return parseIsKeyword();
    }
    return consume();
}
/**
 * Parse local type definition statement.
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @param annots Annotations attached to the statement
 * @return Local type definition statement node
 */
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
            semicolon);
}
/**
 * Parse a statement that consists solely of an action or an expression.
 *
 * @param annots Annotations attached to the statement
 * @return Statement node
 */
private STNode parseExpressionStatement(STNode annots) {
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    STNode actionOrExpr = parseActionOrExpressionInLhs(annots);
    return getExpressionAsStatement(actionOrExpr);
}
/**
 * Parse a statement that starts with an expression. The statement kind is still
 * ambiguous at this point (assignment, compound assignment, call, etc.).
 *
 * @param annots Annotations attached to the statement
 * @return Statement node
 */
private STNode parseStatementStartWithExpr(STNode annots) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode lhsExpr = parseActionOrExpressionInLhs(annots);
    return parseStatementStartWithExprRhs(lhsExpr);
}
/**
 * Parse the component followed by the expression, at the beginning of a statement.
 * Disambiguates between assignment, compound assignment, and expression/action statements.
 *
 * @param expression Action or expression in LHS
 * @return Statement node
 */
private STNode parseStatementStartWithExprRhs(STNode expression) {
    SyntaxKind nextTokenKind = peek().kind;
    // Actions cannot appear on the LHS of an assignment, and a semicolon always
    // terminates the expression; either way this is an expression/action statement.
    if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
        return getExpressionAsStatement(expression);
    }
    switch (nextTokenKind) {
        case EQUAL_TOKEN:
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(expression);
        case IDENTIFIER_TOKEN:
            // Intentional fall-through: an identifier after an expression is invalid
            // here, so it is handled by the same recovery path as the default case.
        default:
            if (isCompoundBinaryOperator(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(expression);
            }
            // Pick the recovery context based on whether the expression alone could
            // legally form a statement (call/action statement).
            ParserRuleContext context;
            if (isPossibleExpressionStatement(expression)) {
                context = ParserRuleContext.EXPR_STMT_RHS;
            } else {
                context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
            }
            recover(peek(), context, expression);
            return parseStatementStartWithExprRhs(expression);
    }
}
/**
 * Check whether the given expression could, on its own, form a valid
 * expression statement (i.e. a call statement or an action statement).
 *
 * @param expression Expression to check
 * @return <code>true</code> if the expression can stand alone as a statement
 */
private boolean isPossibleExpressionStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return true;
        default:
            return false;
    }
}
/**
 * Wrap an already-parsed expression into a statement, consuming the trailing
 * semicolon. Call-like expressions become call statements, actions become
 * action statements; anything else is an invalid expression statement and
 * gets a diagnostic attached.
 *
 * @param expression Parsed expression or action
 * @return Statement node
 */
private STNode getExpressionAsStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
            return parseCallStatement(expression);
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return parseActionStatement(expression);
        default:
            // Not a valid standalone statement: still build the node so parsing can
            // continue, but mark it with a diagnostic.
            STNode semicolon = parseSemicolon();
            endContext();
            STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                    expression, semicolon);
            exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
            return exprStmt;
    }
}
/**
 * Re-interpret an indexed expression (<code>T[x]</code>) as an array type
 * descriptor. Only an asterisk literal, a (qualified) name reference, or a
 * decimal/hex integer literal is a valid array length; any other length
 * expression is dropped and reported as invalid minutiae on the open bracket.
 *
 * @param indexedExpr Indexed expression to convert
 * @return Array type descriptor node
 */
private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
    STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
    STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
    if (lengthExprs.isEmpty()) {
        // T[] — inferred/open length.
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                indexedExpr.closeBracket);
    }
    STNode lengthExpr = lengthExprs.get(0);
    switch (lengthExpr.kind) {
        case ASTERISK_LITERAL:
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
            break;
        case NUMERIC_LITERAL:
            SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
            if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                    innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                break;
            }
            // Intentional fall-through: a non-integer numeric literal (e.g. a float)
            // is not a valid array length and is handled by the default case.
        default:
            STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                    indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
            indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
            lengthExpr = STNodeFactory.createEmptyNode();
    }
    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
}
/**
 * <p>
 * Parse call statement, given the call expression.
 * </p>
 * <code>
 * call-stmt := call-expr ;
 * <br/>
 * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
 * </code>
 *
 * @param expression Call expression associated with the call statement
 * @return Call statement node
 */
private STNode parseCallStatement(STNode expression) {
    STNode semicolon = parseSemicolon();
    endContext();
    // A check expression is only a valid call statement if it ultimately wraps a call.
    STNode callExpr = expression.kind == SyntaxKind.CHECK_EXPRESSION
            ? validateCallExpression(expression)
            : expression;
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, callExpr, semicolon);
}
/**
 * Validate that a check expression used as a call statement ultimately wraps a
 * function or method call. If it wraps anything else, the offending expression
 * is attached to the check keyword as invalid minutiae and a synthetic (missing)
 * function call is substituted so a well-formed tree is still produced.
 *
 * @param callExpr Check expression to validate
 * @return The validated (possibly repaired) check expression
 */
private STNode validateCallExpression(STNode callExpr) {
    STCheckExpressionNode checkExpr = (STCheckExpressionNode) callExpr;
    STNode expr = checkExpr.expression;
    if (expr.kind == SyntaxKind.FUNCTION_CALL || expr.kind == SyntaxKind.METHOD_CALL) {
        return callExpr;
    }
    STNode checkKeyword = checkExpr.checkKeyword;
    if (expr.kind == SyntaxKind.CHECK_EXPRESSION) {
        // Nested check: validate the inner expression recursively and rebuild.
        expr = validateCallExpression(expr);
        return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkKeyword, expr);
    }
    // Invalid inner expression: report it on the check keyword and substitute a
    // missing function call so the statement shape is preserved.
    STNode checkingKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(checkKeyword, expr,
            DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_EXPECTED_CALL_EXPRESSION);
    STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
    STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
    STNode arguments = STNodeFactory.createEmptyNodeList();
    STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
    STNode funcCallExpr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
            closeParenToken);
    return STNodeFactory.createCheckExpressionNode(SyntaxKind.CHECK_EXPRESSION, checkingKeyword, funcCallExpr);
}
/**
 * Parse an action statement, given the already-parsed action.
 *
 * @param action Action associated with the statement
 * @return Action statement node
 */
private STNode parseActionStatement(STNode action) {
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolonToken);
}
/**
 * Parse remote method call action or async send action, given the starting expression.
 * <p>
 * <code>
 * remote-method-call-action := expression -> method-name ( arg-list )
 * <br/>
 * async-send-action := expression -> peer-worker ;
 * </code>
 *
 * @param expression LHS expression
 * @param isRhsExpr Is this an RHS action
 * @return Remote method call action or async send action node
 */
private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
    STNode rightArrowToken = parseRightArrow();
    return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrowToken);
}
/**
 * Parse the target name that follows the right arrow of a remote method call
 * or async send action, then continue to the action end.
 *
 * @param expression LHS expression
 * @param isRhsExpr Is this an RHS action
 * @param rightArrow Right arrow token
 * @return Remote method call action or async send action node
 */
private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {
    STNode name;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case FUNCTION_KEYWORD:
            // `-> function` denotes the default worker: always an async send.
            STNode functionKeyword = consume();
            name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
            return parseAsyncSendAction(expression, rightArrow, name);
        case IDENTIFIER_TOKEN:
            name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
            break;
        case CONTINUE_KEYWORD:
        case COMMIT_KEYWORD:
            // These keywords are valid identifiers in this position.
            name = getKeywordAsSimpleNameRef();
            break;
        default:
            STToken token = peek();
            recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS, expression, isRhsExpr, rightArrow);
            return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
    }
    return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
}
/**
 * Disambiguate between a remote method call and an async send based on the
 * token that follows the target name: an open paren means a remote call,
 * a semicolon means an async send.
 *
 * @param expression LHS expression
 * @param rightArrow Right arrow token
 * @param name Target name
 * @return Remote method call action or async send action node
 */
private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseRemoteMethodCallAction(expression, rightArrow, name);
        case SEMICOLON_TOKEN:
            return parseAsyncSendAction(expression, rightArrow, name);
        default:
            recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END, expression, rightArrow, name);
            return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }
}
/**
 * Create an async send action node from its already-parsed components.
 *
 * @param expression LHS expression
 * @param rightArrow Right arrow token
 * @param peerWorker Peer worker name
 * @return Async send action node
 */
private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
    return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
}
/**
 * Parse the argument list of a remote method call action and assemble the node.
 *
 * @param expression LHS expression
 * @param rightArrow Right arrow token
 * @param name Remote method name
 * @return Remote method call action node
 */
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
    STNode openParen = parseArgListOpenParenthesis();
    STNode args = parseArgsList();
    STNode closeParen = parseArgListCloseParenthesis();
    return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParen, args,
            closeParen);
}
/**
 * Parse right arrow (<code>-></code>) token, recovering in place if absent.
 *
 * @return Right arrow token node
 */
private STNode parseRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_ARROW_TOKEN) {
        recover(nextToken, ParserRuleContext.RIGHT_ARROW);
        return parseRightArrow();
    }
    return consume();
}
/**
 * Parse parameterized type descriptor.
 * <code>parameterized-type-descriptor := map type-parameter | future type-parameter | typedesc type-parameter</code>
 *
 * @param parameterizedTypeKeyword Keyword (map/future/typedesc) that starts the descriptor
 * @return Parameterized type descriptor node
 */
private STNode parseParameterizedTypeDescriptor(STNode parameterizedTypeKeyword) {
    STNode typeParamNode = parseTypeParameter();
    return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeKeyword, typeParamNode);
}
/**
 * Parse <code> > </code> token.
 * (Note: this parses the greater-than token that closes a type parameter.)
 *
 * @return Parsed node
 */
private STNode parseGTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.GT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.GT);
        return parseGTToken();
    }
}
/**
 * Parse <code> < </code> token.
 * (Note: this parses the less-than token that opens a type parameter.)
 *
 * @return Parsed node
 */
private STNode parseLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.LT);
        return parseLTToken();
    }
}
/**
 * Parse nil literal. Here nil literal is only referred to ( ).
 *
 * @return Nil literal node
 */
private STNode parseNilLiteral() {
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParen = parseOpenParenthesis();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse annotation declaration, given the qualifier.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
    startContext(ParserRuleContext.ANNOTATION_DECL);
    STNode annotationKeyword = parseAnnotationKeyword();
    STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    endContext();
    return annotDecl;
}
/**
 * Parse annotation keyword, recovering in place if absent.
 *
 * @return Annotation keyword node
 */
private STNode parseAnnotationKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ANNOTATION_KEYWORD) {
        recover(nextToken, ParserRuleContext.ANNOTATION_KEYWORD);
        return parseAnnotationKeyword();
    }
    return consume();
}
/**
 * Parse the components that follow the annotation keyword of an annotation declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
        STNode annotationKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // An identifier here is ambiguous: it may be a user-defined type name or
            // the annotation tag itself. Delegate to the optional-type path.
            return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                // Unambiguously a type descriptor: fall through to parse it below.
                break;
            }
            recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE, metadata, qualifier, constKeyword,
                    annotationKeyword);
            return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
    STNode annotTag = parseAnnotationTag();
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse annotation tag.
 * <p>
 * <code>annot-tag := identifier</code>
 *
 * @return Annotation tag node
 */
private STNode parseAnnotationTag() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(nextToken, ParserRuleContext.ANNOTATION_TAG);
        return parseAnnotationTag();
    }
    return consume();
}
/**
 * Parse an annotation declaration whose first token after the annotation keyword is
 * an identifier. That identifier is ambiguous: it may be the start of an (optional)
 * type descriptor or it may be the annotation tag itself.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
        STNode annotationKeyword) {
    STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
    if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // A qualified name can only be a type reference, so the tag must follow.
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag, annotTag);
    }
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
        // The identifier starts a (possibly complex) type descriptor; the tag follows it.
        STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
    // Single plain identifier: treat it as the annotation tag with no type descriptor.
    STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
    return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
}
/**
 * Parse the component that follows the first identifier in an annotation decl. The identifier
 * can be either the type-name (a user defined type) or the annot-tag, where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
        STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STToken nextToken = peek();
    STNode typeDesc;
    STNode annotTag;
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // A second identifier means the first one was the type; this one is the tag.
            typeDesc = typeDescOrAnnotTag;
            annotTag = parseAnnotationTag();
            break;
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
            // Declaration ends or attach points begin: the identifier was the tag.
            typeDesc = STNodeFactory.createEmptyNode();
            annotTag = typeDescOrAnnotTag;
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_DECL_RHS, metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag);
            return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
    }
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse the optional <code>on attach-points</code> part of an annotation declaration
 * and assemble the complete declaration node.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDesc Type descriptor (possibly empty)
 * @param annotTag Annotation tag
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
        STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
    STNode onKeyword;
    STNode attachPoints;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // No attach points clause.
            onKeyword = STNodeFactory.createEmptyNode();
            attachPoints = STNodeFactory.createEmptyNodeList();
            break;
        case ON_KEYWORD:
            onKeyword = parseOnKeyword();
            attachPoints = parseAnnotationAttachPoints();
            // `on` with no attach points is an error; report it on the keyword.
            onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS, metadata, qualifier, constKeyword,
                    annotationKeyword, typeDesc, annotTag);
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
    }
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
            typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
}
/**
 * Parse annotation attach points.
 * <p>
 * <code>
 * annot-attach-points := annot-attach-point (, annot-attach-point)*
 * <br/><br/>
 * annot-attach-point := dual-attach-point | source-only-attach-point
 * <br/><br/>
 * dual-attach-point := [source] dual-attach-point-ident
 * <br/><br/>
 * dual-attach-point-ident :=
 * type
 * | class
 * | [object|service remote] function
 * | parameter
 * | return
 * | service
 * | [object|record] field
 * <br/><br/>
 * source-only-attach-point := source source-only-attach-point-ident
 * <br/><br/>
 * source-only-attach-point-ident :=
 * annotation
 * | external
 * | var
 * | const
 * | listener
 * | worker
 * </code>
 *
 * @return Parsed node
 */
private STNode parseAnnotationAttachPoints() {
    startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
    List<STNode> attachPoints = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndAnnotAttachPointList(nextToken.kind)) {
        // Empty list; the caller reports the missing-attach-point diagnostic.
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    // The separators (commas) are stored in the list alongside the attach points.
    STNode attachPoint = parseAnnotationAttachPoint();
    attachPoints.add(attachPoint);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndAnnotAttachPointList(nextToken.kind)) {
        leadingComma = parseAttachPointEnd();
        if (leadingComma == null) {
            // Semicolon reached: list is complete.
            break;
        }
        attachPoints.add(leadingComma);
        attachPoint = parseAnnotationAttachPoint();
        if (attachPoint == null) {
            // EOF after a comma: synthesize a missing attach point and stop.
            attachPoint = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            attachPoints.add(attachPoint);
            break;
        }
        attachPoints.add(attachPoint);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(attachPoints);
}
/**
 * Parse annotation attach point end.
 *
 * @return Comma token when another attach point follows, or <code>null</code>
 *         when the list terminates with a semicolon
 */
private STNode parseAttachPointEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // End of the attach point list.
            return null;
        case COMMA_TOKEN:
            return consume();
        default:
            recover(nextToken, ParserRuleContext.ATTACH_POINT_END);
            return parseAttachPointEnd();
    }
}
/**
 * Check whether the given token kind terminates an annotation attach point list.
 *
 * @param tokenKind Token kind to check
 * @return <code>true</code> for EOF or semicolon; <code>false</code> otherwise
 */
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse annotation attach point.
 *
 * @return Parsed node, or <code>null</code> on EOF
 */
private STNode parseAnnotationAttachPoint() {
    switch (peek().kind) {
        case EOF_TOKEN:
            return null;
        // Source-only attach points: these idents are only valid after `source`.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            // Intentional fall-through: all of the above require the source keyword,
            // so recover into it if it is missing.
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);
        // Dual attach points: `source` is optional, so start the ident directly.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
        case CLASS_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT);
            return parseAnnotationAttachPoint();
    }
}
/**
 * Parse source keyword, recovering in place if absent.
 *
 * @return Source keyword node
 */
private STNode parseSourceKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SOURCE_KEYWORD) {
        recover(nextToken, ParserRuleContext.SOURCE_KEYWORD);
        return parseSourceKeyword();
    }
    return consume();
}
/**
 * Parse attach point ident, given the source keyword.
 * <p>
 * <code>
 * source-only-attach-point-ident := annotation | external | var | const | listener | worker
 * <br/><br/>
 * dual-attach-point-ident := type | class | [object|service remote] function | parameter
 * | return | service | [object|record] field
 * </code>
 *
 * @param sourceKeyword Source keyword
 * @return Parsed node
 */
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    switch (peek().kind) {
        // Source-only idents: a single keyword completes the attach point.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        // Dual idents: may be followed by a second ident (e.g. `object function`).
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT, sourceKeyword);
            return parseAttachPointIdent(sourceKeyword);
    }
}
/**
 * Parse dual-attach-point ident.
 *
 * @param sourceKeyword Source keyword
 * @param firstIdent first part of the dual attach-point
 * @return Parsed node
 */
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            // `object function` or `object field`.
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            // `resource function`.
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            // `record field`.
            secondIdent = parseFieldIdent();
            break;
        case SERVICE_KEYWORD:
            // `service` alone or `service remote function`.
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
        default:
            // Single-ident attach point.
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }
    STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
}
/**
 * Parse remote ident, recovering in place if absent.
 *
 * @return Remote keyword node
 */
private STNode parseRemoteIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.REMOTE_KEYWORD) {
        recover(nextToken, ParserRuleContext.REMOTE_IDENT);
        return parseRemoteIdent();
    }
    return consume();
}
/**
 * Parse service attach point.
 * <code>service-attach-point := service | service remote function</code>
 *
 * @param sourceKeyword Source keyword (possibly empty)
 * @param firstIdent Already-consumed service keyword
 * @return Parsed node
 */
private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
    STNode identList;
    STToken token = peek();
    switch (token.kind) {
        case REMOTE_KEYWORD:
            // `service remote function`.
            STNode secondIdent = parseRemoteIdent();
            STNode thirdIdent = parseFunctionIdent();
            identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        case COMMA_TOKEN:
        case SEMICOLON_TOKEN:
            // Plain `service` attach point.
            identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        default:
            recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
    }
}
/**
 * Parse the idents that are supported after object-ident
 * (<code>function</code> or <code>field</code>).
 *
 * @return Parsed node
 */
private STNode parseIdentAfterObjectIdent() {
    STToken nextToken = peek();
    SyntaxKind kind = nextToken.kind;
    if (kind == SyntaxKind.FUNCTION_KEYWORD || kind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    }
    recover(nextToken, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
    return parseIdentAfterObjectIdent();
}
/**
 * Parse function ident, recovering in place if absent.
 *
 * @return Function keyword node
 */
private STNode parseFunctionIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FUNCTION_KEYWORD) {
        recover(nextToken, ParserRuleContext.FUNCTION_IDENT);
        return parseFunctionIdent();
    }
    return consume();
}
/**
 * Parse field ident, recovering in place if absent.
 *
 * @return Field keyword node
 */
private STNode parseFieldIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FIELD_KEYWORD) {
        recover(nextToken, ParserRuleContext.FIELD_IDENT);
        return parseFieldIdent();
    }
    return consume();
}
/**
 * Parse XML namespace declaration.
 * <p>
 * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
 * <br/>
 * xml-namespace-uri := simple-const-expr
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param isModuleVar Whether this is a module-level declaration
 * @return XML namespace declaration node
 */
private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode namespaceUri = parseSimpleConstExpr();
    // Keep re-parsing until a valid URI expression is found; each invalid
    // expression is attached to the xmlns keyword as invalid minutiae.
    while (!isValidXMLNameSpaceURI(namespaceUri)) {
        xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
        namespaceUri = parseSimpleConstExpr();
    }
    STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    endContext();
    return xmlnsDecl;
}
/**
 * Parse xmlns keyword, recovering in place if absent.
 *
 * @return Xmlns keyword node
 */
private STNode parseXMLNSKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XMLNS_KEYWORD) {
        recover(nextToken, ParserRuleContext.XMLNS_KEYWORD);
        return parseXMLNSKeyword();
    }
    return consume();
}
/**
 * Check whether the given expression is a valid XML namespace URI:
 * a string literal or a (qualified) name reference.
 *
 * @param expr Expression to check
 * @return <code>true</code> if the expression is a valid namespace URI
 */
private boolean isValidXMLNameSpaceURI(STNode expr) {
    SyntaxKind exprKind = expr.kind;
    return exprKind == SyntaxKind.STRING_LITERAL || exprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE
            || exprKind == SyntaxKind.SIMPLE_NAME_REFERENCE;
}
/**
 * Parse a simple constant expression, managing the surrounding parser context.
 *
 * @return Constant expression node
 */
private STNode parseSimpleConstExpr() {
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STNode constExpr = parseSimpleConstExprInternal();
    endContext();
    return constExpr;
}
/**
 * Parse simple constant expression: a basic literal, a signed numeric literal,
 * a nil literal, or a (qualified) identifier reference.
 *
 * @return Parsed node
 */
private STNode parseSimpleConstExprInternal() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return parseBasicLiteral();
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Unary sign is only valid on numeric literals here.
            return parseSignedIntOrFloat();
        case OPEN_PAREN_TOKEN:
            // `()` — the nil literal.
            return parseNilLiteral();
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
            return parseSimpleConstExprInternal();
    }
}
/**
 * Parse the portion after the namespace-uri of an XML declaration:
 * an optional <code>as prefix</code> clause followed by a semicolon.
 *
 * @param xmlnsKeyword XMLNS keyword
 * @param namespaceUri Namespace URI
 * @param isModuleVar Whether this is a module-level declaration
 * @return XML namespace declaration node
 */
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
    STNode asKeyword = STNodeFactory.createEmptyNode();
    STNode namespacePrefix = STNodeFactory.createEmptyNode();
    switch (peek().kind) {
        case AS_KEYWORD:
            asKeyword = parseAsKeyword();
            namespacePrefix = parseNamespacePrefix();
            break;
        case SEMICOLON_TOKEN:
            // No prefix clause.
            break;
        default:
            recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL, xmlnsKeyword, namespaceUri, isModuleVar);
            return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    }
    STNode semicolon = parseSemicolon();
    if (isModuleVar) {
        return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }
    return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
            semicolon);
}
/**
 * Parse XML namespace prefix.
 * <code>xml-namespace-prefix := identifier</code>
 *
 * @return Namespace prefix node
 */
private STNode parseNamespacePrefix() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(nextToken, ParserRuleContext.NAMESPACE_PREFIX);
        return parseNamespacePrefix();
    }
    return consume();
}
/**
 * Parse named worker declaration.
 * <p>
 * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
 * </code>
 *
 * @param annots Annotations attached to the worker decl
 * @param qualifiers Preceding transactional keyword in a list
 * @return Named worker declaration node
 */
private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.NAMED_WORKER_DECL);
    STNode txKeyword = getTransactionalKeyword(qualifiers);
    STNode workerKw = parseWorkerKeyword();
    STNode name = parseWorkerName();
    STNode returnTypeDesc = parseReturnTypeDescriptor();
    STNode body = parseBlockNode();
    endContext();
    return STNodeFactory.createNamedWorkerDeclarationNode(annots, txKeyword, workerKw, name,
            returnTypeDesc, body);
}
/**
 * Validate the qualifier list of a named worker declaration. Only a single
 * <code>transactional</code> keyword is allowed; duplicates and other
 * qualifiers are attached as invalid minutiae with a diagnostic.
 *
 * @param qualifierList Qualifiers preceding the worker keyword
 * @return The transactional keyword, or an empty node if none was present
 */
private STNode getTransactionalKeyword(List<STNode> qualifierList) {
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate qualifier: attach to the previously accepted one.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Last qualifier and invalid: attach to the upcoming token.
            addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                    ((STToken) qualifier).text());
        } else {
            // Invalid qualifier with more to follow: attach to the next qualifier.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    STNode transactionalKeyword;
    if (validatedList.isEmpty()) {
        transactionalKeyword = STNodeFactory.createEmptyNode();
    } else {
        transactionalKeyword = validatedList.get(0);
    }
    return transactionalKeyword;
}
/**
 * Parse an optional return type descriptor
 * (<code>returns [annots] type-descriptor</code>).
 *
 * @return Return type descriptor node, or an empty node when absent
 */
private STNode parseReturnTypeDescriptor() {
    if (peek().kind != SyntaxKind.RETURNS_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = consume();
    STNode annots = parseOptionalAnnotations();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annots, typeDesc);
}
/**
 * Parse worker keyword, recovering in place if absent.
 *
 * @return Worker keyword node
 */
private STNode parseWorkerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WORKER_KEYWORD) {
        recover(nextToken, ParserRuleContext.WORKER_KEYWORD);
        return parseWorkerKeyword();
    }
    return consume();
}
/**
 * Parse worker name.
 * <p>
 * <code>worker-name := identifier</code>
 *
 * @return Worker name node
 */
private STNode parseWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(nextToken, ParserRuleContext.WORKER_NAME);
        return parseWorkerName();
    }
    return consume();
}
/**
 * Parse lock statement.
 * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
 *
 * @return Lock statement node
 */
private STNode parseLockStatement() {
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKw = parseLockKeyword();
    STNode blockStmt = parseBlockNode();
    endContext();
    STNode onFail = parseOptionalOnFailClause();
    return STNodeFactory.createLockStatementNode(lockKw, blockStmt, onFail);
}
/**
 * Parse lock-keyword, recovering in place if absent.
 *
 * @return Lock-keyword node
 */
private STNode parseLockKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LOCK_KEYWORD) {
        recover(nextToken, ParserRuleContext.LOCK_KEYWORD);
        return parseLockKeyword();
    }
    return consume();
}
/**
 * Parse union type descriptor.
 * union-type-descriptor := type-descriptor | type-descriptor
 *
 * @param leftTypeDesc Type desc in the LHS of the union type desc.
 * @param context Current context.
 * @param isTypedBindingPattern Whether this occurs within a typed binding pattern.
 * @return parsed union type desc node
 */
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
        boolean isTypedBindingPattern) {
    // The pipe token is already validated by the caller.
    STNode pipeToken = consume();
    STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);
    return createUnionTypeDesc(leftTypeDesc, pipeToken, rightTypeDesc);
}
/**
 * Create a union type descriptor node, validating both operands for
 * illegal usage of <code>var</code>.
 *
 * @param leftTypeDesc LHS type descriptor
 * @param pipeToken Pipe token
 * @param rightTypeDesc RHS type descriptor
 * @return Union type descriptor node
 */
private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
    STNode validatedLhs = validateForUsageOfVar(leftTypeDesc);
    STNode validatedRhs = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createUnionTypeDescriptorNode(validatedLhs, pipeToken, validatedRhs);
}
/**
 * Parse pipe token, recovering in place if absent.
 *
 * @return Pipe token node
 */
private STNode parsePipeToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.PIPE_TOKEN) {
        recover(nextToken, ParserRuleContext.PIPE);
        return parsePipeToken();
    }
    return consume();
}
/**
 * Checks whether a token kind can start a type descriptor.
 */
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case DISTINCT_KEYWORD:
        case ERROR_KEYWORD:
        case FUNCTION_KEYWORD:
        case FUTURE_KEYWORD:
        case IDENTIFIER_TOKEN:
        case ISOLATED_KEYWORD:
        case MAP_KEYWORD:
        case OBJECT_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case OPEN_PAREN_TOKEN:
        case RECORD_KEYWORD:
        case SERVICE_KEYWORD:
        case STREAM_KEYWORD:
        case TABLE_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case TRANSACTION_KEYWORD:
        case TYPEDESC_KEYWORD:
        case XML_KEYWORD:
            return true;
        default:
            // Singleton type descriptors (literal starts) and simple built-in
            // types can also begin a type descriptor.
            return isSingletonTypeDescStart(nodeKind, true) || isSimpleType(nodeKind);
    }
}
/**
* Check if the token kind is a type descriptor in terminal expression.
* <p>
* simple-type-in-expr :=
* boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
*
* @param nodeKind token kind to check
* @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
*/
private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
    // `var` and `readonly` count as simple types elsewhere, but not inside expressions.
    if (nodeKind == SyntaxKind.VAR_KEYWORD || nodeKind == SyntaxKind.READONLY_KEYWORD) {
        return false;
    }
    return isSimpleType(nodeKind);
}
/**
 * Checks whether a token kind denotes a simple built-in type
 * (including {@code var} and {@code readonly}).
 */
static boolean isSimpleType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case ANYDATA_KEYWORD:
        case ANY_KEYWORD:
        case BOOLEAN_KEYWORD:
        case BYTE_KEYWORD:
        case DECIMAL_KEYWORD:
        case FLOAT_KEYWORD:
        case HANDLE_KEYWORD:
        case INT_KEYWORD:
        case JSON_KEYWORD:
        case NEVER_KEYWORD:
        case READONLY_KEYWORD:
        case STRING_KEYWORD:
        case VAR_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether a token kind is a predeclared module prefix (e.g. {@code int:} in {@code int:max}).
 */
static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
    return nodeKind == SyntaxKind.BOOLEAN_KEYWORD || nodeKind == SyntaxKind.DECIMAL_KEYWORD
            || nodeKind == SyntaxKind.ERROR_KEYWORD || nodeKind == SyntaxKind.FLOAT_KEYWORD
            || nodeKind == SyntaxKind.FUTURE_KEYWORD || nodeKind == SyntaxKind.INT_KEYWORD
            || nodeKind == SyntaxKind.MAP_KEYWORD || nodeKind == SyntaxKind.OBJECT_KEYWORD
            || nodeKind == SyntaxKind.STREAM_KEYWORD || nodeKind == SyntaxKind.STRING_KEYWORD
            || nodeKind == SyntaxKind.TABLE_KEYWORD || nodeKind == SyntaxKind.TRANSACTION_KEYWORD
            || nodeKind == SyntaxKind.TYPEDESC_KEYWORD || nodeKind == SyntaxKind.XML_KEYWORD;
}
private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
    // A predeclared prefix only forms a qualified identifier when a colon follows.
    if (!isPredeclaredPrefix(nodeKind)) {
        return false;
    }
    return getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
}
/**
 * Maps a built-in type keyword kind to its corresponding type-descriptor syntax kind.
 *
 * @param typeKeyword built-in type keyword kind (e.g. {@code INT_KEYWORD})
 * @return matching {@code *_TYPE_DESC} kind; {@code TYPE_REFERENCE} as a fallback for
 *         non-built-in kinds (guarded by an assertion in debug builds)
 */
private SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
    switch (typeKeyword) {
        case INT_KEYWORD:
            return SyntaxKind.INT_TYPE_DESC;
        case FLOAT_KEYWORD:
            return SyntaxKind.FLOAT_TYPE_DESC;
        case DECIMAL_KEYWORD:
            return SyntaxKind.DECIMAL_TYPE_DESC;
        case BOOLEAN_KEYWORD:
            return SyntaxKind.BOOLEAN_TYPE_DESC;
        case STRING_KEYWORD:
            return SyntaxKind.STRING_TYPE_DESC;
        case BYTE_KEYWORD:
            return SyntaxKind.BYTE_TYPE_DESC;
        case JSON_KEYWORD:
            return SyntaxKind.JSON_TYPE_DESC;
        case HANDLE_KEYWORD:
            return SyntaxKind.HANDLE_TYPE_DESC;
        case ANY_KEYWORD:
            return SyntaxKind.ANY_TYPE_DESC;
        case ANYDATA_KEYWORD:
            return SyntaxKind.ANYDATA_TYPE_DESC;
        case NEVER_KEYWORD:
            return SyntaxKind.NEVER_TYPE_DESC;
        case VAR_KEYWORD:
            return SyntaxKind.VAR_TYPE_DESC;
        case READONLY_KEYWORD:
            return SyntaxKind.READONLY_TYPE_DESC;
        default:
            // Callers are expected to pass only built-in type keywords.
            assert false : typeKeyword + " is not a built-in type";
            return SyntaxKind.TYPE_REFERENCE;
    }
}
/**
* Parse fork-keyword.
*
* @return Fork-keyword node
*/
private STNode parseForkKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FORK_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.FORK_KEYWORD);
        return parseForkKeyword();
    }
    return consume();
}
/**
* Parse fork statement.
* <code>fork-stmt := fork { named-worker-decl+ }</code>
*
* @return Fork statement
*/
private STNode parseForkStatement() {
    startContext(ParserRuleContext.FORK_STMT);
    STNode forkKeyword = parseForkKeyword();
    STNode openBrace = parseOpenBrace();
    // Collect named-worker declarations; any other statement is reported and
    // absorbed into a neighboring node as invalid-node minutiae.
    ArrayList<STNode> workers = new ArrayList<>();
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        // Local type definitions are invalid inside fork; attach them to the next token.
        if (stmt.kind == SyntaxKind.LOCAL_TYPE_DEFINITION_STATEMENT) {
            addInvalidNodeToNextToken(stmt, DiagnosticErrorCode.ERROR_LOCAL_TYPE_DEFINITION_NOT_ALLOWED);
            continue;
        }
        switch (stmt.kind) {
            case NAMED_WORKER_DECLARATION:
                workers.add(stmt);
                break;
            default:
                // Non-worker statement: attach to the open brace if no worker has been
                // parsed yet, otherwise to the last worker in the list.
                if (workers.isEmpty()) {
                    openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                } else {
                    updateLastNodeInListWithInvalidNode(workers, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                }
        }
    }
    STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
    STNode closeBrace = parseCloseBrace();
    endContext();
    STNode forkStmt =
            STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
    // A fork statement must contain at least one named-worker declaration.
    if (isNodeListEmpty(namedWorkerDeclarations)) {
        return SyntaxErrors.addDiagnostic(forkStmt,
                DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
    }
    return forkStmt;
}
/**
* Parse trap expression.
* <p>
* <code>
* trap-expr := trap expression
* </code>
*
* @param allowActions Allow actions
* @param isRhsExpr Whether this is a RHS expression or not
* @return Trap expression node
*/
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    STNode trapKeyword = parseTrapKeyword();
    STNode trappedExpr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
    // Trapping an action yields a trap-action node; otherwise a trap-expression node.
    SyntaxKind nodeKind = isAction(trappedExpr) ? SyntaxKind.TRAP_ACTION : SyntaxKind.TRAP_EXPRESSION;
    return STNodeFactory.createTrapExpressionNode(nodeKind, trapKeyword, trappedExpr);
}
/**
* Parse trap-keyword.
*
* @return Trap-keyword node
*/
private STNode parseTrapKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TRAP_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.TRAP_KEYWORD);
        return parseTrapKeyword();
    }
    return consume();
}
/**
* Parse list constructor expression.
* <p>
* <code>
* list-constructor-expr := [ [ expr-list ] ]
* <br/>
* expr-list := expression (, expression)*
* </code>
*
* @return Parsed node
*/
private STNode parseListConstructorExpr() {
    startContext(ParserRuleContext.LIST_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode memberExprs = parseOptionalExpressionsList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createListConstructorExpressionNode(openBracket, memberExprs, closeBracket);
}
/**
* Parse optional expression list.
*
* @return Parsed node
*/
private STNode parseOptionalExpressionsList() {
    // An empty list constructor has no expressions at all.
    if (isEndOfListConstructor(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> expressions = new ArrayList<>();
    expressions.add(parseExpression());
    return parseOptionalExpressionsList(expressions);
}
/**
 * Continues an expression list, consuming {@code , expression} pairs until the
 * list constructor ends or recovery gives up on finding a separator.
 */
private STNode parseOptionalExpressionsList(List<STNode> expressions) {
    while (!isEndOfListConstructor(peek().kind)) {
        STNode memberEnd = parseListConstructorMemberEnd();
        if (memberEnd == null) {
            break;
        }
        expressions.add(memberEnd);
        expressions.add(parseExpression());
    }
    return STNodeFactory.createNodeList(expressions);
}
private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
    // The list ends at the closing bracket, or at end of input.
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN;
}
/**
 * Parses the separator after a list-constructor member; returns {@code null} at the list end.
 */
private STNode parseListConstructorMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        // No separator: the list constructor is complete.
        return null;
    }
    recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
    return parseListConstructorMemberEnd();
}
/**
* Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
*
* @return foreach statement
*/
private STNode parseForEachStatement() {
    startContext(ParserRuleContext.FOREACH_STMT);
    STNode foreachKeyword = parseForEachKeyword();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
    STNode inKeyword = parseInKeyword();
    STNode iterable = parseActionOrExpression();
    STNode body = parseBlockNode();
    endContext();
    // The optional on-fail clause is parsed outside the foreach context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createForEachStatementNode(foreachKeyword, typedBindingPattern, inKeyword, iterable,
            body, onFailClause);
}
/**
* Parse foreach-keyword.
*
* @return ForEach-keyword node
*/
private STNode parseForEachKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FOREACH_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.FOREACH_KEYWORD);
        return parseForEachKeyword();
    }
    return consume();
}
/**
* Parse in-keyword.
*
* @return In-keyword node
*/
private STNode parseInKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IN_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.IN_KEYWORD);
        return parseInKeyword();
    }
    return consume();
}
/**
* Parse type cast expression.
* <p>
* <code>
* type-cast-expr := < type-cast-param > expression
* <br/>
* type-cast-param := [annots] type-descriptor | annots
* </code>
*
* @return Parsed node
*/
private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    startContext(ParserRuleContext.TYPE_CAST);
    // Parse the leading `<`, then delegate to the shared continuation.
    return parseTypeCastExpr(parseLTToken(), isRhsExpr, allowActions, isInConditionalExpr);
}
private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
                                 boolean isInConditionalExpr) {
    STNode typeCastParam = parseTypeCastParam();
    STNode gtToken = parseGTToken();
    // The type-cast context ends at `>`; the operand is parsed in the enclosing context.
    endContext();
    STNode castedExpr =
            parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
    return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, castedExpr);
}
/**
 * Parses the parameter of a type cast: {@code [annots] type-descriptor | annots}.
 */
private STNode parseTypeCastParam() {
    STNode annot;
    STNode type;
    if (peek().kind == SyntaxKind.AT_TOKEN) {
        annot = parseOptionalAnnotations();
        // When annotations are present, the type descriptor itself is optional.
        if (isTypeStartingToken(peek().kind)) {
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
        } else {
            type = STNodeFactory.createEmptyNode();
        }
    } else {
        annot = STNodeFactory.createEmptyNode();
        type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    }
    return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
}
/**
* Parse table constructor expression.
* <p>
* <code>
* table-constructor-expr-rhs := [ [row-list] ]
* </code>
*
* @param tableKeyword tableKeyword that precedes this rhs
* @param keySpecifier keySpecifier that precedes this rhs
* @return Parsed node
*/
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
    switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode rows = parseRowList();
    STNode closeBracket = parseCloseBracket();
    return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rows,
            closeBracket);
}
/**
* Parse table-keyword.
*
* @return Table-keyword node
*/
private STNode parseTableKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TABLE_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.TABLE_KEYWORD);
        return parseTableKeyword();
    }
    return consume();
}
/**
* Parse table rows.
* <p>
* <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
*
* @return Parsed node
*/
private STNode parseRowList() {
    if (isEndOfTableRowList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // row-list := mapping-constructor-expr (, mapping-constructor-expr)*
    List<STNode> mappings = new ArrayList<>();
    mappings.add(parseMappingConstructorExpr());
    while (!isEndOfTableRowList(peek().kind)) {
        STNode rowEnd = parseTableRowEnd();
        if (rowEnd == null) {
            break;
        }
        mappings.add(rowEnd);
        mappings.add(parseMappingConstructorExpr());
    }
    return STNodeFactory.createNodeList(mappings);
}
/**
 * Checks whether a token kind terminates the row list of a table constructor.
 */
private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return true;
    }
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return false;
    }
    // Anything that would end a mapping constructor also ends the row list.
    return isEndOfMappingConstructor(tokenKind);
}
/**
 * Parses the separator after a table row; returns {@code null} when the row list has ended.
 */
private STNode parseTableRowEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextToken.kind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.TABLE_ROW_END);
    return parseTableRowEnd();
}
/**
* Parse key specifier.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier() {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode keyKeywordToken = parseKeyKeyword();
    STNode openParenToken = parseOpenParenthesis();
    STNode fieldNamesNode = parseFieldNames();
    STNode closeParenToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
}
/**
* Parse key-keyword.
*
* @return Key-keyword node
*/
private STNode parseKeyKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.KEY_KEYWORD) {
        return consume();
    }
    // `key` is a contextual keyword: the lexer may deliver it as a plain
    // identifier, in which case the token is re-branded as KEY_KEYWORD.
    if (isKeyKeyword(token)) {
        return getKeyKeyword(consume());
    }
    recover(token, ParserRuleContext.KEY_KEYWORD);
    return parseKeyKeyword();
}
/**
 * Checks whether a token is an identifier spelling the contextual keyword {@code key}.
 */
static boolean isKeyKeyword(STToken token) {
    if (token.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        return false;
    }
    return LexerTerminals.KEY.equals(token.text());
}
/**
 * Creates a {@code KEY_KEYWORD} token from an identifier token that spells {@code key},
 * preserving the original token's minutiae and diagnostics.
 *
 * @param token identifier token to re-brand
 * @return an equivalent KEY_KEYWORD token
 */
private STNode getKeyKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
            token.diagnostics());
}
/**
* Parse field names.
* <p>
* <code>field-name-list := [ field-name (, field-name)* ]</code>
*
* @return Parsed node
*/
private STNode parseFieldNames() {
    if (isEndOfFieldNamesList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // field-name-list := field-name (, field-name)*
    List<STNode> fieldNames = new ArrayList<>();
    fieldNames.add(parseVariableName());
    while (!isEndOfFieldNamesList(peek().kind)) {
        fieldNames.add(parseComma());
        fieldNames.add(parseVariableName());
    }
    return STNodeFactory.createNodeList(fieldNames);
}
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    // Only commas and identifiers can continue a field-name list.
    return tokenKind != SyntaxKind.COMMA_TOKEN && tokenKind != SyntaxKind.IDENTIFIER_TOKEN;
}
/**
* Parse error type descriptor.
* <p>
* error-type-descriptor := error [type-parameter]
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseErrorTypeDescriptor() {
    // Parse the `error` keyword, then the optional type parameter.
    return parseErrorTypeDescriptor(parseErrorKeyword());
}
private STNode parseErrorTypeDescriptor(STNode errorKeywordToken) {
    // The type parameter (e.g. `error<T>`) is optional.
    STNode typeParamsNode = peek().kind == SyntaxKind.LT_TOKEN
            ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createErrorTypeDescriptorNode(errorKeywordToken, typeParamsNode);
}
/**
* Parse error-keyword.
*
* @return Parsed error-keyword node
*/
private STNode parseErrorKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ERROR_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.ERROR_KEYWORD);
        return parseErrorKeyword();
    }
    return consume();
}
/**
* Parse typedesc type descriptor.
* typedesc-type-descriptor := typedesc type-parameter
*
* @return Parsed typedesc type node
*/
private STNode parseTypedescTypeDescriptor(STNode typedescKeywordToken) {
    // The type parameter (e.g. `typedesc<T>`) is optional.
    STNode typeParamsNode = peek().kind == SyntaxKind.LT_TOKEN
            ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createTypedescTypeDescriptorNode(typedescKeywordToken, typeParamsNode);
}
/**
* Parse stream type descriptor.
* <p>
* stream-type-descriptor := stream [stream-type-parameters]
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type descriptor node
*/
private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
    // The stream type parameters (e.g. `stream<T, C>`) are optional.
    STNode streamTypeParamsNode = peek().kind == SyntaxKind.LT_TOKEN
            ? parseStreamTypeParamsNode()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
}
/**
* Parse xml type descriptor.
* xml-type-descriptor := xml type-parameter
*
* @return Parsed typedesc type node
*/
private STNode parseXmlTypeDescriptor(STNode xmlKeywordToken) {
    // The type parameter (e.g. `xml<T>`) is optional.
    STNode xmlTypeParamsNode = peek().kind == SyntaxKind.LT_TOKEN
            ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createXmlTypeDescriptorNode(xmlKeywordToken, xmlTypeParamsNode);
}
/**
* Parse stream type params node.
* <p>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type params node
*/
private STNode parseStreamTypeParamsNode() {
    STNode ltToken = parseLTToken();
    startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode lhsTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode streamTypeParams = parseStreamTypeParamsNode(ltToken, lhsTypeDesc);
    endContext();
    return streamTypeParams;
}
/**
 * Parses the remainder of stream type parameters after the first type descriptor:
 * an optional {@code , completion-type} followed by {@code >}.
 */
private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
    STNode commaToken;
    STNode rightTypeDescNode;
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        // stream<T, C>: a completion type descriptor follows.
        commaToken = parseComma();
        rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    } else if (nextTokenKind == SyntaxKind.GT_TOKEN) {
        commaToken = STNodeFactory.createEmptyNode();
        rightTypeDescNode = STNodeFactory.createEmptyNode();
    } else {
        recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS, ltToken, leftTypeDescNode);
        return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    }
    STNode gtToken = parseGTToken();
    return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
            gtToken);
}
/**
* Parse stream-keyword.
*
* @return Parsed stream-keyword node
*/
private STNode parseStreamKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.STREAM_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.STREAM_KEYWORD);
        return parseStreamKeyword();
    }
    return consume();
}
/**
* Parse let expression.
* <p>
* <code>
* let-expr := let let-var-decl [, let-var-decl]* in expression
* </code>
*
* @return Parsed node
*/
private STNode parseLetExpression(boolean isRhsExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDecls = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
    STNode inKeyword = parseInKeyword();
    // If no variable declarations were found, flag the `let` keyword itself.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDecls, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    STNode inExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLetExpressionNode(letKeyword, letVarDecls, inKeyword, inExpr);
}
/**
* Parse let-keyword.
*
* @return Let-keyword node
*/
private STNode parseLetKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LET_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.LET_KEYWORD);
        return parseLetKeyword();
    }
    return consume();
}
/**
* Parse let variable declarations.
* <p>
* <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
*
* @return Parsed node
*/
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
    startContext(context);
    if (isEndOfLetVarDeclarations(peek().kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    // let-var-decl-list := let-var-decl [, let-var-decl]*
    List<STNode> varDecls = new ArrayList<>();
    varDecls.add(parseLetVarDecl(isRhsExpr));
    while (!isEndOfLetVarDeclarations(peek().kind)) {
        varDecls.add(parseComma());
        varDecls.add(parseLetVarDecl(isRhsExpr));
    }
    endContext();
    return STNodeFactory.createNodeList(varDecls);
}
/**
 * Checks whether a token kind terminates a let-variable declaration list.
 */
private boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.AT_TOKEN) {
        return false;
    }
    if (tokenKind == SyntaxKind.IN_KEYWORD) {
        return true;
    }
    // Any token that can start a type descriptor continues the list.
    return !isTypeStartingToken(tokenKind);
}
/**
* Parse let variable declaration.
* <p>
* <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
*
* @return Parsed node
*/
private STNode parseLetVarDecl(boolean isRhsExpr) {
    STNode annots = parseOptionalAnnotations();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
    STNode assignToken = parseAssignOp();
    STNode initExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
    return STNodeFactory.createLetVariableDeclarationNode(annots, typedBindingPattern, assignToken, initExpr);
}
/**
* Parse raw backtick string template expression.
* <p>
* <code>BacktickString := `expression`</code>
*
* @return Template expression node
*/
private STNode parseTemplateExpression() {
    STNode type = STNodeFactory.createEmptyNode();
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode content = parseTemplateContent();
    // Fix: recover against TEMPLATE_END (not TEMPLATE_START) for the closing backtick,
    // consistent with parseStringTemplateExpression() and parseXMLTemplateExpression().
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
            content, endingBackTick);
}
/**
 * Collects literal chunks and interpolations until the closing backtick (or EOF).
 */
private STNode parseTemplateContent() {
    List<STNode> items = new ArrayList<>();
    while (!isEndOfBacktickContent(peek().kind)) {
        items.add(parseTemplateItem());
    }
    return STNodeFactory.createNodeList(items);
}
private boolean isEndOfBacktickContent(SyntaxKind kind) {
    // Template content ends at the closing backtick, or at end of input.
    return kind == SyntaxKind.EOF_TOKEN || kind == SyntaxKind.BACKTICK_TOKEN;
}
private STNode parseTemplateItem() {
    // An item is either an interpolation `${...}` or a raw template chunk token.
    return peek().kind == SyntaxKind.INTERPOLATION_START_TOKEN ? parseInterpolation() : consume();
}
/**
* Parse string template expression.
* <p>
* <code>string-template-expr := string ` expression `</code>
*
* @return String template expression node
*/
private STNode parseStringTemplateExpression() {
    STNode stringKeyword = parseStringKeyword();
    STNode startBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode templateContent = parseTemplateContent();
    STNode endBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, stringKeyword,
            startBacktick, templateContent, endBacktick);
}
/**
* Parse <code>string</code> keyword.
*
* @return string keyword node
*/
private STNode parseStringKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.STRING_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.STRING_KEYWORD);
        return parseStringKeyword();
    }
    return consume();
}
/**
* Parse XML template expression.
* <p>
* <code>xml-template-expr := xml BacktickString</code>
*
* @return XML template expression
*/
private STNode parseXMLTemplateExpression() {
    STNode xmlKeyword = parseXMLKeyword();
    STNode startBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    // The backtick content is re-parsed as XML rather than kept as raw chunks.
    STNode xmlContent = parseTemplateContentAsXML();
    STNode endBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
            startBacktick, xmlContent, endBacktick);
}
/**
* Parse <code>xml</code> keyword.
*
* @return xml keyword node
*/
private STNode parseXMLKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XML_KEYWORD) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.XML_KEYWORD);
        return parseXMLKeyword();
    }
    return consume();
}
/**
* Parse the content of the template string as XML. This method first read the
* input in the same way as the raw-backtick-template (BacktickString). Then
* it parses the content as XML.
*
* @return XML node
*/
private STNode parseTemplateContentAsXML() {
    // First pass: read the backtick content like a raw template, concatenating the
    // literal chunks into one string. Each interpolation item is replaced by the
    // placeholder "${}" in the string and queued (in source order) for the XML parser.
    ArrayDeque<STNode> expressions = new ArrayDeque<>();
    StringBuilder xmlStringBuilder = new StringBuilder();
    STToken nextToken = peek();
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode contentItem = parseTemplateItem();
        if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
            xmlStringBuilder.append(((STToken) contentItem).text());
        } else {
            xmlStringBuilder.append("${}");
            expressions.add(contentItem);
        }
        nextToken = peek();
    }
    // Second pass: lex and parse the accumulated text as XML; the XML parser
    // re-inserts the queued interpolation expressions at each placeholder.
    CharReader charReader = CharReader.from(xmlStringBuilder.toString());
    AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
    XMLParser xmlParser = new XMLParser(tokenReader, expressions);
    return xmlParser.parse();
}
/**
* Parse interpolation of a back-tick string.
* <p>
* <code>
* interpolation := ${ expression }
* </code>
*
* @return Interpolation node
*/
private STNode parseInterpolation() {
    startContext(ParserRuleContext.INTERPOLATION);
    STNode interpolStart = parseInterpolationStart();
    STNode expr = parseExpression();
    // Absorb any leftover tokens before the closing brace into the expression as
    // invalid-node minutiae, so the interpolation node itself stays well-formed.
    while (!isEndOfInterpolation()) {
        STToken nextToken = consume();
        expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
    }
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}
/**
 * Checks whether the lookahead terminates the current interpolation.
 */
private boolean isEndOfInterpolation() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.BACKTICK_TOKEN) {
        return true;
    }
    if (nextTokenKind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        return false;
    }
    // A close brace only ends the interpolation once the lexer has left the
    // interpolation modes (i.e. no nested braced content remains).
    ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
    return currentLexerMode != ParserMode.INTERPOLATION
            && currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
}
/**
* Parse interpolation start token.
* <p>
* <code>interpolation-start := ${</code>
*
* @return Interpolation start token
*/
private STNode parseInterpolationStart() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.INTERPOLATION_START_TOKEN) {
        // Unexpected token: run error recovery, then retry.
        recover(nextToken, ParserRuleContext.INTERPOLATION_START_TOKEN);
        return parseInterpolationStart();
    }
    return consume();
}
/**
* Parse back-tick token.
*
* @return Back-tick token
*/
private STNode parseBacktickToken(ParserRuleContext ctx) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BACKTICK_TOKEN) {
        // Recover against the caller-supplied context (template start or end).
        recover(nextToken, ctx);
        return parseBacktickToken(ctx);
    }
    return consume();
}
/**
* Parse table type descriptor.
* <p>
* table-type-descriptor := table row-type-parameter [key-constraint]
* row-type-parameter := type-parameter
* key-constraint := key-specifier | key-type-constraint
* key-specifier := key ( [ field-name (, field-name)* ] )
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed table type desc node.
*/
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
    STNode rowTypeParam = parseRowTypeParameter();
    STNode keyConstraint;
    STToken nextToken = peek();
    if (isKeyKeyword(nextToken)) {
        // `key` is contextual: re-brand the identifier before parsing the constraint.
        keyConstraint = parseKeyConstraint(getKeyKeyword(consume()));
    } else {
        keyConstraint = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParam, keyConstraint);
}
/**
* Parse row type parameter node.
* <p>
* row-type-parameter := type-parameter
* </p>
*
* @return Parsed node.
*/
private STNode parseRowTypeParameter() {
    // The row type is an ordinary type-parameter parsed in its own context.
    startContext(ParserRuleContext.ROW_TYPE_PARAM);
    STNode rowTypeParam = parseTypeParameter();
    endContext();
    return rowTypeParam;
}
/**
* Parse type parameter node.
* <p>
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseTypeParameter() {
    STNode ltToken = parseLTToken();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createTypeParameterNode(ltToken, typeDesc, gtToken);
}
/**
* Parse key constraint.
* <p>
* key-constraint := key-specifier | key-type-constraint
* </p>
*
* @return Parsed node.
*/
private STNode parseKeyConstraint(STNode keyKeywordToken) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // key ( field-names )
        return parseKeySpecifier(keyKeywordToken);
    }
    if (nextTokenKind == SyntaxKind.LT_TOKEN) {
        // key < type-descriptor >
        return parseKeyTypeConstraint(keyKeywordToken);
    }
    recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS, keyKeywordToken);
    return parseKeyConstraint(keyKeywordToken);
}
/**
* Parse key specifier given parsed key keyword token.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier(STNode keyKeywordToken) {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode openParen = parseOpenParenthesis();
    STNode fieldNames = parseFieldNames();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParen, fieldNames, closeParen);
}
/**
* Parse key type constraint.
* <p>
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed node
*/
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
    return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, parseTypeParameter());
}
/**
* Parse function type descriptor.
* <p>
* <code>function-type-descriptor := [isolated] function function-signature</code>
*
* @param qualifiers Preceding type descriptor qualifiers
* @return Function type descriptor node
*/
private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC);
    STNode functionKeyword = parseFunctionKeyword();
    boolean hasFuncSignature = peek().kind == SyntaxKind.OPEN_PAREN_TOKEN;
    // Parse the signature (if any) before validating the qualifiers, so that
    // qualifier diagnostics attach to the correct following token.
    STNode signature = hasFuncSignature ? parseFuncSignature(true) : STNodeFactory.createEmptyNode();
    STNode qualifierList = createFuncTypeQualNodeList(qualifiers, hasFuncSignature);
    endContext();
    return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
}
/**
 * Validates function-type qualifiers and assembles the accepted ones into a node list.
 * Duplicates and disallowed qualifiers are attached as invalid-node minutiae with a
 * diagnostic rather than being silently dropped.
 *
 * @param qualifierList    parsed qualifier tokens, in source order
 * @param hasFuncSignature whether the function type has a signature (widens the allowed set)
 * @return node list containing only the accepted qualifiers
 */
private STNode createFuncTypeQualNodeList(List<STNode> qualifierList, boolean hasFuncSignature) {
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate qualifier: attach to the previously accepted node.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
            validatedList.add(qualifier);
        } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
            // `isolated` is accepted even when there is no signature.
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Last qualifier in the list: attach the diagnostic to the next token.
            addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                    ((STToken) qualifier).text());
        } else {
            // Otherwise attach it as leading minutiae of the following qualifier.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    return STNodeFactory.createNodeList(validatedList);
}
private boolean isRegularFuncQual(SyntaxKind tokenKind) {
    // Qualifiers permitted on a function type that carries a signature.
    return tokenKind == SyntaxKind.ISOLATED_KEYWORD || tokenKind == SyntaxKind.TRANSACTIONAL_KEYWORD;
}
/**
 * Parse explicit anonymous function expression.
 * <p>
 * <code>explicit-anonymous-function-expr :=
 * [annots] (isolated| transactional) function function-signature anon-func-body</code>
 *
 * @param annots     Annotations
 * @param qualifiers Function qualifiers
 * @param isRhsExpr  Is expression in rhs context
 * @return Anonymous function expression node
 */
private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
    startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
    STNode qualifierNodeList = createFuncTypeQualNodeList(qualifiers, true);
    STNode functionKeyword = parseFunctionKeyword();
    STNode signature = parseFuncSignature(false);
    STNode body = parseAnonFuncBody(isRhsExpr);
    return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierNodeList, functionKeyword,
            signature, body);
}
/**
 * Parse anonymous function body.
 * <p>
 * <code>anon-func-body := block-function-body | expr-function-body</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Anon function body node
 */
private STNode parseAnonFuncBody(boolean isRhsExpr) {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case EOF_TOKEN:
            // Block body: the anon-func context (opened by the caller) is closed after parsing.
            STNode body = parseFunctionBodyBlock(true);
            endContext();
            return body;
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // Expression body: close the context before parsing the expression.
            endContext();
            return parseExpressionFuncBody(true, isRhsExpr);
        default:
            recover(peek(), ParserRuleContext.ANON_FUNC_BODY, isRhsExpr);
            return parseAnonFuncBody(isRhsExpr);
    }
}
/**
 * Parse expression function body.
 * <p>
 * <code>expr-function-body := => expression</code>
 *
 * @param isAnon    Is anonymous function
 * @param isRhsExpr Is expression in rhs context
 * @return Expression function body node
 */
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    // Anonymous function bodies have no terminating semicolon.
    STNode semiColon = isAnon ? STNodeFactory.createEmptyNode() : parseSemicolon();
    return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}
/**
 * Parse '=>' token.
 *
 * @return Double right arrow token
 */
private STNode parseDoubleRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        recover(nextToken, ParserRuleContext.EXPR_FUNC_BODY_START);
        return parseDoubleRightArrow();
    }
    return consume();
}
/**
 * Parse the remainder of an implicit anonymous function expression, given its parameter node.
 * <p>
 * The {@code params} node is normalized first: a braced expression is converted to an
 * anon-func parameter list, and any other node kind is replaced by a synthetic missing
 * identifier with the original node attached as leading invalid minutiae.
 *
 * @param params    Parameter node (simple name reference, infer param list, or braced expression)
 * @param isRhsExpr Is expression in rhs context
 * @return Implicit anonymous function expression node
 */
private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
    switch (params.kind) {
        case SIMPLE_NAME_REFERENCE:
        case INFER_PARAM_LIST:
            break;
        case BRACED_EXPRESSION:
            params = getAnonFuncParam((STBracedExpressionNode) params);
            break;
        default:
            // Invalid parameter shape: synthesize a missing identifier and attach the
            // original node to it as invalid minutiae.
            STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
            params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
    }
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
}
/**
 * Create a new anon-func-param node from a braced expression.
 * <p>
 * Only a simple name reference is a valid single inferred parameter; any other inner
 * expression is attached to the open-paren token as invalid minutiae.
 *
 * @param bracedExpression Braced expression
 * @return Anon-func param node
 */
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
    List<STNode> inferredParams = new ArrayList<>();
    STNode openParenToken = bracedExpression.openParen;
    STNode inner = bracedExpression.expression;
    if (inner.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        inferredParams.add(inner);
    } else {
        openParenToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParenToken, inner,
                DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
    }
    return STNodeFactory.createImplicitAnonymousFunctionParameters(openParenToken,
            STNodeFactory.createNodeList(inferredParams), bracedExpression.closeParen);
}
/**
 * Parse implicit anon function expression.
 *
 * @param openParen Open parenthesis token
 * @param firstParam First parameter
 * @param isRhsExpr Is expression in rhs context
 * @return Implicit anon function expression node
 */
private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
    List<STNode> paramList = new ArrayList<>();
    paramList.add(firstParam);
    STToken nextToken = peek();
    STNode paramEnd;
    STNode param;
    // Collect the remaining comma-separated parameters; the separators are stored
    // in the same list, interleaved with the parameter nodes.
    while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
        paramEnd = parseImplicitAnonFuncParamEnd();
        if (paramEnd == null) {
            break;
        }
        paramList.add(paramEnd);
        param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
        param = STNodeFactory.createSimpleNameReferenceNode(param);
        paramList.add(param);
        nextToken = peek();
    }
    STNode params = STNodeFactory.createNodeList(paramList);
    STNode closeParen = parseCloseParenthesis();
    // NOTE(review): this closes a context opened by the caller before invoking this
    // method — confirm against call sites.
    endContext();
    STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
    return parseImplicitAnonFunc(inferedParams, isRhsExpr);
}
/**
 * Parse the separator between implicit anon-func parameters, or detect the end of the list.
 *
 * @return Comma token, or {@code null} when the closing parenthesis is reached
 */
private STNode parseImplicitAnonFuncParamEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.ANON_FUNC_PARAM_RHS);
    return parseImplicitAnonFuncParamEnd();
}
/**
 * Check whether the given token kind terminates an implicit anon-func parameter list.
 *
 * @param tokenKind Token kind to check
 * @return {@code true} if the parameter list ends at this token
 */
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN
            || tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN
            || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || tokenKind == SyntaxKind.SEMICOLON_TOKEN
            || tokenKind == SyntaxKind.RETURNS_KEYWORD
            || tokenKind == SyntaxKind.TYPE_KEYWORD
            || tokenKind == SyntaxKind.LISTENER_KEYWORD
            || tokenKind == SyntaxKind.IF_KEYWORD
            || tokenKind == SyntaxKind.WHILE_KEYWORD
            || tokenKind == SyntaxKind.DO_KEYWORD
            || tokenKind == SyntaxKind.OPEN_BRACE_TOKEN
            || tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN;
}
/**
 * Parse tuple type descriptor.
 * <p>
 * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
 * <br/><br/>
 * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
 * | [ tuple-rest-descriptor ]
 * <br/><br/>
 * tuple-rest-descriptor := type-descriptor ...
 * </code>
 *
 * @return Tuple type descriptor node
 */
private STNode parseTupleTypeDesc() {
    STNode openBracket = parseOpenBracket();
    startContext(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    STNode memberTypeDescList = parseTupleMemberTypeDescList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    // An empty member list is reported as a diagnostic on the open bracket.
    openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDescList, openBracket,
            DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescList, closeBracket);
}
/**
 * Parse tuple member type descriptors.
 *
 * @return Node list of member type descriptors (empty if the tuple has no members)
 */
private STNode parseTupleMemberTypeDescList() {
    if (isEndOfTypeList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    STNode firstTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    return parseTupleTypeMembers(firstTypeDesc, new ArrayList<>());
}
/**
 * Parse the remaining tuple members, given the first member type descriptor.
 * <p>
 * Separators and members are stored interleaved in {@code typeDescList}. A rest
 * descriptor must be the last member; any members parsed after one are attached
 * to it as invalid nodes.
 *
 * @param typeDesc     First member type descriptor, already parsed
 * @param typeDescList Accumulator for members and separators
 * @return Node list of tuple members
 */
private STNode parseTupleTypeMembers(STNode typeDesc, List<STNode> typeDescList) {
    // If the first member is followed by `...`, it becomes a rest descriptor.
    STNode tupleMemberRhs = parseTypeDescInTupleRhs();
    if (tupleMemberRhs != null) {
        typeDesc = STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
    }
    while (!isEndOfTypeList(peek().kind)) {
        if (typeDesc.kind == SyntaxKind.REST_TYPE) {
            // Anything following a rest descriptor is invalid.
            typeDesc = invalidateTypeDescAfterRestDesc(typeDesc);
            break;
        }
        tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        typeDescList.add(typeDesc);
        typeDescList.add(tupleMemberRhs);
        typeDesc = parseMemberDescriptor();
    }
    typeDescList.add(typeDesc);
    return STNodeFactory.createNodeList(typeDescList);
}
/**
 * Parse a single tuple member type descriptor, including an optional trailing ellipsis
 * that turns it into a rest descriptor.
 *
 * @return Member type descriptor node, or a rest descriptor node
 */
private STNode parseMemberDescriptor() {
    STNode memberTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    STNode ellipsis = parseTypeDescInTupleRhs();
    if (ellipsis == null) {
        return memberTypeDesc;
    }
    return STNodeFactory.createRestDescriptorNode(memberTypeDesc, ellipsis);
}
/**
 * Consume tuple members that appear after a rest descriptor and attach them to the
 * rest descriptor as trailing invalid-node minutiae, since a rest descriptor must be
 * the last tuple member.
 *
 * @param restDescriptor Rest descriptor node to attach the invalid members to
 * @return Rest descriptor with invalid trailing members attached
 */
private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
    while (!isEndOfTypeList(peek().kind)) {
        STNode tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        // The separator carries no diagnostic of its own; the member does.
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseMemberDescriptor(),
                DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);
    }
    return restDescriptor;
}
/**
 * Parse the separator between tuple members, or detect the end of the member list.
 *
 * @return Comma token, or {@code null} when the closing bracket is reached
 */
private STNode parseTupleMemberRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
    return parseTupleMemberRhs();
}
/**
 * Parse the optional ellipsis that follows a tuple member type descriptor.
 *
 * @return Ellipsis token, or {@code null} when the member is not a rest descriptor
 */
private STNode parseTypeDescInTupleRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseEllipsis();
    }
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
    return parseTypeDescInTupleRhs();
}
/**
 * Check whether the given token kind terminates a type descriptor list.
 *
 * @param nextTokenKind Token kind to check
 * @return {@code true} if the type list ends at this token
 */
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN
            || nextTokenKind == SyntaxKind.EOF_TOKEN
            || nextTokenKind == SyntaxKind.EQUAL_TOKEN
            || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse table constructor or query expression.
 * <p>
 * <code>
 * table-constructor-or-query-expr := table-constructor-expr | query-expr
 * <br/>
 * table-constructor-expr := table [key-specifier] [ [row-list] ]
 * <br/>
 * query-expr := [query-construct-type] query-pipeline select-clause
 * [query-construct-type] query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-construct-type := table key-specifier | stream
 * </code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode result = parseTableConstructorOrQueryInternal(isRhsExpr);
    endContext();
    return result;
}
/**
 * Dispatch between the three possible starts of a table-constructor-or-query expression:
 * a bare <code>from</code> (query with no construct type), <code>stream</code> (stream
 * query), or <code>table</code> (table constructor or table query).
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {
    STNode queryConstructType;
    switch (peek().kind) {
        case FROM_KEYWORD:
            // Query with no construct type.
            queryConstructType = STNodeFactory.createEmptyNode();
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case STREAM_KEYWORD:
            queryConstructType = parseQueryConstructType(parseStreamKeyword(), null);
            return parseQueryExprRhs(queryConstructType, isRhsExpr);
        case TABLE_KEYWORD:
            STNode tableKeyword = parseTableKeyword();
            return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
        default:
            recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START, isRhsExpr);
            return parseTableConstructorOrQueryInternal(isRhsExpr);
    }
}
/**
 * Parse what follows the <code>table</code> keyword: a table constructor (open bracket),
 * or a table query with a <code>key</code> specifier.
 *
 * @param tableKeyword Already-parsed <code>table</code> keyword
 * @param isRhsExpr    Is expression in rhs context
 * @return Parsed node
 */
private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
    STNode keySpecifier;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Table constructor without a key specifier.
            keySpecifier = STNodeFactory.createEmptyNode();
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        case KEY_KEYWORD:
            keySpecifier = parseKeySpecifier();
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
        case IDENTIFIER_TOKEN:
            // `key` may arrive as an identifier token; check its text.
            if (isKeyKeyword(nextToken)) {
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            }
            break;
        default:
            break;
    }
    // Falls through here on unexpected tokens: recover and retry.
    recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS, tableKeyword, isRhsExpr);
    return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
}
/**
 * Parse what follows <code>table key-specifier</code>: a query (<code>from</code>) or a
 * table constructor (open bracket).
 *
 * @param tableKeyword Already-parsed <code>table</code> keyword
 * @param keySpecifier Already-parsed key specifier
 * @param isRhsExpr    Is expression in rhs context
 * @return Parsed node
 */
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
    switch (peek().kind) {
        case FROM_KEYWORD:
            return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
        case OPEN_BRACKET_TOKEN:
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        default:
            recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS, tableKeyword, keySpecifier,
                    isRhsExpr);
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
    }
}
/**
 * Parse query construct type.
 * <p>
 * <code>query-construct-type := table key-specifier | stream</code>
 *
 * @param keyword      <code>table</code> or <code>stream</code> keyword
 * @param keySpecifier Key specifier for <code>table</code>; may be {@code null} for
 *                     <code>stream</code>
 * @return Parsed node
 */
private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
    return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
}
/**
 * Parse query action or expression.
 * <p>
 * <code>
 * query-expr-rhs := query-pipeline select-clause
 * query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-pipeline := from-clause intermediate-clause*
 * </code>
 *
 * @param queryConstructType queryConstructType that precedes this rhs
 * @param isRhsExpr          Is expression in rhs context
 * @return Parsed node
 */
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
    switchContext(ParserRuleContext.QUERY_EXPRESSION);
    STNode fromClause = parseFromClause(isRhsExpr);
    List<STNode> clauses = new ArrayList<>();
    STNode intermediateClause;
    STNode selectClause = null;
    // Collect intermediate clauses; the select clause is tracked separately so that
    // anything parsed after it can be flagged as invalid.
    while (!isEndOfIntermediateClause(peek().kind)) {
        intermediateClause = parseIntermediateClause(isRhsExpr);
        if (intermediateClause == null) {
            break;
        }
        if (selectClause != null) {
            // Clauses after `select` are attached to it as invalid minutiae.
            selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                    DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
            continue;
        }
        if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
            selectClause = intermediateClause;
        } else {
            clauses.add(intermediateClause);
        }
    }
    // A trailing `do` makes this a query action rather than a query expression.
    if (peek().kind == SyntaxKind.DO_KEYWORD) {
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        return parseQueryAction(queryConstructType, queryPipeline, selectClause, isRhsExpr);
    }
    if (selectClause == null) {
        // Synthesize a missing select clause; the diagnostic goes on the last clause
        // parsed (or the from-clause if there were no intermediate clauses).
        STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
        STNode expr = STNodeFactory
                .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);
        if (clauses.isEmpty()) {
            fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
        } else {
            int lastIndex = clauses.size() - 1;
            STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                    DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            clauses.set(lastIndex, intClauseWithDiagnostic);
        }
    }
    STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
    STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
    STNode onConflictClause = parseOnConflictClause(isRhsExpr);
    return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,
            onConflictClause);
}
/**
 * Parse an intermediate clause.
 * <p>
 * <code>
 * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
 * </code>
 * <p>
 * A <code>select</code> clause is also parsed here; the caller separates it from the
 * intermediate clauses.
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed clause node, or {@code null} when the query pipeline ends
 */
private STNode parseIntermediateClause(boolean isRhsExpr) {
    switch (peek().kind) {
        case FROM_KEYWORD:
            return parseFromClause(isRhsExpr);
        case WHERE_KEYWORD:
            return parseWhereClause(isRhsExpr);
        case LET_KEYWORD:
            return parseLetClause(isRhsExpr);
        case SELECT_KEYWORD:
            return parseSelectClause(isRhsExpr);
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
            return parseJoinClause(isRhsExpr);
        case ORDER_KEYWORD:
        case BY_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
            return parseOrderByClause(isRhsExpr);
        case LIMIT_KEYWORD:
            return parseLimitClause(isRhsExpr);
        case DO_KEYWORD:
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
            // These terminate the pipeline; the caller handles them.
            return null;
        default:
            recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS, isRhsExpr);
            return parseIntermediateClause(isRhsExpr);
    }
}
/**
 * Parse join-keyword.
 *
 * @return Join-keyword node
 */
private STNode parseJoinKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.JOIN_KEYWORD) {
        recover(nextToken, ParserRuleContext.JOIN_KEYWORD);
        return parseJoinKeyword();
    }
    return consume();
}
/**
 * Parse equals keyword.
 *
 * @return Equals-keyword node
 */
private STNode parseEqualsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.EQUALS_KEYWORD) {
        recover(nextToken, ParserRuleContext.EQUALS_KEYWORD);
        return parseEqualsKeyword();
    }
    return consume();
}
/**
 * Check whether the given token kind terminates the intermediate-clause list of a query.
 * Tokens that can start an expression rhs also terminate the list.
 *
 * @param tokenKind Token kind to check
 * @return {@code true} if the clause list ends at this token
 */
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN
            || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || tokenKind == SyntaxKind.OPEN_BRACE_TOKEN
            || tokenKind == SyntaxKind.SEMICOLON_TOKEN
            || tokenKind == SyntaxKind.PUBLIC_KEYWORD
            || tokenKind == SyntaxKind.FUNCTION_KEYWORD
            || tokenKind == SyntaxKind.EOF_TOKEN
            || tokenKind == SyntaxKind.RESOURCE_KEYWORD
            || tokenKind == SyntaxKind.LISTENER_KEYWORD
            || tokenKind == SyntaxKind.DOCUMENTATION_STRING
            || tokenKind == SyntaxKind.PRIVATE_KEYWORD
            || tokenKind == SyntaxKind.RETURNS_KEYWORD
            || tokenKind == SyntaxKind.SERVICE_KEYWORD
            || tokenKind == SyntaxKind.TYPE_KEYWORD
            || tokenKind == SyntaxKind.CONST_KEYWORD
            || tokenKind == SyntaxKind.FINAL_KEYWORD
            || tokenKind == SyntaxKind.DO_KEYWORD
            || isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
}
/**
 * Parse from clause.
 * <p>
 * <code>from-clause := from typed-binding-pattern in expression</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseFromClause(boolean isRhsExpr) {
    STNode fromKeyword = parseFromKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode collectionExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createFromClauseNode(fromKeyword, bindingPattern, inKeyword, collectionExpr);
}
/**
 * Parse from-keyword.
 *
 * @return From-keyword node
 */
private STNode parseFromKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FROM_KEYWORD) {
        recover(nextToken, ParserRuleContext.FROM_KEYWORD);
        return parseFromKeyword();
    }
    return consume();
}
/**
 * Parse where clause.
 * <p>
 * <code>where-clause := where expression</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseWhereClause(boolean isRhsExpr) {
    STNode whereKeyword = parseWhereKeyword();
    STNode condition = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, condition);
}
/**
 * Parse where-keyword.
 *
 * @return Where-keyword node
 */
private STNode parseWhereKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WHERE_KEYWORD) {
        recover(nextToken, ParserRuleContext.WHERE_KEYWORD);
        return parseWhereKeyword();
    }
    return consume();
}
/**
 * Parse limit-keyword.
 *
 * @return Limit-keyword node
 */
private STNode parseLimitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.LIMIT_KEYWORD);
        return parseLimitKeyword();
    }
    return consume();
}
/**
 * Parse let clause.
 * <p>
 * <code>let-clause := let let-var-decl [, let-var-decl]* </code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseLetClause(boolean isRhsExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDecls = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
    // An empty declaration list is reported as a diagnostic on the `let` keyword.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDecls, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    return STNodeFactory.createLetClauseNode(letKeyword, letVarDecls);
}
/**
 * Parse order-keyword.
 *
 * @return Order-keyword node
 */
private STNode parseOrderKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ORDER_KEYWORD) {
        recover(nextToken, ParserRuleContext.ORDER_KEYWORD);
        return parseOrderKeyword();
    }
    return consume();
}
/**
 * Parse by-keyword.
 *
 * @return By-keyword node
 */
private STNode parseByKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BY_KEYWORD) {
        recover(nextToken, ParserRuleContext.BY_KEYWORD);
        return parseByKeyword();
    }
    return consume();
}
/**
 * Parse order by clause.
 * <p>
 * <code>order-by-clause := order by order-key-list</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseOrderByClause(boolean isRhsExpr) {
    STNode orderKeyword = parseOrderKeyword();
    STNode byKeyword = parseByKeyword();
    STNode orderKeyList = parseOrderKeyList(isRhsExpr);
    // An empty order-key list is reported as a diagnostic on the `by` keyword.
    byKeyword = cloneWithDiagnosticIfListEmpty(orderKeyList, byKeyword,
            DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
    return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeyList);
}
/**
 * Parse order key.
 * <p>
 * <code>order-key-list := order-key [, order-key]*</code>
 * <p>
 * Keys and comma separators are stored interleaved in the returned node list.
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseOrderKeyList(boolean isRhsExpr) {
    startContext(ParserRuleContext.ORDER_KEY_LIST);
    List<STNode> orderKeys = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfOrderKeys(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    STNode orderKey = parseOrderKey(isRhsExpr);
    orderKeys.add(orderKey);
    nextToken = peek();
    STNode orderKeyListMemberEnd;
    while (!isEndOfOrderKeys(nextToken.kind)) {
        orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
        if (orderKeyListMemberEnd == null) {
            break;
        }
        orderKeys.add(orderKeyListMemberEnd);
        orderKey = parseOrderKey(isRhsExpr);
        orderKeys.add(orderKey);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(orderKeys);
}
/**
 * Check whether the given token kind terminates an order-key list. Commas and
 * direction keywords continue the list; a semicolon, EOF, or any query-clause
 * start token ends it.
 *
 * @param tokenKind Token kind to check
 * @return {@code true} if the order-key list ends at this token
 */
private boolean isEndOfOrderKeys(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.ASCENDING_KEYWORD
            || tokenKind == SyntaxKind.DESCENDING_KEYWORD) {
        return false;
    }
    if (tokenKind == SyntaxKind.SEMICOLON_TOKEN || tokenKind == SyntaxKind.EOF_TOKEN) {
        return true;
    }
    return isQueryClauseStartToken(tokenKind);
}
/**
 * Check whether the given token kind can start a query clause.
 *
 * @param tokenKind Token kind to check
 * @return {@code true} if a query clause can start with this token
 */
private boolean isQueryClauseStartToken(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.SELECT_KEYWORD
            || tokenKind == SyntaxKind.LET_KEYWORD
            || tokenKind == SyntaxKind.WHERE_KEYWORD
            || tokenKind == SyntaxKind.OUTER_KEYWORD
            || tokenKind == SyntaxKind.JOIN_KEYWORD
            || tokenKind == SyntaxKind.ORDER_KEYWORD
            || tokenKind == SyntaxKind.DO_KEYWORD
            || tokenKind == SyntaxKind.FROM_KEYWORD
            || tokenKind == SyntaxKind.LIMIT_KEYWORD;
}
/**
 * Parse the separator between order keys, or detect the end of the order-key list.
 *
 * @return Comma token, or {@code null} at EOF or a query-clause start token
 */
private STNode parseOrderKeyListMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.EOF_TOKEN || isQueryClauseStartToken(nextToken.kind)) {
        return null;
    }
    recover(nextToken, ParserRuleContext.ORDER_KEY_LIST_END);
    return parseOrderKeyListMemberEnd();
}
/**
 * Parse order key.
 * <p>
 * <code>order-key := expression (ascending | descending)?</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseOrderKey(boolean isRhsExpr) {
    STNode keyExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    SyntaxKind nextTokenKind = peek().kind;
    STNode direction;
    if (nextTokenKind == SyntaxKind.ASCENDING_KEYWORD || nextTokenKind == SyntaxKind.DESCENDING_KEYWORD) {
        direction = consume();
    } else {
        // Direction keyword is optional.
        direction = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createOrderKeyNode(keyExpression, direction);
}
/**
 * Parse select clause.
 * <p>
 * <code>select-clause := select expression</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseSelectClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.SELECT_CLAUSE);
    STNode selectKeyword = parseSelectKeyword();
    STNode selectExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    return STNodeFactory.createSelectClauseNode(selectKeyword, selectExpr);
}
/**
 * Parse select-keyword.
 *
 * @return Select-keyword node
 */
private STNode parseSelectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SELECT_KEYWORD) {
        recover(nextToken, ParserRuleContext.SELECT_KEYWORD);
        return parseSelectKeyword();
    }
    return consume();
}
/**
 * Parse on-conflict clause.
 * <p>
 * <code>onConflictClause := on conflict expression</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return On conflict clause node, or an empty node when the clause is absent
 */
private STNode parseOnConflictClause(boolean isRhsExpr) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind != SyntaxKind.ON_KEYWORD && nextTokenKind != SyntaxKind.CONFLICT_KEYWORD) {
        // Clause is optional.
        return STNodeFactory.createEmptyNode();
    }
    startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode conflictKeyword = parseConflictKeyword();
    endContext();
    STNode conflictExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, conflictExpr);
}
/**
 * Parse conflict keyword.
 *
 * @return Conflict keyword node
 */
private STNode parseConflictKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        recover(nextToken, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
    return consume();
}
/**
 * Parse limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Limit clause node
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STNode limitKeyword = parseLimitKeyword();
    STNode limitExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, limitExpr);
}
/**
 * Parse join clause.
 * <p>
 * <code>
 * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
 * <br/>
 * join-var-decl := join (typeName | var) bindingPattern
 * <br/>
 * outer-join-var-decl := outer join var binding-pattern
 * </code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Join clause node
 */
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);
    // Optional `outer` keyword.
    STNode outerKeyword =
            peek().kind == SyntaxKind.OUTER_KEYWORD ? consume() : STNodeFactory.createEmptyNode();
    STNode joinKeyword = parseJoinKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode collectionExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, bindingPattern, inKeyword, collectionExpr,
            onCondition);
}
/**
 * Parse on clause.
 * <p>
 * <code>on clause := `on` expression `equals` expression</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return On clause node (a synthesized missing on-clause if the next token starts
 *         another query clause)
 */
private STNode parseOnClause(boolean isRhsExpr) {
    if (isQueryClauseStartToken(peek().kind)) {
        return createMissingOnClauseNode();
    }
    startContext(ParserRuleContext.ON_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode lhs = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STNode equalsKeyword = parseEqualsKeyword();
    endContext();
    STNode rhs = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnClauseNode(onKeyword, lhs, equalsKeyword, rhs);
}
/**
 * Create an on-clause node made entirely of missing tokens, used when a join clause
 * lacks its mandatory on-clause. The same missing identifier backs both sides of the
 * `equals` comparison.
 *
 * @return Synthesized on-clause node with missing-token diagnostics
 */
private STNode createMissingOnClauseNode() {
    STNode missingOn = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    STNode missingEquals = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);
    STNode lhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    STNode rhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    return STNodeFactory.createOnClauseNode(missingOn, lhs, missingEquals, rhs);
}
/**
 * Parse start action.
 * <p>
 * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
 * <p>
 * A bare name reference is promoted to a function call with missing parentheses. Any
 * other expression is attached to the <code>start</code> keyword as an invalid node and
 * replaced by a fully synthesized function call.
 *
 * @param annots Annotations preceding the action
 * @return Start action node
 */
private STNode parseStartAction(STNode annots) {
    STNode startKeyword = parseStartKeyword();
    STNode expr = parseActionOrExpression();
    switch (expr.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
        case REMOTE_METHOD_CALL_ACTION:
            // Already a valid start-action operand.
            break;
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
            // Promote the name reference to a call with missing parentheses.
            STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
            STNode arguments = STNodeFactory.createEmptyNodeList();
            STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
            break;
        default:
            // Invalid operand: attach it to the `start` keyword and substitute a
            // completely synthesized call. Note: the locals declared in the case
            // above are reused here (no re-declaration).
            startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                    DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
            STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
            openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
            arguments = STNodeFactory.createEmptyNodeList();
            closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken, arguments,
                    closeParenToken);
            break;
    }
    return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
}
/**
 * Parse start keyword.
 *
 * @return Start keyword node
 */
private STNode parseStartKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.START_KEYWORD) {
        recover(nextToken, ParserRuleContext.START_KEYWORD);
        return parseStartKeyword();
    }
    return consume();
}
/**
 * Parse flush action.
 * <p>
 * <code>flush-action := flush [peer-worker]</code>
 *
 * @return Flush action node
 */
private STNode parseFlushAction() {
    STNode flushKeyword = parseFlushKeyword();
    STNode peerWorkerName = parseOptionalPeerWorkerName();
    return STNodeFactory.createFlushActionNode(flushKeyword, peerWorkerName);
}
/**
 * Parse flush keyword.
 *
 * @return Flush keyword node
 */
private STNode parseFlushKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FLUSH_KEYWORD) {
        recover(nextToken, ParserRuleContext.FLUSH_KEYWORD);
        return parseFlushKeyword();
    }
    return consume();
}
/**
 * Parse peer worker.
 * <p>
 * <code>peer-worker := worker-name | function</code>
 *
 * @return Peer worker name node, or an empty node when absent
 */
private STNode parseOptionalPeerWorkerName() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN || nextTokenKind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    return STNodeFactory.createEmptyNode();
}
/**
 * Parse intersection type descriptor.
 * <p>
 * intersection-type-descriptor := type-descriptor & type-descriptor
 * </p>
 *
 * @param leftTypeDesc          Left-hand-side type descriptor, already parsed
 * @param context               Current parser rule context
 * @param isTypedBindingPattern Whether this is within a typed binding pattern
 * @return Intersection type descriptor node
 */
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                               boolean isTypedBindingPattern) {
    // The `&` token is already validated by the caller.
    STNode ampersandToken = consume();
    STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false);
    return createIntersectionTypeDesc(leftTypeDesc, ampersandToken, rightTypeDesc);
}
/**
 * Create an intersection type descriptor node, validating both operands for invalid
 * usage of <code>var</code>.
 *
 * @param leftTypeDesc   Left operand type descriptor
 * @param bitwiseAndToken The <code>&amp;</code> token
 * @param rightTypeDesc  Right operand type descriptor
 * @return Intersection type descriptor node
 */
private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
    STNode validatedLhs = validateForUsageOfVar(leftTypeDesc);
    STNode validatedRhs = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createIntersectionTypeDescriptorNode(validatedLhs, bitwiseAndToken, validatedRhs);
}
/**
 * Parse singleton type descriptor.
 * <p>
 * singleton-type-descriptor := simple-const-expr
 * simple-const-expr :=
 * nil-literal
 * | boolean-literal
 * | [Sign] int-literal
 * | [Sign] floating-point-literal
 * | string-literal
 * | constant-reference-expr
 * </p>
 *
 * @return Singleton type descriptor node
 */
private STNode parseSingletonTypeDesc() {
    STNode simpleConstExpr = parseSimpleConstExpr();
    return STNodeFactory.createSingletonTypeDescriptorNode(simpleConstExpr);
}
/**
 * Parse a signed numeric literal (e.g. <code>+1</code>, <code>-2.5</code>) as a unary
 * expression wrapping the literal.
 *
 * @return Unary expression node with the sign operator and the numeric literal
 */
private STNode parseSignedIntOrFloat() {
    STNode operator = parseUnaryOperator();
    STNode literal;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            literal = parseBasicLiteral();
            break;
        default:
            // Anything else is treated as a decimal int literal, recovering if absent.
            literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
            literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
    }
    return STNodeFactory.createUnaryExpressionNode(operator, literal);
}
/**
 * Check whether the given token starts a singleton type descriptor.
 *
 * @param tokenKind     Token kind at the potential start
 * @param inTypeDescCtx Whether the parser is already in a type descriptor context
 * @return {@code true} if a singleton type descriptor starts at this token
 */
private boolean isSingletonTypeDescStart(SyntaxKind tokenKind, boolean inTypeDescCtx) {
    STToken nextNextToken = getNextNextToken();
    switch (tokenKind) {
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            // Outside a type-desc context, what follows must look like a type-desc rhs.
            return inTypeDescCtx || isValidTypeDescRHSOutSideTypeDescCtx(nextNextToken);
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            return isIntOrFloat(nextNextToken);
        default:
            return false;
    }
}
/**
 * Check whether the given token is a numeric (int or float) literal.
 *
 * @param token Token to check
 * @return {@code true} for decimal/hex integer and floating-point literal tokens
 */
static boolean isIntOrFloat(STToken token) {
    SyntaxKind kind = token.kind;
    return kind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN;
}
/**
 * Check whether a token can follow a type descriptor when the parser is NOT
 * already inside a type-descriptor context (e.g. `?`, `[`, `|`, `&` continue a type).
 *
 * @param token Token to test
 * @return {@code true} if the token is a valid type-desc right-hand-side here
 */
private boolean isValidTypeDescRHSOutSideTypeDescCtx(STToken token) {
    switch (token.kind) {
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Check whether the parser reached to a valid expression start.
 *
 * @param nextTokenKind Kind of the next immediate token.
 * @param nextTokenIndex Index to the next token.
 * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
 */
private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
    // All lookaheads below inspect the token AFTER the one passed in.
    nextTokenIndex++;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal is an expression start only if what follows can terminate
            // or continue an expression (`;`, `,`, `]`, or a binary/expr RHS).
            SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
            return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN || nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                    nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                    isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case IDENTIFIER_TOKEN:
            return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case OPEN_PAREN_TOKEN:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TRAP_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case LT_TOKEN:
        case FROM_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case NEW_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case ISOLATED_KEYWORD:
            // These tokens unconditionally begin an expression.
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Unary sign: valid only if a valid expression follows the sign.
            return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
        case TABLE_KEYWORD:
            // `table from ...` (table query expression).
            return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
        case STREAM_KEYWORD:
            // `stream key ...`, `stream[...]`, or `stream from ...`.
            STToken nextNextToken = peek(nextTokenIndex);
            return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                    nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                    nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
        case ERROR_KEYWORD:
            // Error constructor: `error(...)`.
            return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
        case XML_KEYWORD:
        case STRING_KEYWORD:
            // Template expressions: xml`...` / string`...`.
            return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case WAIT_KEYWORD:
            // Deliberately treated as NOT expression starts here (they start actions).
        default:
            return false;
    }
}
/**
 * Parse sync send action.
 * <p>
 * <code>sync-send-action := expression ->> peer-worker</code>
 *
 * @param expression LHS expression of the sync send action
 * @return Sync send action node
 */
private STNode parseSyncSendAction(STNode expression) {
    // Arguments are evaluated left to right: `->>` token first, then the worker name.
    return STNodeFactory.createSyncSendActionNode(expression, parseSyncSendToken(), parsePeerWorkerName());
}
/**
 * Parse peer worker.
 * <p>
 * <code>peer-worker := worker-name | function</code>
 *
 * @return peer worker name node
 */
private STNode parsePeerWorkerName() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN || token.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    // Recover (insert/remove tokens) and retry until a worker name is produced.
    recover(token, ParserRuleContext.PEER_WORKER_NAME);
    return parsePeerWorkerName();
}
/**
 * Parse sync send token.
 * <p>
 * <code>sync-send-token := ->> </code>
 *
 * @return sync send token
 */
private STNode parseSyncSendToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SYNC_SEND_TOKEN) {
        recover(nextToken, ParserRuleContext.SYNC_SEND_TOKEN);
        return parseSyncSendToken();
    }
    return consume();
}
/**
 * Parse receive action.
 * <p>
 * <code>receive-action := single-receive-action | multiple-receive-action</code>
 *
 * @return Receive action
 */
private STNode parseReceiveAction() {
    // `<-` first, then either a single worker name or a `{ ... }` worker list.
    return STNodeFactory.createReceiveActionNode(parseLeftArrowToken(), parseReceiveWorkers());
}
/**
 * Parse the worker part of a receive action: either a single peer-worker name
 * or a braced list of receive fields.
 *
 * @return Receive workers node
 */
private STNode parseReceiveWorkers() {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            return parseMultipleReceiveWorkers();
        case FUNCTION_KEYWORD:
        case IDENTIFIER_TOKEN:
            return parsePeerWorkerName();
        default:
            recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
            return parseReceiveWorkers();
    }
}
/**
 * Parse multiple worker receivers.
 * <p>
 * <code>{ receive-field (, receive-field)* }</code>
 *
 * @return Multiple worker receiver node
 */
private STNode parseMultipleReceiveWorkers() {
    startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
    STNode openBraceToken = parseOpenBrace();
    STNode fields = parseReceiveFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // An empty field list is a syntax error; attach the diagnostic to the open brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(fields, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
    return STNodeFactory.createReceiveFieldsNode(openBraceToken, fields, closeBraceToken);
}
/**
 * Parse the comma-separated receive fields of a multiple-receive action.
 * Separator tokens are kept in the resulting node list.
 *
 * @return Node list of receive fields and separators (possibly empty)
 */
private STNode parseReceiveFields() {
    if (isEndOfReceiveFields(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    List<STNode> fieldList = new ArrayList<>();
    fieldList.add(parseReceiveField());

    // Consume ("," receive-field) pairs until the list terminates.
    while (!isEndOfReceiveFields(peek().kind)) {
        STNode separator = parseReceiveFieldEnd();
        if (separator == null) {
            break;
        }
        fieldList.add(separator);
        fieldList.add(parseReceiveField());
    }

    return STNodeFactory.createNodeList(fieldList);
}
/**
 * Check whether the receive-field list has ended.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} on EOF or closing brace
 */
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
/**
 * Parse the separator after a receive field.
 *
 * @return Comma token, or {@code null} when the field list has ended
 */
private STNode parseReceiveFieldEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null; // end of list
    }
    recover(nextToken, ParserRuleContext.RECEIVE_FIELD_END);
    return parseReceiveFieldEnd();
}
/**
 * Parse receive field.
 * <p>
 * <code>receive-field := peer-worker | field-name : peer-worker</code>
 *
 * @return Receiver field node
 */
private STNode parseReceiveField() {
    switch (peek().kind) {
        case FUNCTION_KEYWORD:
            // `function` is a valid peer-worker reference.
            return STNodeFactory.createSimpleNameReferenceNode(consume());
        case IDENTIFIER_TOKEN:
            return createQualifiedReceiveField(parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME));
        default:
            recover(peek(), ParserRuleContext.RECEIVE_FIELD);
            return parseReceiveField();
    }
}
/**
 * Turn an identifier into a qualified receive field if a colon follows
 * (<code>field-name : peer-worker</code>); otherwise return it as-is.
 *
 * @param identifier Already-parsed field/worker identifier
 * @return Qualified name reference, or the identifier unchanged
 */
private STNode createQualifiedReceiveField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    return STNodeFactory.createQualifiedNameReferenceNode(identifier, colonToken, parsePeerWorkerName());
}
/**
 * Parse left arrow (<-) token.
 *
 * @return left arrow token
 */
private STNode parseLeftArrowToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LEFT_ARROW_TOKEN) {
        recover(nextToken, ParserRuleContext.LEFT_ARROW_TOKEN);
        return parseLeftArrowToken();
    }
    return consume();
}
/**
 * Parse signed right shift token (>>).
 * <p>
 * The lexer emits two adjacent {@code >} tokens; this merges them into a single
 * {@code >>} token. Whitespace between the two is not allowed.
 *
 * @return Parsed node
 */
private STNode parseSignedRightShiftToken() {
    STNode firstGT = consume();
    STToken secondGT = consume();
    STNode doubleGT = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstGT.leadingMinutiae(),
            secondGT.trailingMinutiae());
    if (!hasTrailingMinutiae(firstGT)) {
        return doubleGT;
    }
    // Whitespace between the two '>' tokens: keep the merged token but flag it.
    return SyntaxErrors.addDiagnostic(doubleGT,
            DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
}
/**
 * Parse unsigned right shift token (>>>).
 * <p>
 * Three adjacent {@code >} tokens are merged into one {@code >>>} token.
 * No whitespace is allowed between them.
 *
 * @return Parsed node
 */
private STNode parseUnsignedRightShiftToken() {
    STNode firstGT = consume();
    STNode secondGT = consume();
    STNode thirdGT = consume();
    STNode tripleGT = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
            firstGT.leadingMinutiae(), thirdGT.trailingMinutiae());
    if (hasTrailingMinutiae(firstGT) || hasTrailingMinutiae(secondGT)) {
        // Whitespace between the '>' tokens: keep the merged token but flag it.
        tripleGT = SyntaxErrors.addDiagnostic(tripleGT,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
    }
    return tripleGT;
}
/**
 * Parse wait action.
 * <p>
 * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
 *
 * @return Wait action node
 */
private STNode parseWaitAction() {
    STNode waitKeyword = parseWaitKeyword();
    // `wait {` starts a multiple-wait action; anything else is single/alternate.
    return peek().kind == SyntaxKind.OPEN_BRACE_TOKEN
            ? parseMultiWaitAction(waitKeyword)
            : parseSingleOrAlternateWaitAction(waitKeyword);
}
/**
 * Parse wait keyword.
 *
 * @return wait keyword
 */
private STNode parseWaitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WAIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.WAIT_KEYWORD);
        return parseWaitKeyword();
    }
    return consume();
}
/**
 * Parse single or alternate wait actions.
 * <p>
 * <code>
 * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
 * <br/>
 * wait-future-expr := expression but not mapping-constructor-expr
 * </code>
 *
 * @param waitKeyword wait keyword
 * @return Single or alternate wait action node
 */
private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
    STToken nextToken = peek();

    // `wait` immediately followed by a list terminator: synthesize a missing
    // future expression so the tree stays well-formed, and flag it.
    if (isEndOfWaitFutureExprList(nextToken.kind)) {
        endContext();
        STNode waitFutureExprs = STNodeFactory
                .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
    }

    // Collect wait-future-exprs separated by `|` (separators are also added to the list).
    List<STNode> waitFutureExprList = new ArrayList<>();
    STNode waitField = parseWaitFutureExpr();
    waitFutureExprList.add(waitField);

    nextToken = peek();
    STNode waitFutureExprEnd;
    while (!isEndOfWaitFutureExprList(nextToken.kind)) {
        waitFutureExprEnd = parseWaitFutureExprEnd();
        if (waitFutureExprEnd == null) {
            break;
        }

        waitFutureExprList.add(waitFutureExprEnd);
        waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
    }

    endContext();
    // NOTE(review): only element 0 of the collected list is attached to the wait
    // action node. Alternate exprs after `|` are usually folded into a single
    // binary expression by parseWaitFutureExpr's expression parsing, but if the
    // loop above ever collects more than one element those are silently dropped
    // — confirm this is intended.
    return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
}
/**
 * Check whether the alternate-wait expression list has ended.
 * A pipe token continues the list, so it does NOT end it.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} when the list terminates here
 */
private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case OPEN_BRACE_TOKEN:
            return true;
        default:
            return false;
    }
}
/**
 * Parse a wait-future-expr. Mapping constructors and actions are not valid
 * here; when found, they are kept in the tree but flagged with a diagnostic.
 *
 * @return Wait future expression node
 */
private STNode parseWaitFutureExpr() {
    STNode expr = parseActionOrExpression();
    if (expr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
        return SyntaxErrors.addDiagnostic(expr,
                DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
    }
    if (isAction(expr)) {
        return SyntaxErrors.addDiagnostic(expr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
    }
    return expr;
}
/**
 * Parse the separator in an alternate-wait expression list.
 *
 * @return Pipe token, or {@code null} when the list has ended
 */
private STNode parseWaitFutureExprEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
        return parsePipeToken();
    }
    // No pipe: stop if the list has ended or nothing expression-like follows.
    if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
        return null;
    }
    recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
    return parseWaitFutureExprEnd();
}
/**
 * Parse multiple wait action.
 * <p>
 * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
 *
 * @param waitKeyword Wait keyword
 * @return Multiple wait action node
 */
private STNode parseMultiWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
    STNode openBraceToken = parseOpenBrace();
    STNode fields = parseWaitFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // An empty wait-field list is a syntax error; attach the diagnostic to the open brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(fields, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
    STNode fieldsListNode = STNodeFactory.createWaitFieldsListNode(openBraceToken, fields, closeBraceToken);
    return STNodeFactory.createWaitActionNode(waitKeyword, fieldsListNode);
}
/**
 * Parse the comma-separated wait fields of a multiple-wait action.
 * Separator tokens are kept in the resulting node list.
 *
 * @return Node list of wait fields and separators (possibly empty)
 */
private STNode parseWaitFields() {
    if (isEndOfWaitFields(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    List<STNode> fieldList = new ArrayList<>();
    fieldList.add(parseWaitField());

    // Consume ("," wait-field) pairs until the list terminates.
    while (!isEndOfWaitFields(peek().kind)) {
        STNode separator = parseWaitFieldEnd();
        if (separator == null) {
            break;
        }
        fieldList.add(separator);
        fieldList.add(parseWaitField());
    }

    return STNodeFactory.createNodeList(fieldList);
}
/**
 * Check whether the wait-field list has ended.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} on EOF or closing brace
 */
private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
/**
 * Parse the separator after a wait field.
 *
 * @return Comma token, or {@code null} when the field list has ended
 */
private STNode parseWaitFieldEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null; // end of list
    }
    recover(nextToken, ParserRuleContext.WAIT_FIELD_END);
    return parseWaitFieldEnd();
}
/**
 * Parse wait field.
 * <p>
 * <code>wait-field := variable-name | field-name : wait-future-expr</code>
 *
 * @return Receiver field node
 */
private STNode parseWaitField() {
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
        return createQualifiedWaitField(STNodeFactory.createSimpleNameReferenceNode(fieldName));
    }
    recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
    return parseWaitField();
}
/**
 * Turn an identifier into a qualified wait field if a colon follows
 * (<code>field-name : wait-future-expr</code>); otherwise return it as-is.
 *
 * @param identifier Already-parsed field name reference
 * @return Wait field node, or the identifier unchanged
 */
private STNode createQualifiedWaitField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    return STNodeFactory.createWaitFieldNode(identifier, colonToken, parseWaitFutureExpr());
}
/**
 * Parse annot access expression.
 * <p>
 * <code>
 * annot-access-expr := expression .@ annot-tag-reference
 * <br/>
 * annot-tag-reference := qualified-identifier | identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the annot access access
 * @param isInConditionalExpr Whether parsing happens inside a conditional expression
 * @return Parsed node
 */
private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    // Arguments are evaluated left to right: `.@` token first, then the tag reference.
    return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, parseAnnotChainingToken(),
            parseFieldAccessIdentifier(isInConditionalExpr));
}
/**
 * Parse annot-chaining-token (.@).
 *
 * @return Parsed node
 */
private STNode parseAnnotChainingToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ANNOT_CHAINING_TOKEN) {
        recover(nextToken, ParserRuleContext.ANNOT_CHAINING_TOKEN);
        return parseAnnotChainingToken();
    }
    return consume();
}
/**
 * Parse field access identifier.
 * <p>
 * <code>field-access-identifier := qualified-identifier | identifier</code>
 *
 * @param isInConditionalExpr Whether parsing happens inside a conditional expression
 *                            (affects qualified-name disambiguation downstream)
 * @return Parsed node
 */
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
    return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
}
/**
 * Parse query action.
 * <p>
 * <code>query-action := query-pipeline do-clause
 * <br/>
 * do-clause := do block-stmt
 * </code>
 *
 * @param queryConstructType Query construct type. This is only for validation
 * @param queryPipeline Query pipeline
 * @param selectClause Select clause if any This is only for validation.
 * @param isRhsExpr Whether this is parsed as the RHS of an expression (currently unused here)
 * @return Query action node
 */
private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause,
                                boolean isRhsExpr) {
    // A query ACTION may not have a construct type (table/stream/map) ...
    if (queryConstructType != null) {
        queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
    }
    // ... nor a select clause; both are attached to the pipeline as invalid-node
    // minutiae so no input text is lost.
    if (selectClause != null) {
        queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
    }
    startContext(ParserRuleContext.DO_CLAUSE);
    STNode doKeyword = parseDoKeyword();
    STNode blockStmt = parseBlockNode();
    endContext();
    return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
}
/**
 * Parse 'do' keyword.
 *
 * @return do keyword node
 */
private STNode parseDoKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DO_KEYWORD) {
        recover(nextToken, ParserRuleContext.DO_KEYWORD);
        return parseDoKeyword();
    }
    return consume();
}
/**
 * Parse optional field access or xml optional attribute access expression.
 * <p>
 * <code>
 * optional-field-access-expr := expression ?. field-name
 * <br/>
 * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
 * <br/>
 * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
 * <br/>
 * xml-qualified-name := xml-namespace-prefix : identifier
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the optional access
 * @param isInConditionalExpr Whether parsing happens inside a conditional expression
 * @return Parsed node
 */
private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    // Arguments are evaluated left to right: `?.` token first, then the field name.
    return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, parseOptionalChainingToken(),
            parseFieldAccessIdentifier(isInConditionalExpr));
}
/**
 * Parse optional chaining token (?.).
 *
 * @return parsed node
 */
private STNode parseOptionalChainingToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
        recover(nextToken, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
        return parseOptionalChainingToken();
    }
    return consume();
}
/**
 * Parse conditional expression.
 * <p>
 * <code>conditional-expr := expression ? expression : expression</code>
 *
 * @param lhsExpr Preceding expression of the question mark
 * @return Parsed node
 */
private STNode parseConditionalExpression(STNode lhsExpr) {
    startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
    STNode questionMark = parseQuestionMark();
    STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);
    STNode nextToken = peek();
    STNode endExpr;
    STNode colon;
    // Ambiguity: in `a ? b : c`, the middle part `b : c` may have been parsed as a
    // single qualified name reference (module-prefix:identifier). If no further colon
    // follows, split that qualified name back into middle expr, colon, and end expr.
    if (nextToken.kind != SyntaxKind.COLON_TOKEN && middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) middleExpr;
        STNode modulePrefix = qualifiedNameRef.modulePrefix;
        if (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            middleExpr = STNodeFactory.createSimpleNameReferenceNode(modulePrefix);
        } else {
            // Prefix was a predeclared built-in type name; keep it as-is.
            middleExpr = modulePrefix;
        }
        colon = qualifiedNameRef.colon;
        endContext();
        endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
    } else {
        // A real colon follows: any qualified name in the middle expr is genuine,
        // but a built-in-type prefix still needs normalizing to an identifier.
        if (middleExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            middleExpr = generateQualifiedNameRef(middleExpr);
        }
        colon = parseColon();
        endContext();
        endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false);
    }
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
}
/**
 * Normalize a qualified name reference whose module prefix was parsed as a
 * predeclared built-in type name: re-create the prefix as a plain identifier
 * token, preserving its text and minutiae. Identifier prefixes pass through.
 *
 * @param qualifiedName Qualified name reference to normalize
 * @return Normalized qualified name reference
 */
private STNode generateQualifiedNameRef(STNode qualifiedName) {
    STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) qualifiedName;
    STNode modulePrefix = qualifiedNameRef.modulePrefix;
    if (modulePrefix.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        // Already a plain identifier prefix; nothing to rewrite.
        return qualifiedName;
    }
    STBuiltinSimpleNameReferenceNode builtInType = (STBuiltinSimpleNameReferenceNode) modulePrefix;
    STToken nameToken = (STToken) builtInType.name;
    STNode preDeclaredPrefix = STNodeFactory.createIdentifierToken(nameToken.text(),
            nameToken.leadingMinutiae(), nameToken.trailingMinutiae());
    return STNodeFactory.createQualifiedNameReferenceNode(preDeclaredPrefix, qualifiedNameRef.colon,
            qualifiedNameRef.identifier);
}
/**
 * Parse enum declaration.
 * <p>
 * module-enum-decl :=
 * metadata
 * [public] enum identifier { enum-member (, enum-member)* }
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @param metadata  Metadata preceding the declaration
 * @param qualifier Optional `public` qualifier
 * @return Parsed enum node.
 */
private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
    STNode enumKeyword = parseEnumKeyword();
    STNode enumName = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
    STNode openBrace = parseOpenBrace();
    STNode members = parseEnumMemberList();
    STNode closeBrace = parseCloseBrace();
    endContext();

    // An enum with no members is a syntax error; attach the diagnostic to the open brace.
    openBrace = cloneWithDiagnosticIfListEmpty(members, openBrace,
            DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
    return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeyword, enumName,
            openBrace, members, closeBrace);
}
/**
 * Parse 'enum' keyword.
 *
 * @return enum keyword node
 */
private STNode parseEnumKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ENUM_KEYWORD) {
        recover(nextToken, ParserRuleContext.ENUM_KEYWORD);
        return parseEnumKeyword();
    }
    return consume();
}
/**
 * Parse enum member list.
 * <p>
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @return enum member list node.
 */
private STNode parseEnumMemberList() {
    startContext(ParserRuleContext.ENUM_MEMBER_LIST);

    // Empty member list: close the context opened above before returning.
    // Previously this early return skipped endContext(), leaving the parser's
    // context stack unbalanced for subsequent error recovery.
    if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }

    // First enum member, then ("," enum-member) pairs; separators are kept in the list.
    List<STNode> enumMemberList = new ArrayList<>();
    STNode enumMember = parseEnumMember();

    STNode enumMemberRhs;
    while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        enumMemberRhs = parseEnumMemberEnd();
        if (enumMemberRhs == null) {
            break;
        }
        enumMemberList.add(enumMember);
        enumMemberList.add(enumMemberRhs);
        enumMember = parseEnumMember();
    }

    enumMemberList.add(enumMember);
    endContext();
    return STNodeFactory.createNodeList(enumMemberList);
}
/**
 * Parse enum member.
 * <p>
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @return Parsed enum member node.
 */
private STNode parseEnumMember() {
    STToken nextToken = peek();
    STNode metadata;
    // Metadata (doc string and/or annotations) is optional on each member.
    if (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING || nextToken.kind == SyntaxKind.AT_TOKEN) {
        metadata = parseMetaData();
    } else {
        metadata = STNodeFactory.createEmptyNode();
    }
    STNode memberName = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
    return parseEnumMemberRhs(metadata, memberName);
}
/**
 * Parse the optional <code>= const-expr</code> part of an enum member.
 *
 * @param metadata       Member metadata (may be empty)
 * @param identifierNode Member name identifier
 * @return Enum member node
 */
private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
    STNode assignOp;
    STNode valueExpr;
    switch (peek().kind) {
        case EQUAL_TOKEN:
            // Explicit member value: identifier = const-expr
            assignOp = parseAssignOp();
            valueExpr = parseExpression();
            break;
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // No explicit value.
            assignOp = STNodeFactory.createEmptyNode();
            valueExpr = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS, metadata, identifierNode);
            return parseEnumMemberRhs(metadata, identifierNode);
    }
    return STNodeFactory.createEnumMemberNode(metadata, identifierNode, assignOp, valueExpr);
}
/**
 * Parse the separator after an enum member.
 *
 * @return Comma token, or {@code null} when the member list has ended
 */
private STNode parseEnumMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null; // end of list
    }
    recover(nextToken, ParserRuleContext.ENUM_MEMBER_END);
    return parseEnumMemberEnd();
}
/**
 * Disambiguate between a transaction statement (`transaction { ... }`) and a
 * variable declaration whose type starts with the `transaction` identifier
 * used as a module prefix (`transaction:Info info = ...`).
 *
 * @param annots             Annotations preceding the statement
 * @param qualifiers         Qualifiers preceding the statement
 * @param transactionKeyword Already-consumed `transaction` keyword token
 * @return Transaction statement or variable declaration node
 */
private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            // `transaction {` — a transaction statement; annots/qualifiers are invalid here.
            reportInvalidStatementAnnots(annots, qualifiers);
            reportInvalidQualifierList(qualifiers);
            return parseTransactionStatement(transactionKeyword);
        case COLON_TOKEN:
            // `transaction:Identifier` — treat `transaction` as a qualified-name prefix.
            if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            // Intentional fall-through: `transaction:` not followed by an identifier
            // goes through the recovery path below.
        default:
            Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
            // If recovery decided the token belongs to a type reference, parse as a var-decl.
            if (solution.action == Action.KEEP ||
                    (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
    }
}
/**
 * Parse transaction statement.
 * <p>
 * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
 *
 * @param transactionKeyword Already-consumed `transaction` keyword
 * @return Transaction statement node
 */
private STNode parseTransactionStatement(STNode transactionKeyword) {
    startContext(ParserRuleContext.TRANSACTION_STMT);
    STNode body = parseBlockNode();
    endContext();
    // The optional on-fail clause sits outside the transaction context.
    return STNodeFactory.createTransactionStatementNode(transactionKeyword, body, parseOptionalOnFailClause());
}
/**
 * Parse commit action.
 * <p>
 * <code>commit-action := "commit"</code>
 *
 * @return Commit action node
 */
private STNode parseCommitAction() {
    return STNodeFactory.createCommitActionNode(parseCommitKeyword());
}
/**
 * Parse commit keyword.
 *
 * @return parsed node
 */
private STNode parseCommitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.COMMIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.COMMIT_KEYWORD);
        return parseCommitKeyword();
    }
    return consume();
}
/**
 * Parse retry statement.
 * <p>
 * <code>
 * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
 * <br/>
 * retry-spec := [type-parameter] [ `(` arg-list `)` ]
 * </code>
 *
 * @return Retry statement node
 */
private STNode parseRetryStatement() {
    // NOTE: the RETRY_STMT context opened here is closed inside
    // parseRetryTypeParamRhs (via parseRetryKeywordRhs) once the body is parsed.
    startContext(ParserRuleContext.RETRY_STMT);
    STNode retryKeyword = parseRetryKeyword();
    return parseRetryKeywordRhs(retryKeyword);
}
/**
 * Parse what follows the `retry` keyword: an optional type parameter, then the
 * rest of the retry spec and body.
 *
 * @param retryKeyword Already-consumed `retry` keyword
 * @return Retry statement node
 */
private STNode parseRetryKeywordRhs(STNode retryKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case LT_TOKEN:
            // retry<Type> ...
            return parseRetryTypeParamRhs(retryKeyword, parseTypeParameter());
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            // No type parameter.
            return parseRetryTypeParamRhs(retryKeyword, STNodeFactory.createEmptyNode());
        default:
            recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS, retryKeyword);
            return parseRetryKeywordRhs(retryKeyword);
    }
}
/**
 * Parse the remainder of a retry statement after the optional type parameter:
 * an optional parenthesized arg list, the retry body, and an optional on-fail clause.
 *
 * @param retryKeyword Already-consumed `retry` keyword
 * @param typeParam    Parsed type parameter, or an empty node
 * @return Retry statement node
 */
private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
    STNode args;
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            args = parseParenthesizedArgList();
            break;
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            // No arg list; the body follows directly.
            args = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS, retryKeyword, typeParam);
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
    }
    STNode blockStmt = parseRetryBody();
    // Closes the RETRY_STMT context opened in parseRetryStatement.
    endContext();
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
}
/**
 * Parse the retry body: either a plain block statement or a transaction
 * statement (`retry transaction { ... }`).
 *
 * @return Retry body node
 */
private STNode parseRetryBody() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseBlockNode();
    }
    if (nextToken.kind == SyntaxKind.TRANSACTION_KEYWORD) {
        return parseTransactionStatement(consume());
    }
    recover(peek(), ParserRuleContext.RETRY_BODY);
    return parseRetryBody();
}
/**
 * Parse optional on fail clause.
 *
 * @return On-fail clause node, or an empty node when absent
 */
private STNode parseOptionalOnFailClause() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
        return parseOnFailClause();
    }
    if (!isEndOfRegularCompoundStmt(nextToken.kind)) {
        // Neither `on` nor a statement boundary: recover and retry.
        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
        return parseOptionalOnFailClause();
    }
    return STNodeFactory.createEmptyNode();
}
/**
 * Check whether a compound statement (transaction/retry/etc.) ends at the given token.
 *
 * @param nodeKind Kind of the next token
 * @return {@code true} if the compound statement terminates here
 */
private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
    // Tokens that unambiguously terminate the enclosing statement.
    if (nodeKind == SyntaxKind.CLOSE_BRACE_TOKEN || nodeKind == SyntaxKind.SEMICOLON_TOKEN
            || nodeKind == SyntaxKind.AT_TOKEN || nodeKind == SyntaxKind.EOF_TOKEN) {
        return true;
    }
    // Otherwise the statement ends if a new statement begins.
    return isStatementStartingToken(nodeKind);
}
/**
 * Check whether the given token can start a statement.
 *
 * @param nodeKind Kind of the next token
 * @return {@code true} if a statement can start here
 */
private boolean isStatementStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        // Statement keywords and tokens that directly begin a statement.
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case TYPE_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case MATCH_KEYWORD:

        // Expression-statement starters.
        case FAIL_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:

        // Worker declaration.
        case WORKER_KEYWORD:
            return true;
        default:
            // A type descriptor start implies a local var-decl statement.
            if (isTypeStartingToken(nodeKind)) {
                return true;
            }
            // Any valid expression start can begin an expression statement.
            if (isValidExpressionStart(nodeKind, 1)) {
                return true;
            }
            return false;
    }
}
/**
 * Parse on fail clause.
 * <p>
 * <code>
 * on-fail-clause := on fail typed-binding-pattern statement-block
 * </code>
 *
 * @return On fail clause node
 */
private STNode parseOnFailClause() {
    startContext(ParserRuleContext.ON_FAIL_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode failKeyword = parseFailKeyword();
    // Error variable: type descriptor followed by the variable name.
    STNode errorTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    STNode errorVarName = parseIdentifier(ParserRuleContext.VARIABLE_REF);
    STNode failBlock = parseBlockNode();
    endContext();
    return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, errorTypeDesc, errorVarName,
            failBlock);
}
/**
 * Parse retry keyword.
 *
 * @return parsed node
 */
private STNode parseRetryKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RETRY_KEYWORD) {
        recover(nextToken, ParserRuleContext.RETRY_KEYWORD);
        return parseRetryKeyword();
    }
    return consume();
}
/**
 * Parse rollback statement.
 * <p>
 * <code>rollback-stmt := "rollback" [expression] ";"</code>
 *
 * @return Rollback statement node
 */
private STNode parseRollbackStatement() {
    startContext(ParserRuleContext.ROLLBACK_STMT);
    STNode rollbackKeyword = parseRollbackKeyword();
    // The rollback expression is optional; a semicolon means it was omitted.
    STNode expression = peek().kind == SyntaxKind.SEMICOLON_TOKEN
            ? STNodeFactory.createEmptyNode()
            : parseExpression();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}
/**
 * Parse rollback keyword.
 *
 * @return Rollback keyword node
 */
private STNode parseRollbackKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ROLLBACK_KEYWORD) {
        recover(nextToken, ParserRuleContext.ROLLBACK_KEYWORD);
        return parseRollbackKeyword();
    }
    return consume();
}
/**
 * Parse transactional expression.
 * <p>
 * <code>transactional-expr := "transactional"</code>
 *
 * @return Transactional expression node
 */
private STNode parseTransactionalExpression() {
    return STNodeFactory.createTransactionalExpressionNode(parseTransactionalKeyword());
}
/**
 * Parse transactional keyword.
 *
 * @return Transactional keyword node
 */
private STNode parseTransactionalKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TRANSACTIONAL_KEYWORD) {
        recover(nextToken, ParserRuleContext.TRANSACTIONAL_KEYWORD);
        return parseTransactionalKeyword();
    }
    return consume();
}
/**
 * Parse base16 literal.
 * <p>
 * <code>
 * byte-array-literal := Base16Literal | Base64Literal
 * <br/>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * </code>
 *
 * @return parsed node
 */
private STNode parseByteArrayLiteral() {
    STNode type = peek().kind == SyntaxKind.BASE16_KEYWORD ? parseBase16Keyword() : parseBase64Keyword();

    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    if (!startingBackTick.isMissing()) {
        STNode content = parseByteArrayContent();
        return parseByteArrayLiteral(type, startingBackTick, content);
    }

    // No opening backtick could be recovered: synthesize an empty literal with
    // both backticks missing and flag the missing content.
    startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
    STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
    STNode content = STNodeFactory.createEmptyNode();
    STNode byteArrayLiteral =
            STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
    return SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
}
/**
 * Parse byte array literal.
 *
 * @param typeKeyword keyword token, possible values are `base16` and `base64`
 * @param startingBackTick starting backtick token
 * @param byteArrayContent byte array literal content to be validated
 * @return parsed byte array literal node
 */
private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
    STNode content = STNodeFactory.createEmptyNode();
    STNode newStartingBackTick = startingBackTick;
    STNodeList items = (STNodeList) byteArrayContent;
    if (items.size() == 1) {
        // Exactly one content item: validate it against the declared encoding.
        // Invalid content is attached to the opening backtick as invalid-node
        // minutiae (so no input text is lost) with an encoding-specific diagnostic.
        STNode item = items.get(0);
        if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
            // Interpolations etc. are not allowed inside a byte array literal.
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else {
            content = item;
        }
    } else if (items.size() > 1) {
        // Multiple items (e.g. interpolations split the template): all of them are
        // invalid; fold every item into the opening backtick, then add one diagnostic.
        STNode clonedStartingBackTick = startingBackTick;
        for (int index = 0; index < items.size(); index++) {
            STNode item = items.get(index);
            clonedStartingBackTick =
                    SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
        }
        newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
    }
    // items.size() == 0 falls through: empty content is valid.
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
}
/**
* Parse <code>base16</code> keyword.
*
* @return base16 keyword node
*/
private STNode parseBase16Keyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BASE16_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BASE16_KEYWORD);
return parseBase16Keyword();
}
}
/**
* Parse <code>base64</code> keyword.
*
* @return base64 keyword node
*/
private STNode parseBase64Keyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BASE64_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BASE64_KEYWORD);
return parseBase64Keyword();
}
}
/**
* Validate and parse byte array literal content.
* An error is reported, if the content is invalid.
*
* @return parsed node
*/
private STNode parseByteArrayContent() {
STToken nextToken = peek();
List<STNode> items = new ArrayList<>();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode content = parseTemplateItem();
items.add(content);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
/**
* Validate base16 literal content.
* <p>
* <code>
* Base16Literal := base16 WS ` HexGroup* WS `
* <br/>
* HexGroup := WS HexDigit WS HexDigit
* <br/>
* WS := WhiteSpaceChar*
* <br/>
* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
* </code>
*
* @param content the string surrounded by the backticks
* @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
*/
static boolean isValidBase16LiteralContent(String content) {
char[] charArray = content.toCharArray();
int hexDigitCount = 0;
for (char c : charArray) {
switch (c) {
case LexerTerminals.TAB:
case LexerTerminals.NEWLINE:
case LexerTerminals.CARRIAGE_RETURN:
case LexerTerminals.SPACE:
break;
default:
if (isHexDigit(c)) {
hexDigitCount++;
} else {
return false;
}
break;
}
}
return hexDigitCount % 2 == 0;
}
/**
* Validate base64 literal content.
* <p>
* <code>
* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
* <br/>
* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
* <br/>
* PaddedBase64Group :=
* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
* <br/>
* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
* <br/>
* PaddingChar := =
* <br/>
* WS := WhiteSpaceChar*
* <br/>
* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
* </code>
*
* @param content the string surrounded by the backticks
* @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
*/
static boolean isValidBase64LiteralContent(String content) {
char[] charArray = content.toCharArray();
int base64CharCount = 0;
int paddingCharCount = 0;
for (char c : charArray) {
switch (c) {
case LexerTerminals.TAB:
case LexerTerminals.NEWLINE:
case LexerTerminals.CARRIAGE_RETURN:
case LexerTerminals.SPACE:
break;
case LexerTerminals.EQUAL:
paddingCharCount++;
break;
default:
if (isBase64Char(c)) {
if (paddingCharCount == 0) {
base64CharCount++;
} else {
return false;
}
} else {
return false;
}
break;
}
}
if (paddingCharCount > 2) {
return false;
} else if (paddingCharCount == 0) {
return base64CharCount % 4 == 0;
} else {
return base64CharCount % 4 == 4 - paddingCharCount;
}
}
/**
* <p>
* Check whether a given char is a base64 char.
* </p>
* <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
*
* @param c character to check
* @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
*/
static boolean isBase64Char(int c) {
if ('a' <= c && c <= 'z') {
return true;
}
if ('A' <= c && c <= 'Z') {
return true;
}
if (c == '+' || c == '/') {
return true;
}
return isDigit(c);
}
static boolean isHexDigit(int c) {
if ('a' <= c && c <= 'f') {
return true;
}
if ('A' <= c && c <= 'F') {
return true;
}
return isDigit(c);
}
static boolean isDigit(int c) {
return ('0' <= c && c <= '9');
}
/**
* Parse xml filter expression.
* <p>
* <code>xml-filter-expr := expression .< xml-name-pattern ></code>
*
* @param lhsExpr Preceding expression of .< token
* @return Parsed node
*/
private STNode parseXMLFilterExpression(STNode lhsExpr) {
STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
}
/**
* Parse xml filter expression rhs.
* <p>
* <code>filer-expression-rhs := .< xml-name-pattern ></code>
*
* @return Parsed node
*/
private STNode parseXMLFilterExpressionRhs() {
STNode dotLTToken = parseDotLTToken();
return parseXMLNamePatternChain(dotLTToken);
}
/**
* Parse xml name pattern chain.
* <p>
* <code>
* xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
* <br/>
* filer-expression-rhs := .< xml-name-pattern >
* <br/>
* xml-element-children-step := /< xml-name-pattern >
* <br/>
* xml-element-descendants-step := /**\/<xml-name-pattern >
* </code>
*
* @param startToken Preceding token of xml name pattern
* @return Parsed node
*/
private STNode parseXMLNamePatternChain(STNode startToken) {
startContext(ParserRuleContext.XML_NAME_PATTERN);
STNode xmlNamePattern = parseXMLNamePattern();
STNode gtToken = parseGTToken();
endContext();
startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
}
/**
* Parse <code> .< </code> token.
*
* @return Parsed node
*/
private STNode parseDotLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
return parseDotLTToken();
}
}
/**
* Parse xml name pattern.
* <p>
* <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
*
* @return Parsed node
*/
private STNode parseXMLNamePattern() {
List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfXMLNamePattern(nextToken.kind)) {
return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}
STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
STNode separator;
while (!isEndOfXMLNamePattern(peek().kind)) {
separator = parseXMLNamePatternSeparator();
if (separator == null) {
break;
}
xmlAtomicNamePatternList.add(separator);
xmlAtomicNamePattern = parseXMLAtomicNamePattern();
xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
}
return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}
private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
switch (tokenKind) {
case GT_TOKEN:
case EOF_TOKEN:
return true;
case IDENTIFIER_TOKEN:
case ASTERISK_TOKEN:
case COLON_TOKEN:
default:
return false;
}
}
private STNode parseXMLNamePatternSeparator() {
STToken token = peek();
switch (token.kind) {
case PIPE_TOKEN:
return consume();
case GT_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
return parseXMLNamePatternSeparator();
}
}
/**
* Parse xml atomic name pattern.
* <p>
* <code>
* xml-atomic-name-pattern :=
* *
* | identifier
* | xml-namespace-prefix : identifier
* | xml-namespace-prefix : *
* </code>
*
* @return Parsed node
*/
private STNode parseXMLAtomicNamePattern() {
startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
endContext();
return atomicNamePattern;
}
    // Parses the body of an xml atomic name pattern: either `*` alone, a plain
    // identifier, or an identifier possibly qualified as `prefix:identifier`.
    private STNode parseXMLAtomicNamePatternBody() {
        STToken token = peek();
        STNode identifier;
        switch (token.kind) {
            case ASTERISK_TOKEN:
                // `*` by itself is a complete atomic name pattern.
                return consume();
            case IDENTIFIER_TOKEN:
                identifier = consume();
                break;
            default:
                // Recover and retry until one of the valid start tokens appears.
                recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
                return parseXMLAtomicNamePatternBody();
        }
        return parseXMLAtomicNameIdentifier(identifier);
    }
private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
STToken token = peek();
if (token.kind == SyntaxKind.COLON_TOKEN) {
STNode colon = consume();
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
STToken endToken = consume();
return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
}
}
return STNodeFactory.createSimpleNameReferenceNode(identifier);
}
/**
* Parse xml step expression.
* <p>
* <code>xml-step-expr := expression xml-step-start</code>
*
* @param lhsExpr Preceding expression of /*, /<, or /**\/< token
* @return Parsed node
*/
private STNode parseXMLStepExpression(STNode lhsExpr) {
STNode xmlStepStart = parseXMLStepStart();
return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);
}
    /**
     * Parse xml step start.
     * <p>
     * <code>
     * xml-step-start :=
     * xml-all-children-step
     * | xml-element-children-step
     * | xml-element-descendants-step
     * <br/>
     * xml-all-children-step := /*
     * </code>
     *
     * @return Parsed node
     */
    private STNode parseXMLStepStart() {
        STToken token = peek();
        STNode startToken;
        switch (token.kind) {
            case SLASH_ASTERISK_TOKEN:
                // `/*` takes no name pattern; the token alone is the full step start.
                return consume();
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                startToken = parseDoubleSlashDoubleAsteriskLTToken();
                break;
            case SLASH_LT_TOKEN:
            default:
                // `/<` is also the recovery target for any unexpected token.
                startToken = parseSlashLTToken();
                break;
        }
        return parseXMLNamePatternChain(startToken);
    }
/**
* Parse <code> /< </code> token.
*
* @return Parsed node
*/
private STNode parseSlashLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
return parseSlashLTToken();
}
}
/**
* Parse <code> /< </code> token.
*
* @return Parsed node
*/
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
return parseDoubleSlashDoubleAsteriskLTToken();
}
}
    /**
     * Parse match statement.
     * <p>
     * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
     *
     * @return Match statement
     */
    private STNode parseMatchStatement() {
        startContext(ParserRuleContext.MATCH_STMT);
        STNode matchKeyword = parseMatchKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        startContext(ParserRuleContext.MATCH_BODY);
        STNode openBrace = parseOpenBrace();
        List<STNode> matchClausesList = new ArrayList<>();
        while (!isEndOfMatchClauses(peek().kind)) {
            STNode clause = parseMatchClause();
            matchClausesList.add(clause);
        }
        STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);
        if (isNodeListEmpty(matchClauses)) {
            // The grammar requires at least one match-clause; flag the open brace.
            openBrace = SyntaxErrors.addDiagnostic(openBrace,
                    DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
        }
        STNode closeBrace = parseCloseBrace();
        // Close both the MATCH_BODY and the MATCH_STMT contexts opened above.
        endContext();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
                onFailClause);
    }
/**
* Parse match keyword.
*
* @return Match keyword node
*/
private STNode parseMatchKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
return consume();
} else {
recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
return parseMatchKeyword();
}
}
private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse a single match match clause.
* <p>
* <code>
* match-clause := match-pattern-list [match-guard] => block-stmt
* <br/>
* match-guard := if expression
* </code>
*
* @return A match clause
*/
private STNode parseMatchClause() {
STNode matchPatterns = parseMatchPatternList();
STNode matchGuard = parseMatchGuard();
STNode rightDoubleArrow = parseDoubleRightArrow();
STNode blockStmt = parseBlockNode();
return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
}
/**
* Parse match guard.
* <p>
* <code>match-guard := if expression</code>
*
* @return Match guard
*/
private STNode parseMatchGuard() {
switch (peek().kind) {
case IF_KEYWORD:
STNode ifKeyword = parseIfKeyword();
STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
case RIGHT_DOUBLE_ARROW_TOKEN:
return STNodeFactory.createEmptyNode();
default:
recover(peek(), ParserRuleContext.OPTIONAL_MATCH_GUARD);
return parseMatchGuard();
}
}
/**
* Parse match patterns list.
* <p>
* <code>match-pattern-list := match-pattern (| match-pattern)*</code>
*
* @return Match patterns list
*/
private STNode parseMatchPatternList() {
startContext(ParserRuleContext.MATCH_PATTERN);
List<STNode> matchClauses = new ArrayList<>();
while (!isEndOfMatchPattern(peek().kind)) {
STNode clause = parseMatchPattern();
if (clause == null) {
break;
}
matchClauses.add(clause);
STNode seperator = parseMatchPatternListMemberRhs();
if (seperator == null) {
break;
}
matchClauses.add(seperator);
}
endContext();
return STNodeFactory.createNodeList(matchClauses);
}
    // Returns true when the next token terminates a match-pattern list.
    private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
        switch (nextTokenKind) {
            case PIPE_TOKEN:
            case IF_KEYWORD:
            // NOTE(review): this checks RIGHT_ARROW_TOKEN (`->`) while the rest of
            // the match-clause parsing here uses RIGHT_DOUBLE_ARROW_TOKEN (`=>`) as
            // the clause delimiter -- confirm whether `=>` should be the terminator.
            case RIGHT_ARROW_TOKEN:
                return true;
            default:
                return false;
        }
    }
    /**
     * Parse match pattern.
     * <p>
     * <code>
     * match-pattern := var binding-pattern
     * | wildcard-match-pattern
     * | const-pattern
     * | list-match-pattern
     * | mapping-match-pattern
     * | error-match-pattern
     * </code>
     *
     * @return Match pattern
     */
    private STNode parseMatchPattern() {
        switch (peek().kind) {
            // const-pattern starters: nil/boolean/numeric/string literals, possibly signed.
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseSimpleConstExpr();
            case IDENTIFIER_TOKEN:
                // Ambiguous: a const reference, or the type-ref of an error match
                // pattern whose `error` keyword is missing. Disambiguated below.
                STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
                return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
            case VAR_KEYWORD:
                return parseVarTypedBindingPattern();
            case OPEN_BRACKET_TOKEN:
                return parseListMatchPattern();
            case OPEN_BRACE_TOKEN:
                return parseMappingMatchPattern();
            case ERROR_KEYWORD:
                return parseErrorMatchPattern();
            default:
                // Recover and retry until a valid match-pattern start token appears.
                recover(peek(), ParserRuleContext.MATCH_PATTERN_START);
                return parseMatchPattern();
        }
    }
private STNode parseMatchPatternListMemberRhs() {
switch (peek().kind) {
case PIPE_TOKEN:
return parsePipeToken();
case IF_KEYWORD:
case RIGHT_DOUBLE_ARROW_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
return parseMatchPatternListMemberRhs();
}
}
/**
* Parse var typed binding pattern.
* <p>
* <code>var binding-pattern</code>
* </p>
*
* @return Parsed typed binding pattern node
*/
private STNode parseVarTypedBindingPattern() {
STNode varKeyword = parseVarKeyword();
STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
STNode bindingPattern = parseBindingPattern();
return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
}
/**
* Parse var keyword.
*
* @return Var keyword node
*/
private STNode parseVarKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
return consume();
} else {
recover(nextToken, ParserRuleContext.VAR_KEYWORD);
return parseVarKeyword();
}
}
    /**
     * Parse list match pattern.
     * <p>
     * <code>
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     * match-pattern (, match-pattern)* [, rest-match-pattern]
     * | [ rest-match-pattern ]
     * </code>
     * </p>
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List<STNode> matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;
        // Collect members and their comma separators until the close bracket, or
        // until a rest match pattern is seen (rest must be the last member).
        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                isEndOfFields = true;
                break;
            }
            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }
        // Anything parsed after a rest match pattern is invalid: attach those nodes
        // to the last valid member with a diagnostic.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }
            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }
        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();
        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }
public boolean isEndOfListMatchPattern() {
switch (peek().kind) {
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return true;
default:
return false;
}
}
private STNode parseListMatchPatternMember() {
STNode nextToken = peek();
switch (nextToken.kind) {
case ELLIPSIS_TOKEN:
return parseRestMatchPattern();
default:
return parseMatchPattern();
}
}
/**
* Parse rest match pattern.
* <p>
* <code>
* rest-match-pattern := ... var variable-name
* </code>
* </p>
*
* @return Parsed rest match pattern node
*/
private STNode parseRestMatchPattern() {
startContext(ParserRuleContext.REST_MATCH_PATTERN);
STNode ellipsisToken = parseEllipsis();
STNode varKeywordToken = parseVarKeyword();
STNode variableName = parseVariableName();
endContext();
STSimpleNameReferenceNode simpleNameReferenceNode =
(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
}
private STNode parseListMatchPatternMemberRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
return parseListMatchPatternMemberRhs();
}
}
    /**
     * Parse mapping match pattern.
     * <p>
     * mapping-match-pattern := { field-match-patterns }
     * <br/>
     * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
     * | [ rest-match-pattern ]
     * <br/>
     * field-match-pattern := field-name : match-pattern
     * <br/>
     * rest-match-pattern := ... var variable-name
     * </p>
     *
     * @return Parsed Node.
     */
    private STNode parseMappingMatchPattern() {
        startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
        STNode openBraceToken = parseOpenBrace();
        List<STNode> fieldMatchPatternList = new ArrayList<>();
        STNode fieldMatchPatternRhs = null;
        boolean isEndOfFields = false;
        // Collect fields and their comma separators until the close brace, or
        // until a rest match pattern is seen (rest must be the last field).
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            fieldMatchPatternList.add(fieldMatchPatternMember);
            fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                isEndOfFields = true;
                break;
            }
            if (fieldMatchPatternRhs != null) {
                fieldMatchPatternList.add(fieldMatchPatternRhs);
            } else {
                break;
            }
        }
        // Anything parsed after a rest match pattern is invalid: attach those nodes
        // to the last valid field with a diagnostic.
        while (isEndOfFields && fieldMatchPatternRhs != null) {
            updateLastNodeInListWithInvalidNode(fieldMatchPatternList, fieldMatchPatternRhs, null);
            if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
                break;
            }
            STNode invalidField = parseFieldMatchPatternMember();
            updateLastNodeInListWithInvalidNode(fieldMatchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        }
        STNode fieldMatchPatterns = STNodeFactory.createNodeList(fieldMatchPatternList);
        STNode closeBraceToken = parseCloseBrace();
        endContext();
        return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
    }
private STNode parseFieldMatchPatternMember() {
switch (peek().kind) {
case IDENTIFIER_TOKEN:
return parseFieldMatchPattern();
case ELLIPSIS_TOKEN:
return parseRestMatchPattern();
default:
recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER);
return parseFieldMatchPatternMember();
}
}
/**
* Parse filed match pattern.
* <p>
* field-match-pattern := field-name : match-pattern
* </p>
*
* @return Parsed field match pattern node
*/
public STNode parseFieldMatchPattern() {
STNode fieldNameNode = parseVariableName();
STNode colonToken = parseColon();
STNode matchPattern = parseMatchPattern();
return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
}
public boolean isEndOfMappingMatchPattern() {
switch (peek().kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
return true;
default:
return false;
}
}
private STNode parseFieldMatchPatternRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
return parseFieldMatchPatternRhs();
}
}
    // Disambiguates an already-parsed qualified identifier: followed by `(` it is
    // the type-ref of an error match pattern whose `error` keyword is missing;
    // followed by a pattern terminator it is a plain const-pattern reference.
    private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                        ParserRuleContext.ERROR_KEYWORD);
                // This context is closed inside parseErrorMatchPattern(errorKeyword, typeRef).
                startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
                return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
            default:
                if (isMatchPatternEnd(peek().kind)) {
                    // The reference alone is a complete const-pattern.
                    return typeRefOrConstExpr;
                }
                recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN, typeRefOrConstExpr);
                return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }
    }
private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
switch (tokenKind) {
case RIGHT_DOUBLE_ARROW_TOKEN:
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_BRACKET_TOKEN:
case CLOSE_PAREN_TOKEN:
case PIPE_TOKEN:
case IF_KEYWORD:
case EOF_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse functional match pattern.
* <p>
* error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
* error-arg-list-match-pattern :=
* error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
* | [error-field-match-patterns]
* error-message-match-pattern := simple-match-pattern
* error-cause-match-pattern := simple-match-pattern | error-match-pattern
* simple-match-pattern :=
* wildcard-match-pattern
* | const-pattern
* | var variable-name
* error-field-match-patterns :=
* named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
* | rest-match-pattern
* named-arg-match-pattern := arg-name = match-pattern
* </p>
*
* @return Parsed functional match pattern node.
*/
private STNode parseErrorMatchPattern() {
startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
STNode errorKeyword = consume();
return parseErrorMatchPattern(errorKeyword);
}
    // Parses what follows the `error` keyword: either the opening parenthesis
    // directly (no type reference) or an optional error-type-reference first.
    private STNode parseErrorMatchPattern(STNode errorKeyword) {
        STToken nextToken = peek();
        STNode typeRef;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // No type reference present.
                typeRef = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    typeRef = parseTypeReference();
                    break;
                }
                // Recover and retry until `(` or an identifier is found.
                recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
                return parseErrorMatchPattern(errorKeyword);
        }
        return parseErrorMatchPattern(errorKeyword, typeRef);
    }
private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
STNode openParenthesisToken = parseOpenParenthesis();
STNode argListMatchPatternNode = parseErrorArgListMatchPatterns();
STNode closeParenthesisToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,
argListMatchPatternNode, closeParenthesisToken);
}
    // Parses the arg list of an error match pattern: an optional message (simple)
    // match pattern, then an optional cause/second arg, then named/rest fields.
    private STNode parseErrorArgListMatchPatterns() {
        List<STNode> argListMatchPatterns = new ArrayList<>();
        if (isEndOfErrorFieldMatchPatterns()) {
            return STNodeFactory.createNodeList(argListMatchPatterns);
        }
        startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
        STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
        endContext();
        if (firstArg == null) {
            // Next token was already the close paren.
            return STNodeFactory.createNodeList(argListMatchPatterns);
        }
        if (isSimpleMatchPattern(firstArg.kind)) {
            // First arg is the error-message match pattern; a second arg may follow.
            argListMatchPatterns.add(firstArg);
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
            if (argEnd != null) {
                // NOTE(review): parseErrorArgListMatchPattern can return null when the
                // next token is `)` (e.g. a trailing comma) -- confirm secondArg cannot
                // be null here, otherwise secondArg.kind would throw an NPE.
                STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
                if (isValidSecondArgMatchPattern(secondArg.kind)) {
                    argListMatchPatterns.add(argEnd);
                    argListMatchPatterns.add(secondArg);
                } else {
                    // Invalid second arg: attach both the separator and the arg as
                    // invalid-node minutiae of the last valid node.
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                            DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
                }
            }
        } else {
            // Without a message pattern, only named-arg or rest patterns may start the list.
            if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                    firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            } else {
                argListMatchPatterns.add(firstArg);
            }
        }
        parseErrorFieldMatchPatterns(argListMatchPatterns);
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }
private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
switch (matchPatternKind) {
case IDENTIFIER_TOKEN:
case SIMPLE_NAME_REFERENCE:
case NUMERIC_LITERAL:
case STRING_LITERAL:
case NULL_LITERAL:
case NIL_LITERAL:
case BOOLEAN_LITERAL:
case TYPED_BINDING_PATTERN:
case UNARY_EXPRESSION:
return true;
default:
return false;
}
}
private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
switch (syntaxKind) {
case ERROR_MATCH_PATTERN:
case NAMED_ARG_MATCH_PATTERN:
case REST_MATCH_PATTERN:
return true;
default:
if (isSimpleMatchPattern(syntaxKind)) {
return true;
}
return false;
}
}
/**
* Parse error field match patterns.
* error-field-match-patterns :=
* named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
* | rest-match-pattern
* named-arg-match-pattern := arg-name = match-pattern
* @param argListMatchPatterns
*/
private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
while (!isEndOfErrorFieldMatchPatterns()) {
STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
if (argEnd == null) {
break;
}
STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
if (errorCode == null) {
argListMatchPatterns.add(argEnd);
argListMatchPatterns.add(currentArg);
lastValidArgKind = currentArg.kind;
} else if (argListMatchPatterns.size() == 0) {
addInvalidNodeToNextToken(argEnd, null);
addInvalidNodeToNextToken(currentArg, errorCode);
} else {
updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
}
}
}
    // Error field match patterns end exactly where error field binding patterns do,
    // so delegate to the binding-pattern variant.
    private boolean isEndOfErrorFieldMatchPatterns() {
        return isEndOfErrorFieldBindingPatterns();
    }
private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
switch (peek().kind) {
case COMMA_TOKEN:
return consume();
case CLOSE_PAREN_TOKEN:
return null;
default:
recover(peek(), currentCtx);
return parseErrorArgListMatchPatternEnd(currentCtx);
}
}
    // Parses one member of an error match pattern's arg list: a rest pattern, a
    // named-arg or simple identifier pattern, any ordinary match pattern, or a
    // `var name` binding. Returns null when the next token is the close paren.
    private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case ELLIPSIS_TOKEN:
                return parseRestMatchPattern();
            case IDENTIFIER_TOKEN:
                // `name = pattern` (named arg) or a bare identifier pattern.
                return parseNamedOrSimpleMatchPattern();
            // Ordinary match-pattern starters.
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseMatchPattern();
            case VAR_KEYWORD:
                // `var variable-name` becomes a typed binding pattern.
                STNode varType = createBuiltinSimpleNameReference(consume());
                STNode variableName = createCaptureOrWildcardBP(parseVariableName());
                return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
            case CLOSE_PAREN_TOKEN:
                return null;
            default:
                recover(nextToken, context);
                return parseErrorArgListMatchPattern(context);
        }
    }
private STNode parseNamedOrSimpleMatchPattern() {
STNode identifier = consume();
STToken secondToken = peek();
switch (secondToken.kind) {
case EQUAL_TOKEN:
return parseNamedArgMatchPattern(identifier);
case COMMA_TOKEN:
case CLOSE_PAREN_TOKEN:
default:
return identifier;
}
}
/**
* Parses the next named arg match pattern.
* <br/>
* <code>named-arg-match-pattern := arg-name = match-pattern</code>
* <br/>
* <br/>
*
* @return arg match pattern list node added the new arg match pattern
*/
private STNode parseNamedArgMatchPattern(STNode identifier) {
startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
STNode equalToken = parseAssignOp();
STNode matchPattern = parseMatchPattern();
endContext();
return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
switch (currentArgKind) {
case NAMED_ARG_MATCH_PATTERN:
case REST_MATCH_PATTERN:
if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
}
return null;
default:
return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
}
}
/**
* Parse markdown documentation.
*
* @return markdown documentation node
*/
private STNode parseMarkdownDocumentation() {
List<STNode> markdownDocLineList = new ArrayList<>();
STToken nextToken = peek();
while (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING) {
STToken documentationString = consume();
STNode parsedDocLines = parseDocumentationString(documentationString);
appendParsedDocumentationLines(markdownDocLineList, parsedDocLines);
nextToken = peek();
}
STNode markdownDocLines = STNodeFactory.createNodeList(markdownDocLineList);
return STNodeFactory.createMarkdownDocumentationNode(markdownDocLines);
}
/**
 * Parse documentation string.
 *
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Hand the raw documentation text to a dedicated lexer/parser pair, carrying
    // over the token's leading trivia and any diagnostics already attached to it.
    List<STNode> leadingTrivia = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> diagnostics = new ArrayList<>(documentationStringToken.diagnostics());
    CharReader reader = CharReader.from(documentationStringToken.text());
    DocumentationLexer docLexer = new DocumentationLexer(reader, leadingTrivia, diagnostics);
    AbstractTokenReader docTokenReader = new TokenReader(docLexer);
    return new DocumentationParser(docTokenReader).parse();
}
// Copies the children of a minutiae node into a plain list, preserving order.
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    int count = leadingMinutiaeNode.bucketCount();
    List<STNode> trivia = new ArrayList<>(count);
    for (int idx = 0; idx < count; idx++) {
        trivia.add(leadingMinutiaeNode.childInBucket(idx));
    }
    return trivia;
}
// Appends every child of the parsed doc-lines node to the accumulating list.
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    for (int idx = 0, count = parsedDocLines.bucketCount(); idx < count; idx++) {
        markdownDocLineList.add(parsedDocLines.childInBucket(idx));
    }
}
/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
 *
 * @param annots Annotations
 * @param qualifiers Preceding qualifiers
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode ambiguousNode = parseTypedBindingPatternOrExpr(qualifiers, true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, ambiguousNode);
}
// Continues an ambiguous statement once the leading node is known: a typed-binding-pattern
// resolves it to a var-decl; anything else is parsed on as an expression statement.
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, new ArrayList<>(), typedBindingPatternOrExpr, false);
    }
    STNode expr = getExpression(typedBindingPatternOrExpr);
    STNode exprRhs = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true);
    return parseStatementStartWithExprRhs(exprRhs);
}
// Convenience overload: no qualifiers have been parsed yet at this point.
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    return parseTypedBindingPatternOrExpr(new ArrayList<>(), allowAssignment);
}
// Parses a node that may be either a typed-binding-pattern or an expression, dispatching
// on the next token. Qualifiers collected so far are reported as invalid on routes where
// they cannot apply.
private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // An identifier could be a type name or a variable name; stay ambiguous.
        reportInvalidQualifierList(qualifiers);
        typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
        return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // `(` could start a nil/parenthesized type-desc, a braced expr, or anon-func params.
            reportInvalidQualifierList(qualifiers);
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            // `function` could start an anon-func expr or a function-type-desc.
            return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
        case OPEN_BRACKET_TOKEN:
            // `[` could start a tuple type-desc or a list-constructor / member-access.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal could be an expression or a singleton type-desc; stay ambiguous.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                // Definitely an expression/action; no binding pattern can follow.
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Otherwise it must be a type-desc followed by a binding pattern.
            return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
    }
}
/**
 * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 * valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            // `|` could be a union type-desc or a binary (bitwise-or) expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `|=` compound assignment: the lhs is complete.
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // The rhs resolved to a typed-BP, so the `|` was a union type-desc.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createUnionTypeDesc(typeOrExpr, pipe, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // `T | U =` : a typed-BP whose variable name is missing.
                return createCaptureBPWithMissingVarName(typeOrExpr, pipe, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                    rhsTypedBPOrExpr);
        case BITWISE_AND_TOKEN:
            // `&` could be an intersection type-desc or a binary (bitwise-and) expression.
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `&=` compound assignment: the lhs is complete.
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // The rhs resolved to a typed-BP, so the `&` was an intersection type-desc.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = createIntersectionTypeDesc(typeOrExpr, ampersand, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // `T & U =` : a typed-BP whose variable name is missing.
                return createCaptureBPWithMissingVarName(typeOrExpr, ampersand, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                    rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // A type-desc followed by `;` still needs a binding pattern.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // An identifier or `?` after an ambiguous/type node means it was a type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            // Assignment: the lhs was an expression.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // `[` could be an array-type dimension or a member-access; still ambiguous.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // `{` (mapping-BP) or `error` (error-BP) resolves the node to a type-desc.
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            // Unexpected token: recover and retry.
            STToken token = peek();
            recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS, typeOrExpr, allowAssignment);
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}
// Builds a typed-binding-pattern `T | U <missing-var>` / `T & U <missing-var>` for the
// case where a union/intersection type-desc is followed by `=` without a variable name.
private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
    STNode lhs = getTypeDescFromExpr(lhsType);
    STNode rhs = getTypeDescFromExpr(rhsType);
    STNode combinedTypeDesc = separatorToken.kind == SyntaxKind.PIPE_TOKEN
            ? createUnionTypeDesc(lhs, separatorToken, rhs)
            : createIntersectionTypeDesc(lhs, separatorToken, rhs);
    // Synthesize the absent variable name as a missing identifier with a diagnostic.
    STNode missingVarName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.VARIABLE_NAME);
    STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(missingVarName);
    return STNodeFactory.createTypedBindingPatternNode(combinedTypeDesc, captureBP);
}
// Completes a typed-binding-pattern once the ambiguous node is known to be a type-desc:
// finish the (possibly complex) type-desc, then parse the binding pattern that follows.
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    STNode completeTypeDesc =
            parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
    return parseTypedBindingPatternTypeRhs(completeTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}
// Dispatches a parenthesized start: a definite type-desc must be followed by a binding
// pattern; otherwise the following token decides the ambiguity.
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode parenthesizedNode = parseTypedDescOrExprStartsWithOpenParenthesis();
    return isDefiniteTypeDesc(parenthesizedNode.kind)
            ? parseTypeBindingPatternStartsWithAmbiguousNode(parenthesizedNode)
            : parseTypedBindingPatternOrExprRhs(parenthesizedNode, false);
}
// True when the kind is definitely a type-descriptor. Relies on type-desc kinds
// occupying a contiguous range in the SyntaxKind enum, from RECORD_TYPE_DESC
// through SINGLETON_TYPE_DESC.
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.SINGLETON_TYPE_DESC) <= 0;
}
// True when the kind is definitely an expression (cannot also be a type-desc).
// Name references are excluded because they may still resolve to a type name.
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    // Expression kinds occupy a contiguous range in the SyntaxKind enum.
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}
/**
 * Parse type or expression that starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis();
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // `()` : nil literal, nil type-desc, or empty anon-func params.
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        // Actions cannot be type-descs; this is a braced action.
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        // Could still be a braced expression or an anon-func param list; decide from what follows.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    // Otherwise it is a parenthesized type-desc.
    STNode closeParen = parseCloseParenthesis();
    STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);
}
/**
 * Parse type-desc or expression. This method does not handle binding patterns.
 *
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    // Start with an empty qualifier list.
    return parseTypeDescOrExpr(new ArrayList<>());
}
// Parses a node that may be either a type-desc or an expression (no binding patterns),
// dispatching on the next token. Qualifiers collected so far are reported as invalid
// on routes where they cannot apply.
private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // `(` : nil/parenthesized type-desc, braced expr, or anon-func params.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            // `function` : anon-func expr or function-type-desc.
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            break;
        case IDENTIFIER_TOKEN:
            // An identifier could be a type name or a variable name; stay ambiguous.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            // `[` : tuple type-desc or list-constructor/member-access.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrExprStartsWithOpenBracket();
            break;
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal could be an expression or a singleton type-desc; stay ambiguous.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                // Definitely an expression/action.
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Otherwise it must be a type-desc.
            return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        // Already resolved to a type-desc: continue parsing any complex type-desc suffix.
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}
// True when the kind is an expression. Literal kinds are listed explicitly because they
// sit outside the contiguous expression range in the SyntaxKind enum.
private boolean isExpression(SyntaxKind kind) {
    if (kind == SyntaxKind.NUMERIC_LITERAL || kind == SyntaxKind.STRING_LITERAL_TOKEN
            || kind == SyntaxKind.NIL_LITERAL || kind == SyntaxKind.NULL_LITERAL
            || kind == SyntaxKind.BOOLEAN_LITERAL) {
        return true;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0
            && kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}
/**
 * Parse statement that starts with an empty parenthesis. Empty parenthesis can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    if (peek().kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // `() =>` begins an implicit anonymous function.
        STNode emptyParams = STNodeFactory.createEmptyNodeList();
        STNode anonFuncParams =
                STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, emptyParams, closeParen);
        endContext();
        return anonFuncParams;
    }
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
// Resolves a `function`-led ambiguity: expressions and actions are returned as-is,
// while a function-type-desc continues into a typed-binding-pattern.
private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
    STNode result = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
    if (isAction(result) || isExpression(result.kind)) {
        return result;
    }
    return parseTypedBindingPatternTypeRhs(result, ParserRuleContext.VAR_DECL_STMT);
}
/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @param qualifiers Preceding qualifiers
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature;
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `function (` : the signature is present; the body (if any) decides
        // between anon-func expression and function-type-desc.
        funcSignature = parseFuncSignature(true);
        qualifierList = createFuncTypeQualNodeList(qualifiers, true);
        endContext();
        return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);
    }
    // `function` without a signature can only be a function-type-desc.
    funcSignature = STNodeFactory.createEmptyNode();
    qualifierList = createFuncTypeQualNodeList(qualifiers, false);
    STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
            funcSignature);
    if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {
        // At statement level: this resolves to a var-decl.
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    // Inside a bracketed list: parse as a tuple member type-desc.
    return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
// Resolves `function <signature>` into either an explicit anon-func expression
// (when a body `{` or `=>` follows) or a function-type-desc.
private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {
    ParserRuleContext currentCtx = getCurrentContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A function body follows: this is an anonymous function expression.
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            }
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,
                    functionKeyword, funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
            // falls through: an identifier after the signature means a function-type-desc
            // followed by a variable name.
        default:
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                    funcSignature);
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                        true);
            }
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
}
// Continues an ambiguous type-desc-or-expression node based on the next token,
// resolving `|`/`&` into union/intersection type-descs or binary expressions.
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
            // `|` : union type-desc or bitwise-or expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `|=` compound assignment: the lhs is complete.
                return typeOrExpr;
            }
            STNode pipe = parsePipeToken();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, pipe,
                        rhsTypeDescOrExpr);
            }
            // Both sides are type-descs: build a union.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createUnionTypeDesc(typeDesc, pipe, rhsTypeDescOrExpr);
        case BITWISE_AND_TOKEN:
            // `&` : intersection type-desc or bitwise-and expression.
            nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `&=` compound assignment: the lhs is complete.
                return typeOrExpr;
            }
            STNode ampersand = parseBinaryOperator();
            rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr, ampersand,
                        rhsTypeDescOrExpr);
            }
            // Both sides are type-descs: build an intersection.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return createIntersectionTypeDesc(typeDesc, ampersand, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // An identifier or `?` resolves the node to a type-desc.
            typeDesc = parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                    false);
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            // Terminators: leave the node as-is for the caller to interpret.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // `[` : array-type dimension or member-access; still ambiguous.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            // `T...` : rest descriptor (e.g. inside a tuple type).
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundBinaryOperator(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                // Continue as an expression.
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            // Unexpected token: recover and retry.
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS, typeOrExpr);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}
/**
 * Check whether a node can still be interpreted as either a type-desc or an expression,
 * i.e. whether the ambiguity has not yet been resolved.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is still ambiguous
 */
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the original condition was
            // `operator != PIPE_TOKEN || operator == BITWISE_AND_TOKEN`, whose second
            // operand is unreachable (a BITWISE_AND operator already satisfies `!= PIPE`),
            // so it reduces to the check below — behavior is unchanged. If `&` expressions
            // were also meant to stay ambiguous (mirroring intersection type-descs), the
            // intended test was likely `!= PIPE && != BITWISE_AND`; confirm against the
            // grammar before changing.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN) {
                return false;
            }
            // A `|` expression is ambiguous only when both operands are.
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // Member-access is ambiguous only if the container and every key are ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Check whether a node is composed entirely of basic literals (optionally signed numerics),
 * possibly combined with <code>|</code>, braces, or brackets.
 *
 * @param node Node to check
 * @return <code>true</code> if the node consists only of basic literals
 */
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the original condition was
            // `operator != PIPE_TOKEN || operator == BITWISE_AND_TOKEN`, whose second
            // operand is unreachable, so it reduces to the check below — behavior is
            // unchanged. Also note these recursive checks delegate to isAmbiguous()
            // rather than isAllBasicLiterals(); presumably intentional (the operands
            // only need to stay ambiguous), but worth confirming.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            // Every non-comma member of the list must itself be all basic literals.
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only `+`/`-` applied to a numeric literal counts as a basic literal.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}
/**
 * Check whether a node is a numeric literal.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is a numeric literal
 */
private boolean isNumericLiteral(STNode node) {
    // A single-case switch is overkill here; a direct comparison is clearer.
    return node.kind == SyntaxKind.NUMERIC_LITERAL;
}
// Parses a `[`-led node whose members may each be a type-desc or an expression,
// producing a tuple-type-desc shaped node. Members followed by `...` (when the member
// is a definite type-desc) become rest descriptors.
private STNode parseTupleTypeDescOrExprStartsWithOpenBracket() {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> members = new ArrayList<>();
    STNode memberEnd;
    while (!isEndOfListConstructor(peek().kind)) {
        STNode expr = parseTypeDescOrExpr();
        if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN && isDefiniteTypeDesc(expr.kind)) {
            // `T...` inside the brackets: a tuple rest descriptor.
            STNode ellipsis = consume();
            expr = STNodeFactory.createRestDescriptorNode(expr, ellipsis);
        }
        members.add(expr);
        memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            // No separator: the list ends here.
            break;
        }
        members.add(memberEnd);
    }
    STNode memberNodes = STNodeFactory.createNodeList(members);
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberNodes, closeBracket);
}
/**
 * Parse binding-patterns.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern
 * | wildcard-binding-pattern
 * | list-binding-pattern
 * | mapping-binding-pattern
 * | functional-binding-pattern
 * <br/><br/>
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 * <br/><br/>
 * wildcard-binding-pattern := _
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * mapping-binding-pattern := { field-binding-patterns }
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * <br/>
 * rest-binding-pattern := ... variable-name
 * <br/><br/>
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * <br/>
 * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
 * | other-arg-binding-patterns
 * <br/>
 * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
 * <br/>
 * positional-arg-binding-pattern := binding-pattern
 * <br/>
 * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
 * | [rest-binding-pattern]
 * <br/>
 * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
 * <br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // Capture, wildcard, or (missing-keyword) error binding pattern.
            return parseBindingPatternStartsWithIdentifier();
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case OPEN_BRACE_TOKEN:
            return parseMappingBindingPattern();
        case ERROR_KEYWORD:
            return parseErrorBindingPattern();
        default:
            recover(nextToken, ParserRuleContext.BINDING_PATTERN);
            return parseBindingPattern();
    }
}
// Parses a binding pattern that begins with an identifier: a capture/wildcard pattern,
// or an error binding pattern whose `error` keyword is missing.
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `identifier (` : treat as an error binding pattern with a missing `error` keyword.
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode missingErrorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                ParserRuleContext.ERROR_KEYWORD);
        return parseErrorBindingPattern(missingErrorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
    }
    // A qualified name is not a valid binding pattern: attach it as invalid minutiae
    // to a synthesized missing identifier.
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    missingIdentifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(missingIdentifier, argNameOrBindingPattern);
    return createCaptureOrWildcardBP(missingIdentifier);
}
// `_` becomes a wildcard-binding-pattern; any other name a capture-binding-pattern.
private STNode createCaptureOrWildcardBP(STNode varName) {
    return isWildcardBP(varName)
            ? getWildcardBindingPattern(varName)
            : STNodeFactory.createCaptureBindingPatternNode(varName);
}
/**
 * Parse list-binding-patterns.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * </code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    STNode result = parseListBindingPattern(openBracket, new ArrayList<>());
    endContext();
    return result;
}
// Parses the members of a list-binding-pattern after the `[`. Handles the empty
// pattern `[]` directly, otherwise parses the first member and delegates.
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    // Use isEmpty() instead of `size() == 0`; also drop the redundant local for the result.
    if (bindingPatternsList.isEmpty() && isEndOfListBindingPattern(peek().kind)) {
        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }
    STNode firstMember = parseListBindingPatternMember();
    bindingPatternsList.add(firstMember);
    return parseListBindingPattern(openBracket, firstMember, bindingPatternsList);
}
// Parses the remaining `, member` pairs of a list-binding-pattern. Stops at the end of
// the list or once a rest-binding-pattern (which must be last) has been parsed.
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    while (!isEndOfListBindingPattern(peek().kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        STNode separator = parseListBindingPatternMemberRhs();
        if (separator == null) {
            break;
        }
        bindingPatterns.add(separator);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}
// Parses the separator after a list-binding-pattern member: a comma, or null at `]`.
private STNode parseListBindingPatternMemberRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        // End of the list-binding-pattern.
        return null;
    }
    recover(nextToken, ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
    return parseListBindingPatternMemberRhs();
}
/**
 * Check whether a token kind terminates a list-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> for <code>]</code> or end-of-file
 */
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    // A two-constant switch is clearer as a plain boolean expression.
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse list-binding-pattern member.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * </code>
 *
 * @return List binding pattern member
 */
private STNode parseListBindingPatternMember() {
    STToken nextToken = peek();
    // `...` starts a rest-binding-pattern; any binding-pattern start token is delegated.
    if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(nextToken, ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
            return parseListBindingPatternMember();
    }
}
/**
 * Parse rest binding pattern.
 * <p>
 * <code>
 * rest-binding-pattern := ... variable-name
 * </code>
 *
 * @return Rest binding pattern node
 */
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode variableName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode nameRef =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, nameRef);
}
/**
 * Parse Typed-binding-pattern.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/><br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    // No qualifiers have been parsed so far.
    return parseTypedBindingPattern(new ArrayList<>(), context);
}
// Parses the type-descriptor of a typed-binding-pattern, then the binding pattern.
// (Removed a redundant local that only forwarded the return value.)
private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(qualifiers,
            ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false);
    return parseTypedBindingPatternTypeRhs(typeDesc, context);
}
/**
 * Parse mapping-binding-patterns.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    STToken token = peek();
    if (isEndOfMappingBindingPattern(token.kind)) {
        // Empty pattern `{ }`.
        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
    }
    List<STNode> bindingPatterns = new ArrayList<>();
    STNode prevMember = parseMappingBindingPatternMember();
    if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        // A rest pattern is deferred: the continuation adds it after the loop so it is
        // always the last member.
        bindingPatterns.add(prevMember);
    }
    return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
}
// Parses the remaining `, member` pairs of a mapping-binding-pattern. A
// rest-binding-pattern stops the loop and is appended last, keeping it the
// final member regardless of where it was parsed.
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
    STToken token = peek();
    STNode mappingBindingPatternRhs = null;
    while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        mappingBindingPatternRhs = parseMappingBindingPatternEnd();
        if (mappingBindingPatternRhs == null) {
            // No separator: the pattern ends here.
            break;
        }
        bindingPatterns.add(mappingBindingPatternRhs);
        prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            // Added after the loop, below.
            break;
        }
        bindingPatterns.add(prevMember);
        token = peek();
    }
    if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(prevMember);
    }
    STNode closeBrace = parseCloseBrace();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    endContext();
    return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
}
/**
 * Parse mapping-binding-pattern entry.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern
 * | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPatternMember() {
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        // `...` starts the rest-binding-pattern, which must be the last member.
        return parseRestBindingPattern();
    }
    return parseFieldBindingPattern();
}
/**
 * Parse the separator that follows a mapping-binding-pattern member.
 *
 * @return Comma token, or {@code null} if the closing brace was reached
 */
private STNode parseMappingBindingPatternEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    // Unexpected token: recover and retry.
    recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);
    return parseMappingBindingPatternEnd();
}
/**
 * Parse field-binding-pattern.
 * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
 *
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern() {
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
        return parseFieldBindingPattern(fieldName);
    }
    // Not an identifier: recover and retry.
    recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
    return parseFieldBindingPattern();
}
/**
 * Parse the remainder of a field-binding-pattern, after the leading identifier.
 *
 * @param identifier Field name or variable name already parsed
 * @return Full field-binding-pattern if a colon follows; otherwise a varname node
 */
private STNode parseFieldBindingPattern(STNode identifier) {
    STNode nameRef = STNodeFactory.createSimpleNameReferenceNode(identifier);
    if (peek().kind == SyntaxKind.COLON_TOKEN) {
        STNode colon = parseColon();
        STNode pattern = parseBindingPattern();
        return STNodeFactory.createFieldBindingPatternFullNode(nameRef, colon, pattern);
    }
    return STNodeFactory.createFieldBindingPatternVarnameNode(nameRef);
}
/**
 * Check whether the given token kind terminates a mapping-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} if the pattern ends here
 */
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return true;
    }
    return endOfModuleLevelNode(1);
}
/**
 * Resolve the ambiguity between an error-type-desc and an error-binding-pattern,
 * by looking ahead at the tokens that follow the `error` keyword.
 *
 * @param annots Annotations attached to the statement, if any
 * @return Parsed error-binding-pattern assignment or error-typed variable declaration
 */
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
    STToken nextNextToken = peek(2);
    switch (nextNextToken.kind) {
        case OPEN_PAREN_TOKEN: // error(..
            return parseAsErrorBindingPattern();
        case LT_TOKEN: // error<..
            return parseAsErrorTypeDesc(annots);
        case IDENTIFIER_TOKEN: // error identifier..
            SyntaxKind nextNextNextTokenKind = peek(3).kind;
            if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                    nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                // error identifier: .. or error identifier( .. => binding pattern
                return parseAsErrorBindingPattern();
            }
            // Deliberate fall-through: error identifier followed by anything else
            // is treated as a type descriptor.
        default:
            return parseAsErrorTypeDesc(annots);
    }
}
/**
 * Parse the input as an error-binding-pattern followed by an assignment RHS.
 *
 * @return Assignment statement node
 */
private STNode parseAsErrorBindingPattern() {
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    STNode errorBindingPattern = parseErrorBindingPattern();
    return parseAssignmentStmtRhs(errorBindingPattern);
}
/**
 * Parse the input as a variable declaration whose type starts with `error`.
 *
 * @param annots Annotations attached to the declaration, if any
 * @return Variable declaration node
 */
private STNode parseAsErrorTypeDesc(STNode annots) {
    // No `final` keyword in this path.
    STNode emptyFinalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), emptyFinalKeyword);
}
/**
 * Parse error binding pattern node.
 * <p>
 * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
 * <br/><br/>
 * error-arg-list-binding-pattern :=
 * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 * | [error-field-binding-patterns]
 * <br/><br/>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * error-field-binding-patterns :=
 * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 * | rest-binding-pattern
 * <br/><br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 *
 * @return Error binding pattern node.
 */
private STNode parseErrorBindingPattern() {
    // Context is closed in parseErrorBindingPattern(errorKeyword, typeRef).
    startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
    STNode errorKeyword = parseErrorKeyword();
    return parseErrorBindingPattern(errorKeyword);
}
/**
 * Parse the optional error-type-reference of an error-binding-pattern, then continue
 * with the parenthesized arg list.
 *
 * @param errorKeyword Already-consumed `error` keyword
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // No type reference: error(...)
            typeRef = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                typeRef = parseTypeReference();
                break;
            }
            // Unexpected token: recover and retry this method.
            recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
            return parseErrorBindingPattern(errorKeyword);
    }
    return parseErrorBindingPattern(errorKeyword, typeRef);
}
/**
 * Parse the parenthesized arg list of an error-binding-pattern and assemble the node.
 * Also closes the ERROR_BINDING_PATTERN context opened by the caller chain.
 *
 * @param errorKeyword Already-consumed `error` keyword
 * @param typeRef      Error type reference, or an empty node when absent
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParen = parseOpenParenthesis();
    STNode argList = parseErrorArgListBindingPatterns();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParen, argList, closeParen);
}
/**
 * Parse error arg list binding pattern.
 * <p>
 * <code>
 * error-arg-list-binding-pattern :=
 * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 * | [error-field-binding-patterns]
 * <br/><br/>
 * <p>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * <p>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * <p>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * <p>
 * error-field-binding-patterns :=
 * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 * | rest-binding-pattern
 * <br/><br/>
 * <p>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return Error arg list binding patterns.
 */
private STNode parseErrorArgListBindingPatterns() {
    List<STNode> argListBindingPatterns = new ArrayList<>();
    // Empty arg list: error( )
    if (isEndOfErrorFieldBindingPatterns()) {
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    return parseErrorArgListBindingPatterns(argListBindingPatterns);
}
/**
 * Parse the first arg of an error-arg-list-binding-pattern and dispatch to the
 * appropriate continuation, based on which grammar position the first arg can occupy.
 *
 * @param argListBindingPatterns Accumulator for args and separating commas (mutated)
 * @return Node list of the arg-list binding patterns
 */
private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
    STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
    if (firstArg == null) {
        // Close-paren reached immediately: empty arg list.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    switch (firstArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Valid error-message-binding-pattern; continue after the message position.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        case ERROR_BINDING_PATTERN:
            // An error BP can only be the cause, so the message is missing:
            // synthesize a missing capture BP and a missing comma before it.
            STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
            missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                    DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
            STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
            argListBindingPatterns.add(missingErrorMsgBP);
            argListBindingPatterns.add(missingComma);
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            // Field-args position: neither message nor cause may follow.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        default:
            // Invalid first arg: attach as invalid-node minutiae and retry.
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }
}
/**
 * Parse the second arg of an error-arg-list-binding-pattern — the position after the
 * error-message-binding-pattern (i.e. the cause, or the start of the field args).
 *
 * @param argListBindingPatterns Accumulator for args and separating commas (mutated)
 * @return Node list of the arg-list binding patterns
 */
private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
    STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
    if (argEnd == null) {
        // Close-paren reached: the message was the only arg.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
    // isFirstArg=false above means the parser never returns null here.
    assert secondArg != null;
    switch (secondArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(secondArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
        default:
            // Invalid second arg: attach both the comma and the arg to the previous
            // node as invalid minutiae, then retry from the same position.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                    DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
    }
}
/**
 * Parse the remaining error-field-binding-patterns (named args followed by an optional
 * rest arg), validating the ordering of each arg against the previous valid one.
 *
 * @param argListBindingPatterns Accumulator for args and separating commas (mutated)
 * @param lastValidArgKind       Kind of the last arg accepted as valid
 * @return Node list of the arg-list binding patterns
 */
private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                          SyntaxKind lastValidArgKind) {
    while (!isEndOfErrorFieldBindingPatterns()) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
        if (argEnd == null) {
            // Close-paren reached.
            break;
        }
        STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
        // isFirstArg=false above means the parser never returns null here.
        assert currentArg != null;
        DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListBindingPatterns.size() == 0) {
            // Nothing valid collected yet: attach invalid nodes to the next token.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            // Attach invalid nodes to the last valid node in the list.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
        }
    }
    return STNodeFactory.createNodeList(argListBindingPatterns);
}
/**
 * Check whether the next token terminates the error-field-binding-patterns list.
 *
 * @return {@code true} on a close-paren or end of file
 */
private boolean isEndOfErrorFieldBindingPatterns() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the separator that follows an error-arg binding pattern.
 *
 * @param currentCtx Recovery context to use on an unexpected token
 * @return Comma token, or {@code null} if the closing paren was reached
 */
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(nextToken, currentCtx);
    return parseErrorArgsBindingPatternEnd(currentCtx);
}
/**
 * Parse one arg of an error-arg-list-binding-pattern: a rest pattern, a named arg,
 * a simple (capture/wildcard) pattern, or a nested binding pattern.
 *
 * @param context    Recovery context to use on an unexpected token
 * @param isFirstArg Whether this is the first arg (only then may a close-paren
 *                   yield {@code null} for an empty list)
 * @return Parsed arg node, or {@code null} for an empty first-arg position
 */
private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case IDENTIFIER_TOKEN:
            // Identifier may start a named-arg (arg = BP) or a simple BP.
            STNode argNameOrSimpleBindingPattern = consume();
            return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        case CLOSE_PAREN_TOKEN:
            if (isFirstArg) {
                // Empty arg list: error( )
                return null;
            }
            // Deliberate fall-through: a close-paren after a comma is an error.
        default:
            recover(peek(), context);
            return parseErrorArgListBindingPattern(context, isFirstArg);
    }
}
/**
 * Decide, after an identifier, between a named-arg-binding-pattern
 * (<code>arg-name = binding-pattern</code>) and a simple capture/wildcard pattern.
 *
 * @param argNameOrSimpleBindingPattern Identifier token already consumed
 * @return Named-arg binding pattern, or capture/wildcard binding pattern
 */
private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
    if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
        STNode equalToken = consume();
        STNode valuePattern = parseBindingPattern();
        return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,
                equalToken, valuePattern);
    }
    // Comma, close-paren, or anything else: treat the identifier as a
    // capture/wildcard binding pattern.
    return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
}
/**
 * Validate the ordering of an error-field-binding-pattern arg: only named-arg and
 * rest patterns are allowed in the field-args position, and nothing may follow a
 * rest pattern.
 *
 * @param prevArgKind    Kind of the previous valid arg
 * @param currentArgKind Kind of the arg being validated
 * @return {@code null} when valid; otherwise the diagnostic to report
 */
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                  SyntaxKind currentArgKind) {
    boolean isFieldArg = currentArgKind == SyntaxKind.NAMED_ARG_BINDING_PATTERN
            || currentArgKind == SyntaxKind.REST_BINDING_PATTERN;
    if (!isFieldArg) {
        // Capture, wildcard, error, list, mapping and all other patterns are
        // invalid in the field-args position.
        return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
    }
    if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
        return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
    }
    return null;
}
/*
 * This parses Typed binding patterns and deals with ambiguity between types,
 * and binding patterns. An example is 'T[a]'.
 * The ambiguity lies in between:
 * 1) Array Type
 * 2) List binding pattern
 * 3) Member access expression.
 */
/**
 * Parse the component after the type-desc, of a typed-binding-pattern.
 *
 * @param typeDesc Starting type-desc of the typed-binding-pattern
 * @param context  Parser context the typed-binding-pattern occurs in
 * @return Typed-binding pattern
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
    // Delegate with isRoot=true: this overload is the entry point.
    return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
}
/**
 * Parse the component after the type-desc of a typed-binding-pattern, handling the
 * 'T[a]' ambiguity and non-root early exits.
 *
 * @param typeDesc Starting type-desc
 * @param context  Parser context
 * @param isRoot   Whether this is the root of the typed-binding-pattern
 * @return Typed-binding-pattern node, or the bare type-desc when not at the root
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
    switch (peek().kind) {
        case IDENTIFIER_TOKEN: // Capture/Functional binding pattern: T x, T(..)
        case OPEN_BRACE_TOKEN: // Map binding pattern: T { }
        case ERROR_KEYWORD: // Error binding pattern: T error(..)
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case OPEN_BRACKET_TOKEN:
            // T[..] — ambiguous between array type and list binding pattern.
            STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
            assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
            return typedBindingPattern;
        case CLOSE_PAREN_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
            if (!isRoot) {
                // Nested position: the caller completes the pattern.
                return typeDesc;
            }
            // Deliberate fall-through to recovery at the root.
        default:
            recover(peek(), ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS, typeDesc, context, isRoot);
            return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
    }
}
/**
 * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
 *
 * @param typeDescOrExpr        Type desc or the expression at the start
 * @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous
 * @param allowAssignment       Whether an assignment may follow (member-access LHS case)
 * @param context               Parser context the construct occurs in
 * @return Parsed node
 */
private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                      boolean allowAssignment, ParserRuleContext context) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    // T[ ] — empty brackets can only be an array type desc.
    if (isBracketedListEnd(peek().kind)) {
        return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
    }
    // Parse the first member and classify the construct from its kind.
    STNode member = parseBracketedListMember(isTypedBindingPattern);
    SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
    switch (currentNodeType) {
        case ARRAY_TYPE_DESC:
            STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
            return typedBindingPattern;
        case LIST_BINDING_PATTERN:
            // T[a] — list binding pattern.
            STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case INDEXED_EXPRESSION:
            return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
        case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
            // Still ambiguous; resolved after the close bracket, below.
            break;
        case NONE:
        default:
            // A comma after the member disambiguates to a list binding pattern.
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd != null) {
                List<STNode> memberList = new ArrayList<>();
                memberList.add(getBindingPattern(member));
                memberList.add(memberEnd);
                bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            }
    }
    STNode closeBracket = parseCloseBracket();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
/**
 * Treat the bracketed construct as a member-access expression and parse the rest of it.
 *
 * @param typeNameOrExpr Container expression that precedes the open bracket
 * @param openBracket    Open bracket token
 * @param member         Already-parsed key expression start
 * @return Member-access expression, possibly extended by a following expression RHS
 */
private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
    // Complete the key expression first (it may continue as a binary expr etc.).
    STNode keyMember = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
    STNode closeBracket = parseCloseBracket();
    endContext();
    STNode keyExprList = STNodeFactory.createNodeList(keyMember);
    STNode memberAccess =
            STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExprList, closeBracket);
    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccess, false, false);
}
/**
 * Check whether the given token kind terminates a bracketed list.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} on a close-bracket or end of file
 */
private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN;
}
/**
 * Parse a member of an ambiguous bracketed list. This member could be:
 * 1) Array length
 * 2) Key expression of a member-access-expr
 * 3) A member-binding pattern of a list-binding-pattern.
 *
 * @param isTypedBindingPattern Is this in a definite typed-binding pattern
 * @return Parsed member node
 */
private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
        case STRING_LITERAL_TOKEN:
            // Possible array length or key expression.
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            return STNodeFactory.createEmptyNode();
        case OPEN_BRACE_TOKEN: // mapping-binding-pattern
        case ERROR_KEYWORD: // error-binding-pattern
        case ELLIPSIS_TOKEN: // rest binding pattern
        case OPEN_BRACKET_TOKEN: // nested list-binding-pattern
            return parseStatementStartBracketedListMember();
        case IDENTIFIER_TOKEN:
            if (isTypedBindingPattern) {
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            // Still ambiguous: fall through to parse as an expression below.
            break;
        default:
            if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                    isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                // Could be a key expression; parse as an expression below.
                break;
            }
            ParserRuleContext recoverContext =
                    isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                            : ParserRuleContext.BRACKETED_LIST_MEMBER;
            recover(peek(), recoverContext, isTypedBindingPattern);
            return parseBracketedListMember(isTypedBindingPattern);
    }
    STNode expr = parseExpression();
    if (isWildcardBP(expr)) {
        // `_` parsed as an expression is actually a wildcard binding pattern.
        return getWildcardBindingPattern(expr);
    }
    return expr;
}
/**
 * Treat the current node as an array, and parse the remainder of the binding pattern.
 *
 * @param typeDesc    Type-desc
 * @param openBracket Open bracket
 * @param member      Member (array length), possibly empty
 * @param context     Parser context
 * @return Parsed node
 */
private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {
    typeDesc = getTypeDescFromExpr(typeDesc);
    // Re-enter type-desc parsing: replace the BRACKETED_LIST context and open an
    // array-type context just for the close bracket, then pop both.
    switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode closeBracket = parseCloseBracket();
    endContext();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
            context);
}
/**
 * Parse the separator that follows a bracketed-list member.
 *
 * @return Comma token, or {@code null} if the closing bracket was reached
 */
private STNode parseBracketedListMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.BRACKETED_LIST_MEMBER_END);
    return parseBracketedListMemberEnd();
}
/**
 * We reach here to break ambiguity of T[a]. This could be:
 * 1) Array Type Desc
 * 2) Member access on LHS
 * 3) Typed-binding-pattern
 *
 * @param typeDescOrExpr        Type name or the expr that precede the open-bracket.
 * @param openBracket           Open bracket
 * @param member                Member
 * @param closeBracket          Close bracket
 * @param isTypedBindingPattern Is this is a typed-binding-pattern.
 * @param allowAssignment       Whether an `=` may make this a member-access LHS
 * @param context               Parser context
 * @return Specific node that matches to T[a], after solving ambiguity.
 */
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, boolean isTypedBindingPattern,
                                                         boolean allowAssignment, ParserRuleContext context) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN: // Capture binding pattern: T[a] b
        case OPEN_BRACE_TOKEN: // Map binding pattern: T[a] { }
        case ERROR_KEYWORD: // Error binding pattern: T[a] error(..)
            // T[a] was an array type desc; what follows is the binding pattern.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
        case OPEN_BRACKET_TOKEN: // T[a][b]..
            if (isTypedBindingPattern) {
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = createArrayTypeDesc(typeDesc, openBracket, member, closeBracket);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            }
            // Still ambiguous: treat T[a] as an expression and continue with [b].
            STNode keyExpr = getKeyExpr(member);
            STNode expr =
                    STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
        case QUESTION_MARK_TOKEN:
            // T[a]? — optional array type desc.
            typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            return parseTypedBindingPatternTypeRhs(typeDesc, context);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // T[a] | .. or T[a] & .. — union/intersection; still ambiguous.
            return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                    isTypedBindingPattern);
        case IN_KEYWORD:
            // `in` only makes this a binding pattern inside foreach/from clauses.
            if (context != ParserRuleContext.FOREACH_STMT && context != ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case EQUAL_TOKEN: // T[a] =
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                // Fall through to recovery: `=` is invalid here, `in` was expected.
                break;
            }
            if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            }
            // T[a] is the LHS of an assignment: member-access expression.
            keyExpr = getKeyExpr(member);
            typeDescOrExpr = getExpression(typeDescOrExpr);
            return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
        case SEMICOLON_TOKEN: // T[a];
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                // Fall through to recovery: `;` is invalid here.
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case CLOSE_BRACE_TOKEN: // T[a]}
        case COMMA_TOKEN: // T[a],
            if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                keyExpr = getKeyExpr(member);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            // Deliberate fall-through to default.
        default:
            if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                // An expression continues: T[a] was a member access.
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            break;
    }
    ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
    if (isTypedBindingPattern) {
        recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
    }
    recover(peek(), recoveryCtx, typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
/**
 * Wrap a member-access key expression in a node list, synthesizing a missing
 * identifier (with a diagnostic) when no key expression is present.
 *
 * @param member Key expression, or {@code null} when absent
 * @return Node list holding the key expression
 */
private STNode getKeyExpr(STNode member) {
    if (member != null) {
        return STNodeFactory.createNodeList(member);
    }
    STToken missingKey = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
    STNode missingNameRef = STNodeFactory.createSimpleNameReferenceNode(missingKey);
    return STNodeFactory.createNodeList(missingNameRef);
}
/**
 * Materialize a typed-binding-pattern from an ambiguous `T[member]` construct,
 * interpreting the bracketed part either as an array length or as a
 * list-binding-pattern member, depending on what the member is.
 *
 * @param typeDescOrExpr Type desc or expression that preceded the open bracket
 * @param openBracket    Open bracket token
 * @param member         Bracketed member; may be empty
 * @param closeBracket   Close bracket token
 * @return Typed-binding-pattern node
 */
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                         STNode closeBracket) {
    STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
    if (!isEmpty(member)) {
        SyntaxKind memberKind = member.kind;
        if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
            // T[5] / T[*] — array length: array type desc with a missing var name.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
            STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
            return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
        }
        if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // Qualified names are not valid list-BP members: attach as invalid minutiae.
            openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,
                    DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
        } else {
            STNode bindingPattern = getBindingPattern(member);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
    }
    STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
    STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}
/**
 * Parse a union or intersection type-desc/binary-expression that involves ambiguous
 * bracketed list in lhs.
 * <p>
 * e.g: <code>(T[a] &amp; R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/&amp; R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/&amp; R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/&amp; R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr        Type desc or the expression
 * @param openBracket           Open bracket
 * @param member                Member
 * @param closeBracket          Close bracket
 * @param isTypedBindingPattern Whether the construct is already known to be a typed-BP
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                      STNode closeBracket, boolean isTypedBindingPattern) {
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // RHS resolved to a typed-BP: combine LHS array type with the RHS type,
        // and keep the RHS binding pattern.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
        STNode newTypeDesc;
        if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
            newTypeDesc = createUnionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);
        } else {
            newTypeDesc =
                    createIntersectionTypeDesc(lhsTypeDesc, pipeOrAndToken, rhsTypedBindingPattern.typeDescriptor);
        }
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
    }
    if (isTypedBindingPattern) {
        // Known typed-BP but the RHS was only a type: variable name is missing.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
    }
    // Whole construct is an expression: T[a] |/& rhs becomes a binary expression
    // with a member-access on the LHS.
    STNode keyExpr = getExpression(member);
    STNode containerExpr = getExpression(typeDescOrExpr);
    STNode lhsExpr =
            STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
            typedBindingPatternOrExpr);
}
/**
 * Attach an array dimension `[member]` to the given type-desc. For union and
 * intersection types the dimension binds tighter than `|`/`&amp;`, so it is applied
 * recursively to the right-most type.
 *
 * @param openBracket  Open bracket token
 * @param member       Array length member; may be empty
 * @param closeBracket Close bracket token
 * @param lhsTypeDesc  Type-desc to attach the dimension to
 * @return Type-desc with the array dimension applied
 */
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createUnionTypeDesc(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc =
                getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
        lhsTypeDesc = createIntersectionTypeDesc(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
    }
    return lhsTypeDesc;
}
/**
 * Parse union (|) or intersection (&amp;) type operator.
 *
 * @return pipe or bitwise-and token
 */
private STNode parseUnionOrIntersectionToken() {
    STToken nextToken = peek();
    boolean isUnionOrIntersection = nextToken.kind == SyntaxKind.PIPE_TOKEN
            || nextToken.kind == SyntaxKind.BITWISE_AND_TOKEN;
    if (!isUnionOrIntersection) {
        recover(nextToken, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
        return parseUnionOrIntersectionToken();
    }
    return consume();
}
/**
 * Infer the type of the ambiguous bracketed list, based on the type of the member.
 *
 * @param memberNode            Member node
 * @param isTypedBindingPattern Whether the construct is already known to be a typed-BP
 * @return Inferred type of the bracketed list; NONE when still ambiguous
 */
private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
    if (isEmpty(memberNode)) {
        // Empty member: cannot decide yet.
        return SyntaxKind.NONE;
    }
    if (isDefiniteTypeDesc(memberNode.kind)) {
        return SyntaxKind.TUPLE_TYPE_DESC;
    }
    switch (memberNode.kind) {
        case ASTERISK_LITERAL: // T[*]
            return SyntaxKind.ARRAY_TYPE_DESC;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE: // a qualified-ref can only be a type-ref
        case REST_TYPE:
            return SyntaxKind.TUPLE_TYPE_DESC;
        case NUMERIC_LITERAL: // T[10] — array length or member access key
            if (isTypedBindingPattern) {
                return SyntaxKind.ARRAY_TYPE_DESC;
            }
            return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
        case SIMPLE_NAME_REFERENCE: // member access or binding pattern
        case BRACKETED_LIST: // member access or list binding pattern
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            return SyntaxKind.NONE;
        case ERROR_CONSTRUCTOR:
            if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
        default:
            if (isTypedBindingPattern) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
    }
}
/*
 * This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
 * The ambiguity lies in between:
 * 1) Assignment that starts with list binding pattern
 * 2) Var-decl statement that starts with tuple type
 * 3) Statement that starts with list constructor, such as sync-send, etc.
 */
/**
 * Parse any statement that starts with an open-bracket.
 *
 * @param annots               Annotations attached to the statement.
 * @param possibleMappingField Whether this may occur in a mapping-field position
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
    startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
    // isRoot=true: this overload is the statement-level entry point.
    return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
}
/**
 * Parse a nested (non-root) bracketed list with no annotations.
 *
 * @param possibleMappingField Whether this may occur in a mapping-field position
 * @return Parsed node
 */
private STNode parseMemberBracketedList(boolean possibleMappingField) {
    STNode emptyAnnots = STNodeFactory.createEmptyNodeList();
    return parseStatementStartsWithOpenBracket(emptyAnnots, false, possibleMappingField);
}
/**
 * The bracketed list at the start of a statement can be one of the following.
 * 1) List binding pattern
 * 2) Tuple type
 * 3) List constructor
 *
 * @param annots               Annotations attached to the statement
 * @param isRoot               Is this the root of the list
 * @param possibleMappingField Whether this may occur in a mapping-field position
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
    startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    while (!isBracketedListEnd(peek().kind)) {
        // Parse a member, then classify the whole list from that member's kind.
        STNode member = parseStatementStartBracketedListMember();
        SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
        switch (currentNodeType) {
            case TUPLE_TYPE_DESC:
                // [a, b, c] T d = .. — disambiguated: finish as a tuple type.
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case LIST_BINDING_PATTERN:
                // [a, b, c] = .. — disambiguated: finish as a list binding pattern.
                return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
            case LIST_CONSTRUCTOR:
                // "a" + b etc. — disambiguated: finish as a list constructor.
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case LIST_BP_OR_LIST_CONSTRUCTOR:
                return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
                return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
            case NONE:
            default:
                // Still ambiguous: keep collecting members.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
    }
    STNode closeBracket = parseCloseBracket();
    STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
            isRoot, possibleMappingField);
    return bracketedList;
}
/**
 * Parse a member of a list-binding-pattern, tuple-type-desc, or
 * list-constructor-expr, when the parent is ambiguous.
 *
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember() {
    // Start with an empty qualifier list; the overload fills it as it parses.
    return parseStatementStartBracketedListMember(new ArrayList<>());
}
private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
parseTypeDescQualifiers(qualifiers);
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_BRACKET_TOKEN:
reportInvalidQualifierList(qualifiers);
return parseMemberBracketedList(false);
case IDENTIFIER_TOKEN:
reportInvalidQualifierList(qualifiers);
STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
if (isWildcardBP(identifier)) {
STNode varName = ((STSimpleNameReferenceNode) identifier).name;
return getWildcardBindingPattern(varName);
}
nextToken = peek();
if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
STNode ellipsis = parseEllipsis();
return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
}
if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
case OPEN_BRACE_TOKEN:
reportInvalidQualifierList(qualifiers);
return parseMappingBindingPatterOrMappingConstructor();
case ERROR_KEYWORD:
reportInvalidQualifierList(qualifiers);
STToken nextNextToken = getNextNextToken();
if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return parseErrorBindingPatternOrErrorConstructor();
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case ELLIPSIS_TOKEN:
reportInvalidQualifierList(qualifiers);
return parseListBindingPatternMember();
case XML_KEYWORD:
case STRING_KEYWORD:
reportInvalidQualifierList(qualifiers);
if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
return parseExpression(false);
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case TABLE_KEYWORD:
case STREAM_KEYWORD:
reportInvalidQualifierList(qualifiers);
if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
return parseExpression(false);
case OPEN_PAREN_TOKEN:
return parseTypeDescOrExpr(qualifiers);
case FUNCTION_KEYWORD:
return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
default:
if (isValidExpressionStart(nextToken.kind, 1)) {
reportInvalidQualifierList(qualifiers);
return parseExpression(false);
}
if (isTypeStartingToken(nextToken.kind)) {
return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER, qualifiers);
return parseStatementStartBracketedListMember(qualifiers);
}
}
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
STNode member, boolean isRoot) {
memberList.add(member);
STNode memberEnd = parseBracketedListMemberEnd();
STNode tupleTypeDescOrListCons;
if (memberEnd == null) {
STNode closeBracket = parseCloseBracket();
tupleTypeDescOrListCons =
parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
} else {
memberList.add(memberEnd);
tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
}
return tupleTypeDescOrListCons;
}
/**
* Parse tuple type desc or list constructor.
*
* @return Parsed node
*/
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
startContext(ParserRuleContext.BRACKETED_LIST);
STNode openBracket = parseOpenBracket();
List<STNode> memberList = new ArrayList<>();
return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
}
private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
boolean isRoot) {
STToken nextToken = peek();
while (!isBracketedListEnd(nextToken.kind)) {
STNode member = parseTupleTypeDescOrListConstructorMember(annots);
SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
switch (currentNodeType) {
case LIST_CONSTRUCTOR:
return parseAsListConstructor(openBracket, memberList, member, isRoot);
case TUPLE_TYPE_DESC:
return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
case TUPLE_TYPE_DESC_OR_LIST_CONST:
default:
memberList.add(member);
break;
}
STNode memberEnd = parseBracketedListMemberEnd();
if (memberEnd == null) {
break;
}
memberList.add(memberEnd);
nextToken = peek();
}
STNode closeBracket = parseCloseBracket();
return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}
private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_BRACKET_TOKEN:
return parseTupleTypeDescOrListConstructor(annots);
case IDENTIFIER_TOKEN:
STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
STNode ellipsis = parseEllipsis();
return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
}
return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
case OPEN_BRACE_TOKEN:
return parseMappingConstructorExpr();
case ERROR_KEYWORD:
STToken nextNextToken = getNextNextToken();
if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return parseErrorConstructorExpr(false);
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case XML_KEYWORD:
case STRING_KEYWORD:
if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
return parseExpression(false);
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case TABLE_KEYWORD:
case STREAM_KEYWORD:
if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
return parseExpression(false);
case OPEN_PAREN_TOKEN:
return parseTypeDescOrExpr();
default:
if (isValidExpressionStart(nextToken.kind, 1)) {
return parseExpression(false);
}
if (isTypeStartingToken(nextToken.kind)) {
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER, annots);
return parseTupleTypeDescOrListConstructorMember(annots);
}
}
private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
return getStmtStartBracketedListType(memberNode);
}
private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket,
boolean isRoot) {
STNode tupleTypeOrListConst;
switch (peek().kind) {
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_BRACKET_TOKEN:
if (!isRoot) {
endContext();
return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,
closeBracket);
}
default:
if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
(isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
members = getExpressionList(members);
STNode memberExpressions = STNodeFactory.createNodeList(members);
tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
memberExpressions, closeBracket);
break;
}
STNode memberTypeDescs = STNodeFactory.createNodeList(getTypeDescList(members));
STNode tupleTypeDesc =
STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
tupleTypeOrListConst =
parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
endContext();
if (!isRoot) {
return tupleTypeOrListConst;
}
STNode annots = STNodeFactory.createEmptyNodeList();
return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
} | class member, object member or object member descriptor.
* </p>
* <code>
* class-member := object-field | method-defn | object-type-inclusion
* <br/>
* object-member := object-field | method-defn
* <br/>
* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
* </code>
*
* @param context Parsing context of the object member
* @return Parsed node
*/
private STNode parseObjectMember(ParserRuleContext context) {
STNode metadata;
STToken nextToken = peek();
switch (nextToken.kind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
return null;
case ASTERISK_TOKEN:
case PUBLIC_KEYWORD:
case PRIVATE_KEYWORD:
case FINAL_KEYWORD:
case REMOTE_KEYWORD:
case FUNCTION_KEYWORD:
case TRANSACTIONAL_KEYWORD:
case ISOLATED_KEYWORD:
case RESOURCE_KEYWORD:
metadata = STNodeFactory.createEmptyNode();
break;
case DOCUMENTATION_STRING:
case AT_TOKEN:
metadata = parseMetaData();
break;
default:
if (isTypeStartingToken(nextToken.kind)) {
metadata = STNodeFactory.createEmptyNode();
break;
}
ParserRuleContext recoveryCtx;
if (context == ParserRuleContext.OBJECT_MEMBER) {
recoveryCtx = ParserRuleContext.OBJECT_MEMBER_START;
} else {
recoveryCtx = ParserRuleContext.CLASS_MEMBER_START;
}
recover(peek(), recoveryCtx);
return parseObjectMember(context);
}
return parseObjectMemberWithoutMeta(metadata, context);
} | class member, object member or object member descriptor.
* </p>
* <code>
* class-member := object-field | method-defn | object-type-inclusion
* <br/>
* object-member := object-field | method-defn
* <br/>
* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
* </code>
*
* @param context Parsing context of the object member
* @return Parsed node
*/
private STNode parseObjectMember(ParserRuleContext context) {
STNode metadata;
STToken nextToken = peek();
switch (nextToken.kind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
return null;
case ASTERISK_TOKEN:
case PUBLIC_KEYWORD:
case PRIVATE_KEYWORD:
case FINAL_KEYWORD:
case REMOTE_KEYWORD:
case FUNCTION_KEYWORD:
case TRANSACTIONAL_KEYWORD:
case ISOLATED_KEYWORD:
case RESOURCE_KEYWORD:
metadata = STNodeFactory.createEmptyNode();
break;
case DOCUMENTATION_STRING:
case AT_TOKEN:
metadata = parseMetaData();
break;
default:
if (isTypeStartingToken(nextToken.kind)) {
metadata = STNodeFactory.createEmptyNode();
break;
}
ParserRuleContext recoveryCtx;
if (context == ParserRuleContext.OBJECT_MEMBER) {
recoveryCtx = ParserRuleContext.OBJECT_MEMBER_START;
} else {
recoveryCtx = ParserRuleContext.CLASS_MEMBER_START;
}
recover(peek(), recoveryCtx);
return parseObjectMember(context);
}
return parseObjectMemberWithoutMeta(metadata, context);
} |
Not this particular one, that was just an example of one having a comma (taken from a non-hosted app). Maybe we should disallow `-Xrunjdwp:transport` completely in hosted? Right now we warn or fail deployment for hosted (depending on feature flag value) and warn for non-hosted for JVM options like these. A JVM option like the above is perfectly valid in non-hosted. If we only validate JVM options for hosted this PR is not necessary | public void requireThatJvmOptionsAreLogged() throws IOException, SAXException {
verifyLoggingOfJvmOptions(true,
"options",
"-Xms2G foo bar",
"foo", "bar");
verifyLoggingOfJvmOptions(true,
"options",
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmOptions(false,
"options",
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmOptions(true, "options", "-Xms2G");
verifyLoggingOfJvmOptions(true, "options", "-verbose:gc");
verifyLoggingOfJvmOptions(true, "options", "-Djava.library.path=/opt/vespa/lib64:/home/y/lib64 -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005");
verifyLoggingOfJvmOptions(false, "options", "-Xms2G");
} | verifyLoggingOfJvmOptions(true, "options", "-Djava.library.path=/opt/vespa/lib64:/home/y/lib64 -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005"); | public void requireThatJvmOptionsAreLogged() throws IOException, SAXException {
verifyLoggingOfJvmOptions(true,
"options",
"-Xms2G foo bar",
"foo", "bar");
verifyLoggingOfJvmOptions(true,
"options",
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmOptions(true,
"options",
"-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005",
"-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005");
verifyLoggingOfJvmOptions(false,
"options",
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmOptions(true, "options", "-Xms2G");
verifyLoggingOfJvmOptions(true, "options", "-verbose:gc");
verifyLoggingOfJvmOptions(true, "options", "-Djava.library.path=/opt/vespa/lib64:/home/y/lib64");
verifyLoggingOfJvmOptions(false, "options", "-Xms2G");
} | class JvmOptionsTest extends ContainerModelBuilderTestBase {
@Test
public void verify_jvm_tag_with_attributes() throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <search/>" +
" <nodes>" +
" <jvm options='-XX:SoftRefLRUPolicyMSPerMB=2500' gc-options='-XX:+UseParNewGC' allocated-memory='45%'/>" +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build();
final TestLogger logger = new TestLogger();
VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(applicationPackage)
.deployLogger(logger)
.build());
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
model.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals("-XX:+UseParNewGC", qrStartConfig.jvm().gcopts());
assertEquals(45, qrStartConfig.jvm().heapSizeAsPercentageOfPhysicalMemory());
assertEquals("-XX:SoftRefLRUPolicyMSPerMB=2500", model.getContainerClusters().values().iterator().next().getContainers().get(0).getJvmOptions());
}
@Test
public void detect_conflicting_jvmgcoptions_in_jvmargs() {
assertFalse(ContainerModelBuilder.incompatibleGCOptions(""));
assertFalse(ContainerModelBuilder.incompatibleGCOptions("UseG1GC"));
assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:+UseG1GC"));
assertTrue(ContainerModelBuilder.incompatibleGCOptions("abc -XX:+UseParNewGC xyz"));
assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:CMSInitiatingOccupancyFraction=19"));
}
@Test
public void honours_jvm_gc_options() {
Element clusterElem = DomBuilderTest.parse(
"<container version='1.0'>",
" <search/>",
" <nodes jvm-gc-options='-XX:+UseG1GC'>",
" <node hostalias='mockhost'/>",
" </nodes>",
"</container>" );
createModel(root, clusterElem);
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
root.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals("-XX:+UseG1GC", qrStartConfig.jvm().gcopts());
}
private static void verifyIgnoreJvmGCOptions(boolean isHosted) throws IOException, SAXException {
verifyIgnoreJvmGCOptionsIfJvmArgs("jvmargs", ContainerCluster.G1GC, isHosted);
verifyIgnoreJvmGCOptionsIfJvmArgs( "jvm-options", "-XX:+UseG1GC", isHosted);
}
private static void verifyIgnoreJvmGCOptionsIfJvmArgs(String jvmOptionsName, String expectedGC, boolean isHosted) throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <nodes jvm-gc-options='-XX:+UseG1GC' " + jvmOptionsName + "='-XX:+UseParNewGC'>" +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build();
final TestLogger logger = new TestLogger();
VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(applicationPackage)
.deployLogger(logger)
.properties(new TestProperties().setHostedVespa(isHosted))
.build());
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
model.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals(expectedGC, qrStartConfig.jvm().gcopts());
}
@Test
public void ignores_jvmgcoptions_on_conflicting_jvmargs() throws IOException, SAXException {
verifyIgnoreJvmGCOptions(false);
verifyIgnoreJvmGCOptions(true);
}
private void verifyJvmGCOptions(boolean isHosted, String featureFlagDefault, String override, String expected) throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <nodes " + ((override == null) ? ">" : ("jvm-gc-options='" + override + "'>")) +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build();
final TestLogger logger = new TestLogger();
VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(applicationPackage)
.deployLogger(logger)
.properties(new TestProperties().setJvmGCOptions(featureFlagDefault).setHostedVespa(isHosted))
.build());
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
model.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals(expected, qrStartConfig.jvm().gcopts());
}
@Test
public void requireThatJvmGCOptionsIsHonoured() throws IOException, SAXException {
verifyJvmGCOptions(false, null, null, ContainerCluster.G1GC);
verifyJvmGCOptions(true, null, null, ContainerCluster.PARALLEL_GC);
verifyJvmGCOptions(true, "", null, ContainerCluster.PARALLEL_GC);
verifyJvmGCOptions(false, "-XX:+UseG1GC", null, "-XX:+UseG1GC");
verifyJvmGCOptions(true, "-XX:+UseG1GC", null, "-XX:+UseG1GC");
verifyJvmGCOptions(false, null, "-XX:+UseG1GC", "-XX:+UseG1GC");
verifyJvmGCOptions(false, "-XX:+UseParallelGC", "-XX:+UseG1GC", "-XX:+UseG1GC");
verifyJvmGCOptions(false, null, "-XX:+UseParallelGC", "-XX:+UseParallelGC");
}
@Test
public void requireThatInvalidJvmGcOptionsAreLogged() throws IOException, SAXException {
verifyLoggingOfJvmGcOptions(true,
"-XX:+ParallelGCThreads=8 foo bar",
"foo", "bar");
verifyLoggingOfJvmGcOptions(true,
"-XX:+UseCMSInitiatingOccupancyOnly foo bar",
"-XX:+UseCMSInitiatingOccupancyOnly", "foo", "bar");
verifyLoggingOfJvmGcOptions(true,
"-XX:+UseConcMarkSweepGC",
"-XX:+UseConcMarkSweepGC");
verifyLoggingOfJvmGcOptions(true,
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmGcOptions(false,
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmGcOptions(true, "-XX:+ParallelGCThreads=8");
verifyLoggingOfJvmGcOptions(true, "-XX:MaxTenuringThreshold");
verifyLoggingOfJvmGcOptions(false, "-XX:+UseConcMarkSweepGC");
}
@Test
public void requireThatInvalidJvmGcOptionsFailDeployment() throws IOException, SAXException {
try {
buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true),
new TestLogger(),
"gc-options",
"-XX:+ParallelGCThreads=8 foo bar");
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Invalid JVM GC options in services.xml: bar,foo"));
}
}
private void verifyLoggingOfJvmGcOptions(boolean isHosted, String override, String... invalidOptions) throws IOException, SAXException {
verifyLoggingOfJvmOptions(isHosted, "gc-options", override, invalidOptions);
}
private void verifyLoggingOfJvmOptions(boolean isHosted, String optionName, String override, String... invalidOptions) throws IOException, SAXException {
TestLogger logger = new TestLogger();
buildModelWithJvmOptions(isHosted, logger, optionName, override);
List<String> strings = Arrays.asList(invalidOptions.clone());
if (strings.isEmpty()) {
assertEquals(logger.msgs.size() > 0 ? logger.msgs.get(0).getSecond() : "", 0, logger.msgs.size());
return;
}
Collections.sort(strings);
Pair<Level, String> firstOption = logger.msgs.get(0);
assertEquals(Level.WARNING, firstOption.getFirst());
assertEquals("Invalid JVM " + (optionName.equals("gc-options") ? "GC " : "") +
"options in services.xml: " + String.join(",", strings), firstOption.getSecond());
}
private void buildModelWithJvmOptions(boolean isHosted, TestLogger logger, String optionName, String override) throws IOException, SAXException {
buildModelWithJvmOptions(new TestProperties().setHostedVespa(isHosted), logger, optionName, override);
}
private void buildModelWithJvmOptions(TestProperties properties, TestLogger logger, String optionName, String override) throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <nodes>" +
" <jvm " + optionName + "='" + override + "'/>" +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage app = new MockApplicationPackage.Builder().withServices(servicesXml).build();
new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(app)
.deployLogger(logger)
.properties(properties)
.build());
}
@Test
@Test
public void requireThatInvalidJvmOptionsFailDeployment() throws IOException, SAXException {
try {
buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true),
new TestLogger(),
"options",
"-Xms2G foo bar");
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Invalid JVM options in services.xml: bar,foo"));
}
}
} | class JvmOptionsTest extends ContainerModelBuilderTestBase {
@Test
public void verify_jvm_tag_with_attributes() throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <search/>" +
" <nodes>" +
" <jvm options='-XX:SoftRefLRUPolicyMSPerMB=2500' gc-options='-XX:+UseParNewGC' allocated-memory='45%'/>" +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build();
final TestLogger logger = new TestLogger();
VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(applicationPackage)
.deployLogger(logger)
.build());
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
model.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals("-XX:+UseParNewGC", qrStartConfig.jvm().gcopts());
assertEquals(45, qrStartConfig.jvm().heapSizeAsPercentageOfPhysicalMemory());
assertEquals("-XX:SoftRefLRUPolicyMSPerMB=2500", model.getContainerClusters().values().iterator().next().getContainers().get(0).getJvmOptions());
}
@Test
public void detect_conflicting_jvmgcoptions_in_jvmargs() {
assertFalse(ContainerModelBuilder.incompatibleGCOptions(""));
assertFalse(ContainerModelBuilder.incompatibleGCOptions("UseG1GC"));
assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:+UseG1GC"));
assertTrue(ContainerModelBuilder.incompatibleGCOptions("abc -XX:+UseParNewGC xyz"));
assertTrue(ContainerModelBuilder.incompatibleGCOptions("-XX:CMSInitiatingOccupancyFraction=19"));
}
@Test
public void honours_jvm_gc_options() {
Element clusterElem = DomBuilderTest.parse(
"<container version='1.0'>",
" <search/>",
" <nodes jvm-gc-options='-XX:+UseG1GC'>",
" <node hostalias='mockhost'/>",
" </nodes>",
"</container>" );
createModel(root, clusterElem);
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
root.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals("-XX:+UseG1GC", qrStartConfig.jvm().gcopts());
}
private static void verifyIgnoreJvmGCOptions(boolean isHosted) throws IOException, SAXException {
verifyIgnoreJvmGCOptionsIfJvmArgs("jvmargs", ContainerCluster.G1GC, isHosted);
verifyIgnoreJvmGCOptionsIfJvmArgs( "jvm-options", "-XX:+UseG1GC", isHosted);
}
private static void verifyIgnoreJvmGCOptionsIfJvmArgs(String jvmOptionsName, String expectedGC, boolean isHosted) throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <nodes jvm-gc-options='-XX:+UseG1GC' " + jvmOptionsName + "='-XX:+UseParNewGC'>" +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build();
final TestLogger logger = new TestLogger();
VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(applicationPackage)
.deployLogger(logger)
.properties(new TestProperties().setHostedVespa(isHosted))
.build());
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
model.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals(expectedGC, qrStartConfig.jvm().gcopts());
}
@Test
public void ignores_jvmgcoptions_on_conflicting_jvmargs() throws IOException, SAXException {
verifyIgnoreJvmGCOptions(false);
verifyIgnoreJvmGCOptions(true);
}
private void verifyJvmGCOptions(boolean isHosted, String featureFlagDefault, String override, String expected) throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <nodes " + ((override == null) ? ">" : ("jvm-gc-options='" + override + "'>")) +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage applicationPackage = new MockApplicationPackage.Builder().withServices(servicesXml).build();
final TestLogger logger = new TestLogger();
VespaModel model = new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(applicationPackage)
.deployLogger(logger)
.properties(new TestProperties().setJvmGCOptions(featureFlagDefault).setHostedVespa(isHosted))
.build());
QrStartConfig.Builder qrStartBuilder = new QrStartConfig.Builder();
model.getConfig(qrStartBuilder, "container/container.0");
QrStartConfig qrStartConfig = new QrStartConfig(qrStartBuilder);
assertEquals(expected, qrStartConfig.jvm().gcopts());
}
@Test
public void requireThatJvmGCOptionsIsHonoured() throws IOException, SAXException {
verifyJvmGCOptions(false, null, null, ContainerCluster.G1GC);
verifyJvmGCOptions(true, null, null, ContainerCluster.PARALLEL_GC);
verifyJvmGCOptions(true, "", null, ContainerCluster.PARALLEL_GC);
verifyJvmGCOptions(false, "-XX:+UseG1GC", null, "-XX:+UseG1GC");
verifyJvmGCOptions(true, "-XX:+UseG1GC", null, "-XX:+UseG1GC");
verifyJvmGCOptions(false, null, "-XX:+UseG1GC", "-XX:+UseG1GC");
verifyJvmGCOptions(false, "-XX:+UseParallelGC", "-XX:+UseG1GC", "-XX:+UseG1GC");
verifyJvmGCOptions(false, null, "-XX:+UseParallelGC", "-XX:+UseParallelGC");
}
@Test
public void requireThatInvalidJvmGcOptionsAreLogged() throws IOException, SAXException {
verifyLoggingOfJvmGcOptions(true,
"-XX:+ParallelGCThreads=8 foo bar",
"foo", "bar");
verifyLoggingOfJvmGcOptions(true,
"-XX:+UseCMSInitiatingOccupancyOnly foo bar",
"-XX:+UseCMSInitiatingOccupancyOnly", "foo", "bar");
verifyLoggingOfJvmGcOptions(true,
"-XX:+UseConcMarkSweepGC",
"-XX:+UseConcMarkSweepGC");
verifyLoggingOfJvmGcOptions(true,
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmGcOptions(false,
"$(touch /tmp/hello-from-gc-options)",
"$(touch", "/tmp/hello-from-gc-options)");
verifyLoggingOfJvmGcOptions(true, "-XX:+ParallelGCThreads=8");
verifyLoggingOfJvmGcOptions(true, "-XX:MaxTenuringThreshold");
verifyLoggingOfJvmGcOptions(false, "-XX:+UseConcMarkSweepGC");
}
@Test
public void requireThatInvalidJvmGcOptionsFailDeployment() throws IOException, SAXException {
try {
buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true),
new TestLogger(),
"gc-options",
"-XX:+ParallelGCThreads=8 foo bar");
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Invalid JVM GC options in services.xml: bar,foo"));
}
}
private void verifyLoggingOfJvmGcOptions(boolean isHosted, String override, String... invalidOptions) throws IOException, SAXException {
verifyLoggingOfJvmOptions(isHosted, "gc-options", override, invalidOptions);
}
private void verifyLoggingOfJvmOptions(boolean isHosted, String optionName, String override, String... invalidOptions) throws IOException, SAXException {
TestLogger logger = new TestLogger();
buildModelWithJvmOptions(isHosted, logger, optionName, override);
List<String> strings = Arrays.asList(invalidOptions.clone());
if (strings.isEmpty()) {
assertEquals(logger.msgs.size() > 0 ? logger.msgs.get(0).getSecond() : "", 0, logger.msgs.size());
return;
}
assertTrue("Expected 1 or more log messages for invalid JM options, got none", logger.msgs.size() > 0);
Pair<Level, String> firstOption = logger.msgs.get(0);
assertEquals(Level.WARNING, firstOption.getFirst());
Collections.sort(strings);
assertEquals("Invalid JVM " + (optionName.equals("gc-options") ? "GC " : "") +
"options in services.xml: " + String.join(",", strings), firstOption.getSecond());
}
private void buildModelWithJvmOptions(boolean isHosted, TestLogger logger, String optionName, String override) throws IOException, SAXException {
buildModelWithJvmOptions(new TestProperties().setHostedVespa(isHosted), logger, optionName, override);
}
private void buildModelWithJvmOptions(TestProperties properties, TestLogger logger, String optionName, String override) throws IOException, SAXException {
String servicesXml =
"<container version='1.0'>" +
" <nodes>" +
" <jvm " + optionName + "='" + override + "'/>" +
" <node hostalias='mockhost'/>" +
" </nodes>" +
"</container>";
ApplicationPackage app = new MockApplicationPackage.Builder().withServices(servicesXml).build();
new VespaModel(new NullConfigModelRegistry(), new DeployState.Builder()
.applicationPackage(app)
.deployLogger(logger)
.properties(properties)
.build());
}
@Test
@Test
public void requireThatInvalidJvmOptionsFailDeployment() throws IOException, SAXException {
try {
buildModelWithJvmOptions(new TestProperties().setHostedVespa(true).failDeploymentWithInvalidJvmOptions(true),
new TestLogger(),
"options",
"-Xms2G foo bar");
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Invalid JVM options in services.xml: bar,foo"));
}
}
} |
Instead of doing multiple String::format calls across if statements, why not use a string builder? | public URL getDirectoryUrl() {
String directoryURLString = String.format("%s/%s/%s", azureFileStorageClient.getUrl(),
shareName, directoryPath);
if (snapshot != null) {
directoryURLString = String.format("%s?snapshot=%s", directoryURLString, snapshot);
}
try {
return new URL(directoryURLString);
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new RuntimeException(
String.format("Invalid URL on %s: %s" + getClass().getSimpleName(), directoryURLString), e));
}
} | directoryURLString = String.format("%s?snapshot=%s", directoryURLString, snapshot); | public URL getDirectoryUrl() {
StringBuilder directoryURLString = new StringBuilder(azureFileStorageClient.getUrl()).append("/")
.append(shareName).append("/").append(directoryPath);
if (snapshot != null) {
directoryURLString.append("?snapshot=").append(snapshot);
}
try {
return new URL(directoryURLString.toString());
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new RuntimeException(
String.format("Invalid URL on %s: %s" + getClass().getSimpleName(), directoryURLString), e));
}
} | class DirectoryAsyncClient {
private final ClientLogger logger = new ClientLogger(DirectoryAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String directoryPath;
private final String snapshot;
/**
* Creates a DirectoryAsyncClient that sends requests to the storage directory at {@link
* AzureFileStorageImpl
* {@code client}.
*
* @param azureFileStorageClient Client that interacts with the service interfaces
* @param shareName Name of the share
* @param directoryPath Name of the directory
* @param snapshot The snapshot of the share
*/
DirectoryAsyncClient(AzureFileStorageImpl azureFileStorageClient, String shareName, String directoryPath,
String snapshot) {
Objects.requireNonNull(shareName);
Objects.requireNonNull(directoryPath);
this.shareName = shareName;
this.directoryPath = directoryPath;
this.snapshot = snapshot;
this.azureFileStorageClient = azureFileStorageClient;
}
/**
* Get the url of the storage directory client.
*
* @return the URL of the storage directory client
* @throws RuntimeException If the directory is using a malformed URL.
*/
/**
* Constructs a FileAsyncClient that interacts with the specified file.
*
* <p>If the file doesn't exist in this directory {@link FileAsyncClient | class DirectoryAsyncClient {
private final ClientLogger logger = new ClientLogger(DirectoryAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String directoryPath;
private final String snapshot;
/**
* Creates a DirectoryAsyncClient that sends requests to the storage directory at {@link
* AzureFileStorageImpl
* {@code client}.
*
* @param azureFileStorageClient Client that interacts with the service interfaces
* @param shareName Name of the share
* @param directoryPath Name of the directory
* @param snapshot The snapshot of the share
*/
DirectoryAsyncClient(AzureFileStorageImpl azureFileStorageClient, String shareName, String directoryPath,
String snapshot) {
Objects.requireNonNull(shareName);
Objects.requireNonNull(directoryPath);
this.shareName = shareName;
this.directoryPath = directoryPath;
this.snapshot = snapshot;
this.azureFileStorageClient = azureFileStorageClient;
}
/**
* Get the url of the storage directory client.
*
* @return the URL of the storage directory client
* @throws RuntimeException If the directory is using a malformed URL.
*/
/**
* Constructs a FileAsyncClient that interacts with the specified file.
*
* <p>If the file doesn't exist in this directory {@link FileAsyncClient |
We need to handle other places where we call the semantic analyzer from the type checker too. Please check and create an issue. | private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) {
if (!nonErrorLoggingCheck) {
semanticAnalyzer.analyzeNode(node, env);
}
} | semanticAnalyzer.analyzeNode(node, env); | private void analyzeObjectConstructor(BLangNode node, SymbolEnv env) {
if (!nonErrorLoggingCheck) {
semanticAnalyzer.analyzeNode(node, env);
}
} | class TypeChecker extends BLangNodeVisitor {
// Key under which the single TypeChecker instance is cached in the CompilerContext.
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>();
// lang.array functions that change a list's length (populated in the static block).
private static Set<String> listLengthModifierFunctions = new HashSet<>();
// Per-lang-lib sets of functions that mutate their receiver (populated in the static block).
private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>();
private static final String TABLE_TNAME = "table";
// Lang-library module names used as keys into modifierFunctions.
private static final String LIST_LANG_LIB = "lang.array";
private static final String MAP_LANG_LIB = "lang.map";
private static final String TABLE_LANG_LIB = "lang.table";
private static final String VALUE_LANG_LIB = "lang.value";
private static final String XML_LANG_LIB = "lang.xml";
// Names of the length-modifying lang.array functions.
private static final String FUNCTION_NAME_PUSH = "push";
private static final String FUNCTION_NAME_POP = "pop";
private static final String FUNCTION_NAME_SHIFT = "shift";
private static final String FUNCTION_NAME_UNSHIFT = "unshift";
private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType";
// Collaborating compiler components, resolved from the CompilerContext in the constructor.
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private NodeCloner nodeCloner;
private Types types;
private BLangDiagnosticLog dlog;
// Current symbol environment; saved/restored around each checkExpr call.
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;
private BLangAnonymousModelHelper anonymousModelHelper;
private SemanticAnalyzer semanticAnalyzer;
private Unifier unifier;
// When true, type checking is exploratory (e.g. union-member probing) and
// diagnostics are suppressed; see visit(BLangTableConstructorExpr).
private boolean nonErrorLoggingCheck = false;
private int letCount = 0;
// Environments/clauses tracked while checking query expressions.
private Stack<SymbolEnv> queryEnvs, prevEnvs;
private Stack<BLangNode> queryFinalClauses;
private boolean checkWithinQueryExpr = false;
private BLangMissingNodesHelper missingNodesHelper;
private boolean breakToParallelQueryEnv = false;
/**
 * Expected types or inherited types.
 */
private BType expType;
// Result of visiting the current expression; read back by checkExpr.
private BType resultType;
// Diagnostic code to report when the result type does not match expType.
private DiagnosticCode diagCode;
static {
listLengthModifierFunctions.add(FUNCTION_NAME_PUSH);
listLengthModifierFunctions.add(FUNCTION_NAME_POP);
listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT);
listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT);
modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeAll");
add("setLength");
add("reverse");
add("sort");
add("pop");
add("push");
add("shift");
add("unshift");
}});
modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{
add("put");
add("add");
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{
add("mergeJson");
}});
modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{
add("setName");
add("setChildren");
add("strip");
}});
}
public static TypeChecker getInstance(CompilerContext context) {
    // Return the per-compilation-context instance; on first use the constructor
    // registers the new instance in the context itself.
    TypeChecker cached = context.get(TYPE_CHECKER_KEY);
    return cached != null ? cached : new TypeChecker(context);
}
public TypeChecker(CompilerContext context) {
    // Register this instance in the context so getInstance() returns it.
    context.put(TYPE_CHECKER_KEY, this);
    // Resolve all collaborating components from the shared compiler context.
    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.nodeCloner = NodeCloner.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeNarrower = TypeNarrower.getInstance(context);
    this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
    // Query-expression bookkeeping stacks start empty.
    this.queryFinalClauses = new Stack<>();
    this.queryEnvs = new Stack<>();
    this.prevEnvs = new Stack<>();
    this.unifier = new Unifier();
}
// Type-checks an expression with no contextually expected type.
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
    return checkExpr(expr, env, symTable.noType);
}
// Type-checks an expression against the given expected type, reporting
// mismatches with the generic INCOMPATIBLE_TYPES diagnostic.
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
    return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
/**
 * Type-checks {@code expr} against {@code expType}, dispatching to the matching
 * visit method and returning the resulting type. Visitor state (env, expected
 * type, diagnostic code) is saved and restored so the method is safe to
 * re-enter recursively.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
    // Each node is checked at most once; subsequent calls return the cached type.
    if (expr.typeChecked) {
        return expr.getBType();
    }
    // Work against the effective type of an intersection (e.g. `T & readonly`).
    if (expType.tag == TypeTags.INTERSECTION) {
        expType = ((BIntersectionType) expType).effectiveType;
    }
    // Save visitor state before mutating it for this expression.
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    this.expType = expType;
    this.isTypeChecked = true;
    expr.expectedType = expType;
    // The visit method for expr's node kind writes its answer into resultType.
    expr.accept(this);
    if (resultType.tag == TypeTags.INTERSECTION) {
        resultType = ((BIntersectionType) resultType).effectiveType;
    }
    expr.setTypeCheckedType(resultType);
    expr.typeChecked = isTypeChecked;
    // Restore outer visitor state.
    this.env = prevEnv;
    this.expType = preExpType;
    this.diagCode = preDiagCode;
    validateAndSetExprExpectedType(expr);
    return resultType;
}
/**
 * Records the checked result type as the expression's expected type, except
 * when the result is a semantic error or when a record literal was checked
 * against a map type (where the record's own expected type must be kept).
 */
private void validateAndSetExprExpectedType(BLangExpression expr) {
    if (resultType.tag == TypeTags.SEMANTIC_ERROR) {
        return;
    }
    boolean recordCheckedAgainstMap = expr.getKind() == NodeKind.RECORD_LITERAL_EXPR
            && expr.expectedType != null
            && expr.expectedType.tag == TypeTags.MAP
            && expr.getBType().tag == TypeTags.RECORD;
    if (!recordCheckedAgainstMap) {
        expr.expectedType = resultType;
    }
}
// Type-checks a literal: first resolve its value/type against the expected
// type, then (unless already resolved via a finite-type context) verify
// assignability and publish the result.
public void visit(BLangLiteral literalExpr) {
    BType literalType = setLiteralValueAndGetType(literalExpr, expType);
    if (literalType == symTable.semanticError || literalExpr.isFiniteContext) {
        return;
    }
    resultType = types.checkType(literalExpr, literalType, expType);
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // Resolve namespace prefixes in the element filters, check that the
    // receiver is xml, and type the access as an xml element sequence.
    checkXMLNamespacePrefixes(xmlElementAccess.filters);
    checkExpr(xmlElementAccess.expr, env, symTable.xmlType);
    resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    // Resolve namespace prefixes used by the navigation filters.
    checkXMLNamespacePrefixes(xmlNavigation.filters);
    // An optional child index must be an int.
    if (xmlNavigation.childIndex != null) {
        checkExpr(xmlNavigation.childIndex, env, symTable.intType);
    }
    BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType);
    // Navigation is not defined over union receivers.
    if (exprType.tag == TypeTags.UNION) {
        dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS,
                xmlNavigation.expr.getBType());
    }
    // Children access yields `xml`; filtered access yields an element sequence.
    // Compute this once instead of duplicating the same conditional to set
    // resultType (the original re-evaluated the CHILDREN test a second time).
    BType actualType = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN
            ? symTable.xmlType : symTable.xmlElementSeqType;
    types.checkType(xmlNavigation, actualType, expType);
    resultType = actualType;
}
/**
 * Resolves each filter's namespace prefix in the prefix symbol space and
 * records the symbol on the filter; an unresolvable prefix is reported as an
 * error. Filters without a namespace are left untouched.
 */
private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) {
    for (BLangXMLElementFilter filter : filters) {
        if (filter.namespace.isEmpty()) {
            continue;
        }
        Name nsName = names.fromString(filter.namespace);
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName);
        filter.namespaceSymbol = nsSymbol;
        if (nsSymbol == symTable.notFoundSymbol) {
            dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName);
        }
    }
}
/**
 * Resolves a literal's concrete type (and possibly rewrites its stored value)
 * based on the contextually expected type: int literals may become float or
 * decimal, char-sized strings may become char-string, and finite/union
 * expected types are searched for a matching member. Returns semanticError
 * when the literal cannot fit the expected type.
 */
private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
    BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag);
    Object literalValue = literalExpr.value;
    // --- Integer / byte literals ---
    if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) {
        if (expType.tag == TypeTags.FLOAT) {
            // An int literal in float context becomes a float value.
            literalType = symTable.floatType;
            literalExpr.value = ((Long) literalValue).doubleValue();
        } else if (expType.tag == TypeTags.DECIMAL &&
                !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) {
            // Decimal context: keep the textual value (hex ints cannot be decimal).
            literalType = symTable.decimalType;
            literalExpr.value = String.valueOf(literalValue);
        } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) {
            // Validate the value fits the expected integer subtype's range.
            literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue);
            if (literalType == symTable.semanticError) {
                return symTable.semanticError;
            }
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            // Finite context: try each numeric member kind in precedence order.
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned32IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            // Union context: prefer int-compatible members, then integer
            // subtypes, then finite members of int/byte/float/decimal kind.
            Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
            BType intSubType = null;
            boolean intOrIntCompatibleTypeFound = false;
            for (BType memType : memberTypes) {
                if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) ||
                        memType.tag == TypeTags.BYTE) {
                    intSubType = memType;
                } else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON ||
                        memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) {
                    intOrIntCompatibleTypeFound = true;
                }
            }
            if (intOrIntCompatibleTypeFound) {
                return setLiteralValueAndGetType(literalExpr, symTable.intType);
            }
            if (intSubType != null) {
                return setLiteralValueAndGetType(literalExpr, intSubType);
            }
            BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) {
                return setLiteralValueAndGetType(literalExpr, symTable.byteType);
            }
            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) {
                return setLiteralValueAndGetType(literalExpr, symTable.floatType);
            }
            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
                return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            }
            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
        }
    // --- Float literals ---
    } else if (literalType.tag == TypeTags.FLOAT) {
        String literal = String.valueOf(literalValue);
        String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal);
        boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal);
        if (expType.tag == TypeTags.DECIMAL) {
            // Explicitly-float (f-suffixed) or hex literals cannot be decimal.
            if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) {
                dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                        symTable.floatType);
                resultType = symTable.semanticError;
                return resultType;
            }
            literalType = symTable.decimalType;
            literalExpr.value = numericLiteral;
        } else if (expType.tag == TypeTags.FLOAT) {
            literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral));
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (!isDiscriminatedFloat
                    && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) expType;
            BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
            if (unionMember != symTable.noType) {
                return unionMember;
            }
        }
    // --- Decimal literals: delegated entirely ---
    } else if (literalType.tag == TypeTags.DECIMAL) {
        return decimalLiteral(literalValue, literalExpr, expType);
    // --- Single-character string literals (candidate string:Char values) ---
    } else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) {
        if (expType.tag == TypeTags.CHAR_STRING) {
            return symTable.charStringType;
        }
        if (expType.tag == TypeTags.UNION) {
            Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
            for (BType memType : memberTypes) {
                if (TypeTags.isStringTypeTag(memType.tag)) {
                    return setLiteralValueAndGetType(literalExpr, memType);
                } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA ||
                        memType.tag == TypeTags.ANY) {
                    return setLiteralValueAndGetType(literalExpr, symTable.charStringType);
                } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType,
                        literalExpr)) {
                    setLiteralValueForFiniteType(literalExpr, symTable.charStringType);
                    return literalType;
                }
            }
        }
        boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr);
        if (foundMember) {
            setLiteralValueForFiniteType(literalExpr, literalType);
            return literalType;
        }
    // --- All other literal kinds: only finite/union matching applies ---
    } else {
        if (this.expType.tag == TypeTags.FINITE) {
            boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr);
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        } else if (this.expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) this.expType;
            boolean foundMember = unionType.getMemberTypes()
                    .stream()
                    .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        }
    }
    // Byte-array literals (base16/base64) are typed as byte[].
    if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) {
        literalType = new BArrayType(symTable.byteType);
    }
    return literalType;
}
/**
 * For a float/decimal literal checked against a union, finds a member the
 * literal can take: first a member whose tag matches {@code desiredType} (or a
 * json/anydata/any member), then finite members holding float values, then a
 * decimal member, then finite members holding decimal values. Returns
 * symTable.noType when nothing matches.
 */
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
    Set<BType> memberTypes = expType.getMemberTypes();
    if (memberTypes.stream()
            .anyMatch(memType -> memType.tag == desiredType.tag
                    || memType.tag == TypeTags.JSON
                    || memType.tag == TypeTags.ANYDATA
                    || memType.tag == TypeTags.ANY)) {
        return setLiteralValueAndGetType(literalExpr, desiredType);
    }
    // NOTE(review): this scan is hard-coded to floatType even when desiredType
    // is decimal — looks intentional for the float-literal caller, but confirm
    // the ordering is right for the decimal-literal caller.
    BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            return setType;
        }
    }
    if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
        return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
    }
    finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            return setType;
        }
    }
    return symTable.noType;
}
/**
 * Reports whether the finite type's value space contains a member with the
 * given type tag to which the literal is assignable.
 */
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression member : finiteType.getValueSpace()) {
        boolean tagMatches = member.getBType().tag == targetMemberTypeTag;
        if (tagMatches && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) member, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Resolves a decimal literal against the expected type. A d-suffixed decimal
 * in float context is an error; finite and union contexts are searched for a
 * decimal member; otherwise the literal is typed as decimal with its
 * discriminator suffix stripped from the stored value.
 */
private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) {
    String literal = String.valueOf(literalValue);
    if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) {
        dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                symTable.decimalType);
        resultType = symTable.semanticError;
        return resultType;
    }
    if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
        BFiniteType finiteType = (BFiniteType) expType;
        if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
            BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            setLiteralValueForFiniteType(literalExpr, valueType);
            return valueType;
        }
    } else if (expType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) expType;
        BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
        if (unionMember != symTable.noType) {
            return unionMember;
        }
    }
    // Default: plain decimal, with any "d"/"D" discriminator removed.
    literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal);
    resultType = symTable.decimalType;
    return symTable.decimalType;
}
// Marks the literal as resolved via a finite-type context: installs an
// implicit cast to the overall expected type, publishes the resolved type,
// and flags the literal so visit(BLangLiteral) skips the generic check.
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
    types.setImplicitCastExpr(literalExpr, type, this.expType);
    this.resultType = type;
    literalExpr.isFiniteContext = true;
}
/**
 * Collects, from all finite members of the union, the value-space entries
 * whose type tag matches {@code matchType}, and returns a new finite type over
 * them; returns semanticError when the union has no finite members or no
 * values of the requested kind.
 */
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
    List<BFiniteType> finiteTypeMembers = unionType.getMemberTypes().stream()
            .filter(memType -> memType.tag == TypeTags.FINITE)
            .map(memFiniteType -> (BFiniteType) memFiniteType)
            .collect(Collectors.toList());
    if (finiteTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }
    int tag = matchType.tag;
    Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
    for (BFiniteType finiteType : finiteTypeMembers) {
        for (BLangExpression expression : finiteType.getValueSpace()) {
            if (expression.getBType().tag == tag) {
                // Add directly: the previous per-type HashSet buffer
                // randomized insertion order before addAll, defeating the
                // deterministic ordering this LinkedHashSet exists to provide.
                matchedValueSpace.add(expression);
            }
        }
    }
    if (matchedValueSpace.isEmpty()) {
        return symTable.semanticError;
    }
    return new BFiniteType(null, matchedValueSpace);
}
/**
 * Validates an int literal against an expected integer subtype. Each case
 * returns the subtype when the value fits its range and falls through to the
 * shared error report below otherwise.
 */
private BType getIntLiteralType(Location location, BType expType, BType literalType,
                                Object literalValue) {
    switch (expType.tag) {
        case TypeTags.INT:
            return symTable.intType;
        case TypeTags.BYTE:
            if (types.isByteLiteralValue((Long) literalValue)) {
                return symTable.byteType;
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (types.isSigned32LiteralValue((Long) literalValue)) {
                return symTable.signed32IntType;
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (types.isSigned16LiteralValue((Long) literalValue)) {
                return symTable.signed16IntType;
            }
            break;
        case TypeTags.SIGNED8_INT:
            if (types.isSigned8LiteralValue((Long) literalValue)) {
                return symTable.signed8IntType;
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (types.isUnsigned32LiteralValue((Long) literalValue)) {
                return symTable.unsigned32IntType;
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (types.isUnsigned16LiteralValue((Long) literalValue)) {
                return symTable.unsigned16IntType;
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (types.isUnsigned8LiteralValue((Long) literalValue)) {
                return symTable.unsigned8IntType;
            }
            break;
        default:
            // Non-integer expected tags fall through to the error below.
    }
    dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType);
    resultType = symTable.semanticError;
    return resultType;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    // With no contextually expected type (or a bare readonly), infer a tuple
    // type from the members, then check it against the expected type.
    if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.READONLY) {
        BType inferredType = getInferredTupleType(listConstructor, expType);
        if (inferredType == symTable.semanticError) {
            resultType = symTable.semanticError;
        } else {
            resultType = types.checkType(listConstructor, inferredType, expType);
        }
        return;
    }
    // Otherwise resolve the constructor against the expected list-like type.
    resultType = checkListConstructorCompatibility(expType, listConstructor);
}
/**
 * Type-checks a table constructor. Three contexts are handled: no/any/anydata
 * expected type (infer the row type from the rows), a table expected type
 * (check rows against its constraint), and a union expected type (probe each
 * member with diagnostics muted and require exactly one match).
 */
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    // --- No usable expected type: infer everything from the rows. ---
    if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) {
        List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env);
        for (BType memType : memTypes) {
            if (memType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
        }
        // An empty constructor gives nothing to infer the member type from.
        if (tableConstructorExpr.recordLiteralList.size() == 0) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE);
            resultType = symTable.semanticError;
            return;
        }
        BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr);
        BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null);
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            recordLiteral.setBType(inherentMemberType);
        }
        if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) {
            resultType = symTable.semanticError;
            return;
        }
        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }
        resultType = tableType;
        return;
    }
    // Work against the effective type of an intersection expected type.
    BType applicableExpType = expType.tag == TypeTags.INTERSECTION ?
            ((BIntersectionType) expType).effectiveType : expType;
    // --- Table expected type: check each row against the constraint. ---
    if (applicableExpType.tag == TypeTags.TABLE) {
        List<BType> memTypes = new ArrayList<>();
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            BLangRecordLiteral clonedExpr = recordLiteral;
            // In exploratory mode, check a clone so the original AST is untouched.
            if (this.nonErrorLoggingCheck) {
                clonedExpr.cloneAttempt++;
                clonedExpr = nodeCloner.cloneNode(recordLiteral);
            }
            BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint);
            if (recordType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
            memTypes.add(recordType);
        }
        // Inline-defined map-constrained tables have their own validation path.
        if (((BTableType) applicableExpType).constraint.tag == TypeTags.MAP &&
                ((BTableType) applicableExpType).isTypeInlineDefined) {
            validateMapConstraintTable(tableConstructorExpr, applicableExpType);
            return;
        }
        if (!(validateTableType((BTableType) applicableExpType,
                tableConstructorExpr.recordLiteralList) &&
                validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) {
            resultType = symTable.semanticError;
            return;
        }
        BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType),
                null);
        // Propagate readonly-ness from the expected table type.
        if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) {
            tableType.flags |= Flags.READONLY;
        }
        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }
        // Inherit the expected key-field list when the constructor has none.
        BTableType expectedTableType = (BTableType) applicableExpType;
        if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) {
            tableType.fieldNameList = expectedTableType.fieldNameList;
        }
        resultType = tableType;
    // --- Union expected type: probe each member with diagnostics muted. ---
    } else if (applicableExpType.tag == TypeTags.UNION) {
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();
        List<BType> matchingTypes = new ArrayList<>();
        BUnionType expectedType = (BUnionType) applicableExpType;
        for (BType memType : expectedType.getMemberTypes()) {
            dlog.resetErrorCount();
            BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr);
            }
            BType resultType = checkExpr(clonedTableExpr, env, memType);
            if (resultType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(matchingTypes, resultType)) {
                matchingTypes.add(resultType);
            }
        }
        // Restore diagnostic state exactly as it was before probing.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (matchingTypes.isEmpty()) {
            // No member matched: report against the inferred table type.
            BLangTableConstructorExpr exprToLog = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(tableConstructorExpr);
            }
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getInferredTableType(exprToLog));
        } else if (matchingTypes.size() != 1) {
            // More than one member matched: the constructor is ambiguous.
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
        } else {
            // Exactly one match: re-check for real against that member.
            resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0));
            return;
        }
        resultType = symTable.semanticError;
    } else {
        resultType = symTable.semanticError;
    }
}
/**
 * Infers a table type for a constructor by type-checking its rows; returns
 * semanticError when any row fails to type-check.
 */
private BType getInferredTableType(BLangTableConstructorExpr exprToLog) {
    List<BType> rowTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env);
    for (BType rowType : rowTypes) {
        if (rowType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    return new BTableType(TypeTags.TABLE, inferTableMemberType(rowTypes, exprToLog), null);
}
/**
 * Validates the constructor's key specifier (if any) against the row values
 * and records the key field names on the table type. Returns true when a
 * validation error occurred (resultType already set), false otherwise.
 */
private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) {
    if (tableConstructorExpr.tableKeySpecifier == null) {
        return false;
    }
    // Compute the key-field name list once; the original evaluated
    // getTableKeyNameList twice for the same specifier.
    List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
    if (!validateTableKeyValue(fieldNameList, tableConstructorExpr.recordLiteralList)) {
        resultType = symTable.semanticError;
        return true;
    }
    tableType.fieldNameList = fieldNameList;
    return false;
}
/**
 * Folds the row types into a single member type: the expected constraint when
 * there are no rows, the first row's type when every other row is assignable
 * to the accumulated union, or the union of all non-assignable row types.
 */
private BType inferTableMemberType(List<BType> memTypes, BType expType) {
    if (memTypes.isEmpty()) {
        return ((BTableType) expType).constraint;
    }
    LinkedHashSet<BType> result = new LinkedHashSet<>();
    result.add(memTypes.get(0));
    BUnionType unionType = BUnionType.create(null, result);
    for (int i = 1; i < memTypes.size(); i++) {
        BType source = memTypes.get(i);
        // Only widen the union when the row type is not already covered;
        // the union is rebuilt so the next assignability check sees it.
        if (!types.isAssignable(source, unionType)) {
            result.add(source);
            unionType = BUnionType.create(null, result);
        }
    }
    if (unionType.getMemberTypes().size() == 1) {
        return memTypes.get(0);
    }
    return unionType;
}
/**
 * Infers an inherent record type for a table constructor with no expected
 * type: unions the fields of all row record types, marks fields present in
 * every row as required (read-only when they are key fields) and the rest as
 * optional. Fields whose names collide across rows make inference ambiguous.
 */
private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) {
    BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
    List<String> keySpecifierFieldNames = new ArrayList<>();
    // All fields seen in any row (union), in encounter order.
    Set<BField> allFieldSet = new LinkedHashSet<>();
    for (BType memType : memTypes) {
        allFieldSet.addAll(((BRecordType) memType).fields.values());
    }
    // Fields present in every row (intersection).
    Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet);
    for (BType memType : memTypes) {
        commonFieldSet.retainAll(((BRecordType) memType).fields.values());
    }
    List<String> requiredFieldNames = new ArrayList<>();
    if (keySpecifier != null) {
        for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
            requiredFieldNames.add(((BLangIdentifier) identifierNode).value);
            keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
        }
    }
    List<String> fieldNames = new ArrayList<>();
    for (BField field : allFieldSet) {
        String fieldName = field.name.value;
        // Two distinct BField entries with the same name (different types
        // across rows) make the member type ambiguous.
        if (fieldNames.contains(fieldName)) {
            dlog.error(tableConstructorExpr.pos,
                    DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY,
                    fieldName);
            return symTable.semanticError;
        }
        fieldNames.add(fieldName);
        boolean isOptional = true;
        for (BField commonField : commonFieldSet) {
            if (commonField.name.value.equals(fieldName)) {
                isOptional = false;
                requiredFieldNames.add(commonField.name.value);
            }
        }
        if (isOptional) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL));
        } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) {
            // Key fields are both required and read-only.
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) + Flags.asMask(EnumSet.of(Flag.READONLY));
        } else if (requiredFieldNames.contains(fieldName)) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED));
        }
    }
    return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos);
}
/**
 * Materializes a sealed record type from the given fields to serve as a
 * table's inferred constraint: creates the type symbol, defines each field in
 * its scope, and registers a synthetic type definition with an init function.
 */
private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);
    for (BField field : allFieldSet) {
        recordSymbol.scope.define(field.name, field.symbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = allFieldSet.stream().collect(getFieldCollector());
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    // Register the synthetic record so later phases see a real type definition.
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
    // Sealed: no rest fields beyond those inferred from the rows.
    recordType.sealed = true;
    recordType.restFieldType = symTable.noType;
    return recordType;
}
/**
 * Collector that maps fields by name into a LinkedHashMap (preserving
 * encounter order); duplicate names indicate a programming error and fail
 * fast with IllegalStateException.
 */
private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
    return Collectors.toMap(
            field -> field.name.value,
            Function.identity(),
            (u, v) -> {
                throw new IllegalStateException(String.format("Duplicate key %s", u));
            },
            LinkedHashMap::new);
}
/**
 * Validates a table type against a table constructor's rows: an inline-defined constraint must
 * be a mapping type, and a declared key specifier must be valid for the constraint and be
 * backed by constant values in every row. Sets {@code resultType} to semanticError on failure.
 *
 * @return true when the table type is valid for the given rows
 */
private boolean validateTableType(BTableType tableType, List<BLangRecordLiteral> recordLiterals) {
    BType constraint = tableType.constraint;
    if (tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType)) {
        dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
        resultType = symTable.semanticError;
        return false;
    }
    List<String> fieldNameList = tableType.fieldNameList;
    if (fieldNameList != null) {
        // For inline-defined types, check the key fields against the (effective) constraint
        // before checking that the rows give constant values for the key fields.
        boolean isKeySpecifierValidated = !tableType.isTypeInlineDefined || validateKeySpecifier(fieldNameList,
                constraint.tag != TypeTags.INTERSECTION ? constraint :
                        ((BIntersectionType) constraint).effectiveType,
                tableType.keyPos);
        return (isKeySpecifierValidated && validateTableKeyValue(fieldNameList, recordLiterals));
    }
    return true;
}
/**
 * Ensures every key-specifier field has a constant value expression in every row of the table
 * constructor. On the first violation an error is logged, {@code resultType} is set to
 * semanticError and false is returned.
 */
private boolean validateTableKeyValue(List<String> keySpecifierFieldNames,
                                      List<BLangRecordLiteral> recordLiterals) {
    for (String keyField : keySpecifierFieldNames) {
        for (BLangRecordLiteral row : recordLiterals) {
            BLangRecordKeyValueField keyValueField = getRecordKeyValueField(row, keyField);
            boolean hasConstantValue = keyValueField != null && isConstExpression(keyValueField.getValue());
            if (!hasConstantValue) {
                dlog.error(row.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, keyField);
                resultType = symTable.semanticError;
                return false;
            }
        }
    }
    return true;
}
/**
 * Returns true when the expression is a constant expression: a literal/constructor-like node,
 * a reference to a constant symbol, or a parenthesised constant expression.
 */
private boolean isConstExpression(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        // A variable reference is constant only when it resolves to a constant symbol.
        return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    }
    if (kind == NodeKind.GROUP_EXPR) {
        // Parenthesised expression: constness is that of the wrapped expression.
        return isConstExpression(((BLangGroupExpr) expression).expression);
    }
    return kind == NodeKind.LITERAL
            || kind == NodeKind.NUMERIC_LITERAL
            || kind == NodeKind.STRING_TEMPLATE_LITERAL
            || kind == NodeKind.XML_ELEMENT_LITERAL
            || kind == NodeKind.XML_TEXT_LITERAL
            || kind == NodeKind.LIST_CONSTRUCTOR_EXPR
            || kind == NodeKind.TABLE_CONSTRUCTOR_EXPR
            || kind == NodeKind.RECORD_LITERAL_EXPR
            || kind == NodeKind.TYPE_CONVERSION_EXPR
            || kind == NodeKind.UNARY_EXPR
            || kind == NodeKind.BINARY_EXPR
            || kind == NodeKind.TYPE_TEST_EXPR
            || kind == NodeKind.TERNARY_EXPR;
}
/**
 * Finds the key-value field with the given name in a record literal, or null when the literal
 * has no such field.
 */
private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral,
                                                        String fieldName) {
    for (RecordLiteralNode.RecordField entry : recordLiteral.fields) {
        BLangRecordKeyValueField keyValueEntry = (BLangRecordKeyValueField) entry;
        if (fieldName.equals(keyValueEntry.key.toString())) {
            return keyValueEntry;
        }
    }
    // No field with a matching key.
    return null;
}
/**
 * Validates key-specifier field names against a table constraint type. Each key field must
 * exist in the constraint, be readonly, be required, and have an anydata-assignable type.
 * On the first violation an error is logged, {@code resultType} is set to semanticError and
 * false is returned.
 */
public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint,
                                    Location pos) {
    for (String name : fieldNameList) {
        BField constraintField = types.getTableConstraintField(constraint, name);
        if (constraintField == null) {
            dlog.error(pos, DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, name, constraint);
            resultType = symTable.semanticError;
            return false;
        }
        long fieldFlags = constraintField.symbol.flags;
        if (!Symbols.isFlagOn(fieldFlags, Flags.READONLY)) {
            dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, name);
            resultType = symTable.semanticError;
            return false;
        }
        if (!Symbols.isFlagOn(fieldFlags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, name);
            resultType = symTable.semanticError;
            return false;
        }
        if (!types.isAssignable(constraintField.type, symTable.anydataType)) {
            dlog.error(pos, DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, name, constraint);
            resultType = symTable.semanticError;
            return false;
        }
    }
    return true;
}
/**
 * Validates a table constructor against the expected table type: an explicit key specifier must
 * match the type's declared key fields (or validate against the constraint when none are
 * declared), and any key type constraint must match the specifier's field count and field types.
 * Sets {@code resultType} to semanticError on failure.
 *
 * @return true when the constructor is valid for the table type
 */
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr,
                                             BTableType tableType) {
    BType constraintType = tableType.constraint;
    if (tableConstructorExpr.tableKeySpecifier != null) {
        List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
        // No declared key fields on the type: validate the specifier against the constraint.
        if (tableType.fieldNameList == null &&
                !validateKeySpecifier(fieldNameList,
                        constraintType.tag != TypeTags.INTERSECTION ? constraintType :
                                ((BIntersectionType) constraintType).effectiveType,
                        tableConstructorExpr.tableKeySpecifier.pos)) {
            return false;
        }
        // Declared key fields must match the specifier exactly (same names, same order).
        if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH,
                    tableType.fieldNameList.toString(), fieldNameList.toString());
            resultType = symTable.semanticError;
            return false;
        }
    }
    BType keyTypeConstraint = tableType.keyTypeConstraint;
    if (keyTypeConstraint != null) {
        // Flatten the key constraint to the list of member types (a tuple contributes each
        // of its members; any other type is a single-member key).
        List<BType> memberTypes = new ArrayList<>();
        if (keyTypeConstraint.tag == TypeTags.TUPLE) {
            for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) {
                memberTypes.add((BType) type);
            }
        } else {
            memberTypes.add(keyTypeConstraint);
        }
        // `key<never>` with no specifier is trivially satisfied.
        if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) {
            return true;
        }
        // The specifier must exist and name exactly as many fields as the key constraint has members.
        if (tableConstructorExpr.tableKeySpecifier == null ||
                tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) {
            dlog.error(tableConstructorExpr.pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT,
                    memberTypes.size(),
                    tableConstructorExpr.tableKeySpecifier == null ?
                            0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size());
            resultType = symTable.semanticError;
            return false;
        }
        List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier.
                fieldNameIdentifierList;
        int index = 0;
        // Each specifier field's type must be assignable to the matching key-constraint member.
        for (IdentifierNode identifier : fieldNameIdentifierList) {
            BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
            if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
                dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
                        fieldNameIdentifierList.toString(), memberTypes.toString());
                resultType = symTable.semanticError;
                return false;
            }
            index++;
        }
    }
    return true;
}
/**
 * Validates a table whose constraint is a map type: such tables may not declare key fields or
 * a key type constraint, and their constructors may not use a key specifier. Sets
 * {@code resultType} to the expected type on success, semanticError otherwise.
 */
public void validateMapConstraintTable(BLangTableConstructorExpr tableConstructorExpr, BType expType) {
    BTableType tableType = (BTableType) expType;
    if (tableType.fieldNameList != null || tableType.keyTypeConstraint != null) {
        dlog.error(tableType.keyPos,
                DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        resultType = symTable.semanticError;
        return;
    }
    if (tableConstructorExpr != null) {
        if (tableConstructorExpr.tableKeySpecifier != null) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
        if (!validateTableType(tableType, tableConstructorExpr.recordLiteralList)) {
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = expType;
}
/**
 * Extracts the field names declared in a table key specifier, in declaration order.
 */
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
    List<String> keyNames = new ArrayList<>(tableKeySpecifier.fieldNameIdentifierList.size());
    tableKeySpecifier.fieldNameIdentifierList.forEach(
            identifier -> keyNames.add(((BLangIdentifier) identifier).value));
    return keyNames;
}
/**
 * Builds the key type for a table from its key field names and constraint type: the single
 * field's type for one key field, a tuple of the field types otherwise. Returns semanticError
 * when there are no field names or a name is missing from the constraint.
 */
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
    if (fieldNames == null) {
        return symTable.semanticError;
    }
    List<BType> keyMemberTypes = new ArrayList<>();
    for (String name : fieldNames) {
        BField constraintField = types.getTableConstraintField(constraintType, name);
        if (constraintField == null) {
            // A key field missing from the constraint invalidates the whole key type.
            return symTable.semanticError;
        }
        keyMemberTypes.add(constraintField.type);
    }
    return keyMemberTypes.size() == 1 ? keyMemberTypes.get(0) : new BTupleType(keyMemberTypes);
}
/**
 * Checks whether a list constructor can be typed as {@code bType}. Unions are probed member by
 * member with diagnostics muted, expecting exactly one compatible member; intersections recurse
 * on their effective type; otherwise the compatible non-union type (array/tuple/readonly/
 * typedesc) is checked directly. Returns semanticError on failure, logging diagnostics.
 */
private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Probe each member silently; the error count detects probes that logged errors.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        int errorCount = this.dlog.errorCount();
        this.nonErrorLoggingCheck = true;
        this.dlog.mute();
        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }
            BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }
            dlog.resetErrorCount();
            BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor);
            // Only error-free, not-yet-seen results count as compatible members.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }
        // Restore the previous logging state before reporting the outcome.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (compatibleTypes.isEmpty()) {
            // No member matched: infer a tuple type for a useful incompatibility message.
            BLangListConstructorExpr exprToLog = listConstructor;
            if (this.nonErrorLoggingCheck) {
                // Check a clone so the probe leaves no types on the original node.
                listConstructor.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(listConstructor);
            }
            BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType);
            if (!erroredExpType && inferredTupleType != symTable.semanticError) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType);
            }
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            // More than one member matched: the constructor is ambiguous.
            dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
            return symTable.semanticError;
        }
        // Exactly one compatible member: re-check against it with logging enabled.
        return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor);
    }
    if (tag == TypeTags.INTERSECTION) {
        return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor);
    }
    BType possibleType = getListConstructorCompatibleNonUnionType(bType);
    switch (possibleType.tag) {
        case TypeTags.ARRAY:
            return checkArrayType(listConstructor, (BArrayType) possibleType);
        case TypeTags.TUPLE:
            return checkTupleType(listConstructor, (BTupleType) possibleType);
        case TypeTags.READONLY:
            return checkReadOnlyListType(listConstructor);
        case TypeTags.TYPEDESC:
            // `[T1, T2]` in typedesc position: the result is a typedesc of the member types.
            List<BType> results = new ArrayList<>();
            listConstructor.isTypedescExpr = true;
            for (int i = 0; i < listConstructor.exprs.size(); i++) {
                results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
            }
            List<BType> actualTypes = new ArrayList<>();
            for (int i = 0; i < listConstructor.exprs.size(); i++) {
                final BLangExpression expr = listConstructor.exprs.get(i);
                if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                    actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
                } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
                } else {
                    actualTypes.add(results.get(i));
                }
            }
            if (actualTypes.size() == 1) {
                listConstructor.typedescType = actualTypes.get(0);
            } else {
                listConstructor.typedescType = new BTupleType(actualTypes);
            }
            return new BTypedescType(listConstructor.typedescType, null);
    }
    // Not a list-compatible type: infer a tuple type to report a precise mismatch.
    BLangListConstructorExpr exprToLog = listConstructor;
    if (this.nonErrorLoggingCheck) {
        listConstructor.cloneAttempt++;
        exprToLog = nodeCloner.cloneNode(listConstructor);
    }
    if (bType == symTable.semanticError) {
        // Expected type already errored: still type-check members, but log nothing extra here.
        getInferredTupleType(exprToLog, symTable.semanticError);
    } else {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType,
                getInferredTupleType(exprToLog, symTable.noType));
    }
    return symTable.semanticError;
}
/**
 * Resolves the non-union type actually used for list-constructor checking: array, tuple,
 * readonly and typedesc pass through; json/anydata/any map to the corresponding array type
 * (its immutable counterpart when the type is readonly); an intersection yields its effective
 * type. Any other type is not list-constructor compatible and yields semanticError.
 */
private BType getListConstructorCompatibleNonUnionType(BType type) {
    switch (type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.READONLY:
        case TypeTags.TYPEDESC:
            return type;
        case TypeTags.JSON:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANYDATA:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANY:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
    }
    return symTable.semanticError;
}
/**
 * Checks a list constructor against an array type: an inferred-length array is sealed to the
 * constructor's size; a fixed-length array may not have more members, and may have fewer only
 * when the element type has a filler value. Every member is then checked against the element
 * type (all members are checked so every diagnostic is reported).
 */
private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) {
    BType elementType = arrayType.eType;
    int exprCount = listConstructor.exprs.size();
    if (arrayType.state == BArrayState.INFERRED) {
        // `T[*]`: take the length from the constructor and seal the array.
        arrayType.size = exprCount;
        arrayType.state = BArrayState.CLOSED;
    } else if (arrayType.state != BArrayState.OPEN && arrayType.size != exprCount) {
        if (arrayType.size < exprCount) {
            dlog.error(listConstructor.pos,
                    DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size,
                    exprCount);
            return symTable.semanticError;
        }
        // Fewer members than the fixed length: allowed only with a filler value.
        if (!types.hasFillerValue(elementType)) {
            dlog.error(listConstructor.pos,
                    DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType);
            return symTable.semanticError;
        }
    }
    boolean hasError = false;
    for (BLangExpression memberExpr : listConstructor.exprs) {
        if (exprIncompatible(elementType, memberExpr)) {
            hasError = true;
        }
    }
    return hasError ? symTable.semanticError : arrayType;
}
/**
 * Checks a list constructor against a tuple type: omitted trailing members are allowed only
 * when they have filler values; extra members are allowed only with a rest type. Each fixed
 * member is checked against its member type and each extra member against the rest type
 * (all members are checked so every diagnostic is reported).
 */
private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) {
    List<BLangExpression> memberExprs = listConstructor.exprs;
    List<BType> memberTypes = tupleType.tupleTypes;
    BType restType = tupleType.restType;
    int exprCount = memberExprs.size();
    int typeCount = memberTypes.size();
    if (exprCount < typeCount) {
        // Missing members must each be fillable.
        for (int i = exprCount; i < typeCount; i++) {
            if (!types.hasFillerValue(memberTypes.get(i))) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
                        "tuple and expression size does not match");
                return symTable.semanticError;
            }
        }
    } else if (exprCount > typeCount && restType == null) {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
                "tuple and expression size does not match");
        return symTable.semanticError;
    }
    boolean hasError = false;
    int fixedMemberCount = Math.min(exprCount, typeCount);
    for (int i = 0; i < fixedMemberCount; i++) {
        if (exprIncompatible(memberTypes.get(i), memberExprs.get(i))) {
            hasError = true;
        }
    }
    // Members beyond the fixed prefix are checked against the rest type.
    for (int i = fixedMemberCount; i < exprCount; i++) {
        if (exprIncompatible(restType, memberExprs.get(i))) {
            hasError = true;
        }
    }
    return hasError ? symTable.semanticError : tupleType;
}
/**
 * Checks a list constructor against the {@code readonly} type. In probe mode every member is
 * merely checked for readonly compatibility; otherwise a readonly tuple type is inferred and
 * the constructor is checked against it.
 */
private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) {
    if (this.nonErrorLoggingCheck) {
        // Probe mode: just make sure every member can be a readonly value.
        for (BLangExpression memberExpr : listConstructor.exprs) {
            if (exprIncompatible(symTable.readonlyType, memberExpr)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }
    BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType);
    return inferredType == symTable.semanticError ? symTable.semanticError
            : types.checkType(listConstructor, inferredType, symTable.readonlyType);
}
/**
 * Returns true when the expression cannot be typed as {@code eType}. An already-checked
 * expression reuses its recorded type; in probe mode a clone is checked so no types are left
 * on the original node.
 */
private boolean exprIncompatible(BType eType, BLangExpression expr) {
    if (expr.typeChecked) {
        return expr.getBType() == symTable.semanticError;
    }
    BLangExpression target = expr;
    if (this.nonErrorLoggingCheck) {
        expr.cloneAttempt++;
        target = nodeCloner.cloneNode(expr);
    }
    return checkExpr(target, this.env, eType) == symTable.semanticError;
}
// Convenience overload: type-checks each expression with no expected type.
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) {
    return checkExprList(exprs, env, symTable.noType);
}
/**
 * Type-checks each expression against {@code expType} under the given environment and returns
 * the per-expression result types. The checker's current env/expType are saved and restored
 * around the loop.
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> types = new ArrayList<>();
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    this.env = env;
    this.expType = expType;
    for (BLangExpression e : exprs) {
        // checkExpr records each outcome in resultType.
        checkExpr(e, this.env, expType);
        types.add(resultType);
    }
    this.env = prevEnv;
    this.expType = preExpType;
    return types;
}
/**
 * Infers a tuple type from a list constructor's members, checking each against
 * {@code expType}. Returns semanticError when any member fails; the tuple is flagged readonly
 * when the expected type is {@code readonly}.
 */
private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) {
    List<BType> memberTypes = checkExprList(listConstructor.exprs, env, expType);
    for (BType memberType : memberTypes) {
        if (memberType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    BTupleType inferredTupleType = new BTupleType(memberTypes);
    if (expType.tag == TypeTags.READONLY) {
        inferredTupleType.flags |= Flags.READONLY;
    }
    return inferredTupleType;
}
/**
 * Type-checks a mapping constructor. With no expected type (or {@code readonly}) an anonymous
 * record type is inferred first; object expected types are rejected outright. The result is the
 * effective mapping type, which accounts for `readonly` fields in the constructor.
 */
public void visit(BLangRecordLiteral recordLiteral) {
    int expTypeTag = expType.tag;
    if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) {
        expType = defineInferredRecordType(recordLiteral, expType);
    } else if (expTypeTag == TypeTags.OBJECT) {
        dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType);
        resultType = symTable.semanticError;
        return;
    }
    resultType = getEffectiveMappingType(recordLiteral,
            checkMappingConstructorCompatibility(expType, recordLiteral));
}
/**
 * Accounts for `readonly` fields in a mapping constructor: when the constructor declares
 * readonly fields that the applicable mapping type does not already make readonly, a new record
 * type is created in which those fields are required+readonly while the remaining fields (or
 * the map's constraint, as the rest type) are carried over unchanged.
 *
 * @param recordLiteral         the mapping constructor expression
 * @param applicableMappingType the mapping type the constructor was checked against
 * @return the applicable type itself when nothing needs to change, otherwise the new record type
 */
private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) {
    // Nothing to do for errored types or records that are already fully readonly.
    if (applicableMappingType == symTable.semanticError ||
            (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags,
                    Flags.READONLY))) {
        return applicableMappingType;
    }
    // Collect constructor fields marked `readonly` (spread fields are skipped).
    Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>();
    LinkedHashMap<String, BField> applicableTypeFields =
            applicableMappingType.tag == TypeTags.RECORD ? ((BRecordType) applicableMappingType).fields :
                    new LinkedHashMap<>();
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            continue;
        }
        String name;
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field;
            if (!keyValueField.readonly) {
                continue;
            }
            BLangExpression keyExpr = keyValueField.key.expr;
            if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                name = ((BLangSimpleVarRef) keyExpr).variableName.value;
            } else {
                name = (String) ((BLangLiteral) keyExpr).value;
            }
        } else {
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
            if (!varNameField.readonly) {
                continue;
            }
            name = varNameField.variableName.value;
        }
        // Fields already readonly in the applicable type add nothing new.
        if (applicableTypeFields.containsKey(name) &&
                Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) {
            continue;
        }
        readOnlyFields.put(name, field);
    }
    if (readOnlyFields.isEmpty()) {
        return applicableMappingType;
    }
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);
    LinkedHashMap<String, BField> newFields = new LinkedHashMap<>();
    // The readonly fields become required+readonly fields typed by their value expressions.
    for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) {
        RecordLiteralNode.RecordField field = readOnlyEntry.getValue();
        String key = readOnlyEntry.getKey();
        Name fieldName = names.fromString(key);
        BType readOnlyFieldType;
        if (field.isKeyValueField()) {
            readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType();
        } else {
            readOnlyFieldType = ((BLangRecordVarNameField) field).getBType();
        }
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
            add(Flag.REQUIRED);
            add(Flag.READONLY);
        }}), fieldName, pkgID, readOnlyFieldType, recordSymbol,
                ((BLangNode) field).pos, VIRTUAL);
        newFields.put(key, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }
    BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags);
    if (applicableMappingType.tag == TypeTags.MAP) {
        // Map constraint: keep the record open with the map's constraint as the rest type.
        recordType.sealed = false;
        recordType.restFieldType = ((BMapType) applicableMappingType).constraint;
    } else {
        BRecordType applicableRecordType = (BRecordType) applicableMappingType;
        boolean allReadOnlyFields = true;
        // Carry over the applicable record's remaining fields unchanged.
        for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) {
            String fieldName = origEntry.getKey();
            BField field = origEntry.getValue();
            if (readOnlyFields.containsKey(fieldName)) {
                continue;
            }
            BVarSymbol origFieldSymbol = field.symbol;
            long origFieldFlags = origFieldSymbol.flags;
            if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) {
                allReadOnlyFields = false;
            }
            BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID,
                    origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL);
            newFields.put(fieldName, new BField(field.name, null, fieldSymbol));
            recordSymbol.scope.define(field.name, fieldSymbol);
        }
        recordType.sealed = applicableRecordType.sealed;
        recordType.restFieldType = applicableRecordType.restFieldType;
        // A sealed record whose fields are all readonly is itself readonly.
        if (recordType.sealed && allReadOnlyFields) {
            recordType.flags |= Flags.READONLY;
            recordType.tsymbol.flags |= Flags.READONLY;
        }
    }
    recordType.fields = newFields;
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;
    // Materialize a type-definition node (with an init function) for the generated record.
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            recordLiteral.pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
    if (applicableMappingType.tag == TypeTags.MAP) {
        recordLiteral.expectedType = applicableMappingType;
    }
    return recordType;
}
/**
 * Checks whether a mapping constructor can be typed as {@code bType}. Unions are probed member
 * by member with diagnostics muted, expecting exactly one compatible member; intersections
 * recurse on their effective type; otherwise the compatible non-union type (map/record/
 * readonly) is validated field by field. Returns semanticError on failure, logging diagnostics.
 */
private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Probe each member silently; the error count detects probes that logged errors.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();
        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }
            BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }
            dlog.resetErrorCount();
            BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType,
                    mappingConstructor);
            // Only error-free, not-yet-seen results count as compatible members.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }
        // Restore the previous logging state before reporting the outcome.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (compatibleTypes.isEmpty()) {
            if (!erroredExpType) {
                reportIncompatibleMappingConstructorError(mappingConstructor, bType);
            }
            // Still check the fields so their own errors are reported.
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        }
        // Exactly one compatible member: re-check against it with logging enabled.
        return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor);
    }
    if (tag == TypeTags.INTERSECTION) {
        return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor);
    }
    BType possibleType = getMappingConstructorCompatibleNonUnionType(bType);
    switch (possibleType.tag) {
        case TypeTags.MAP:
            return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType :
                    symTable.semanticError;
        case TypeTags.RECORD:
            // Records additionally require that no required field is left unspecified.
            boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType);
            boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
                    mappingConstructor.fields,
                    mappingConstructor.pos);
            return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError;
        case TypeTags.READONLY:
            return checkReadOnlyMappingType(mappingConstructor);
    }
    reportIncompatibleMappingConstructorError(mappingConstructor, bType);
    validateSpecifiedFields(mappingConstructor, symTable.semanticError);
    return symTable.semanticError;
}
/**
 * Checks a mapping constructor against the {@code readonly} type. In probe mode every field's
 * value expression (or the whole var-name/spread expression) is merely checked for readonly
 * compatibility; otherwise a record type is inferred and the constructor is checked against it.
 */
private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) {
    if (this.nonErrorLoggingCheck) {
        for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
            // Pick the expression that carries the field's value.
            BLangExpression valueExpr;
            if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
            } else if (field.isKeyValueField()) {
                valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
            } else {
                valueExpr = (BLangRecordVarNameField) field;
            }
            if (exprIncompatible(symTable.readonlyType, valueExpr)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }
    BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType);
    if (inferredType == symTable.semanticError) {
        return symTable.semanticError;
    }
    return checkMappingConstructorCompatibility(inferredType, mappingConstructor);
}
/**
 * Resolves the non-union type actually used for mapping-constructor checking: map, record and
 * readonly pass through; json/anydata/any map to the corresponding map type (its immutable
 * counterpart when the type is readonly); an intersection yields its effective type. Any other
 * type is not mapping-constructor compatible and yields semanticError.
 */
private BType getMappingConstructorCompatibleNonUnionType(BType type) {
    switch (type.tag) {
        case TypeTags.MAP:
        case TypeTags.RECORD:
        case TypeTags.READONLY:
            return type;
        case TypeTags.JSON:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.ANYDATA:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANY:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
    }
    return symTable.semanticError;
}
// True for the two mapping-compatible type kinds: records and maps.
private boolean isMappingConstructorCompatibleType(BType type) {
    switch (type.tag) {
        case TypeTags.RECORD:
        case TypeTags.MAP:
            return true;
        default:
            return false;
    }
}
/**
 * Reports the most specific diagnostic for a mapping constructor that is not compatible with
 * {@code expType}. For a two-member record|nil union the record's own field/required-field
 * errors are reported directly; for other unions the error depends on whether any member is
 * mapping-compatible; otherwise a generic "no compatible type" error is logged.
 */
private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) {
    if (expType == symTable.semanticError) {
        return;
    }
    if (expType.tag != TypeTags.UNION) {
        dlog.error(mappingConstructorExpr.pos,
                DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
        return;
    }
    BUnionType unionType = (BUnionType) expType;
    BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
    // Special case `Record?`: surface the record's own validation errors.
    if (memberTypes.length == 2) {
        BRecordType recType = null;
        if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[0];
        } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[1];
        }
        if (recType != null) {
            validateSpecifiedFields(mappingConstructorExpr, recType);
            validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos);
            return;
        }
    }
    // If some member is mapping-compatible, the constructor itself is the problem.
    for (BType bType : memberTypes) {
        if (isMappingConstructorCompatibleType(bType)) {
            dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR,
                    unionType);
            return;
        }
    }
    dlog.error(mappingConstructorExpr.pos,
            DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType);
}
/**
 * Checks every specified field of a mapping constructor against the possible type, returning
 * false if any field fails. All fields are checked so every diagnostic is reported.
 */
private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) {
    boolean allFieldsValid = true;
    for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
        if (checkMappingField(field, possibleType) == symTable.semanticError) {
            allFieldsValid = false;
        }
    }
    return allFieldsValid;
}
/**
 * Verifies that every required field of the record type is present among the specified fields
 * (fields whose type is never/contains a required never member are exempt). Logs an error per
 * missing field and returns false when any are missing.
 */
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
                                       Location pos) {
    HashSet<String> specifiedNames = getFieldNames(specifiedFields);
    boolean hasAllRequiredFields = true;
    for (BField field : type.fields.values()) {
        if (specifiedNames.contains(field.name.value)) {
            continue;
        }
        boolean required = Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED);
        if (required && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
            hasAllRequiredFields = false;
        }
    }
    return hasAllRequiredFields;
}
/**
 * Collects the statically known names contributed by the specified fields of a mapping
 * constructor: key-value field names (skipping computed keys), var-name field names, and the
 * required field names of spread-operator fields.
 */
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) {
    HashSet<String> fieldNames = new HashSet<>();
    for (RecordLiteralNode.RecordField specifiedField : specifiedFields) {
        if (specifiedField.isKeyValueField()) {
            String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField);
            if (name == null) {
                // Computed keys have no statically known name.
                continue;
            }
            fieldNames.add(name);
        } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField));
        } else {
            // Remaining kind here is the spread operator field.
            fieldNames.addAll(getSpreadOpFieldRequiredFieldNames(
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField));
        }
    }
    return fieldNames;
}
/**
 * Returns the statically known name of a key-value field, or null for computed keys and key
 * expressions that are neither simple variable references nor literals.
 */
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
    BLangRecordKey key = field.key;
    if (key.computedKey) {
        // `[expr]: value` — the name is only known at runtime.
        return null;
    }
    BLangExpression keyExpr = key.expr;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpr).variableName.value;
    }
    if (keyKind == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) keyExpr).value;
    }
    return null;
}
// The field name of a `{x}`-style var-name field is the variable's own name.
private String getVarNameFieldName(BLangRecordVarNameField field) {
    return field.variableName.value;
}
/**
 * Returns the names of the non-optional fields contributed by a spread-operator field. The
 * spread expression is type-checked here; non-record spread types contribute no names.
 */
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) {
    BType spreadType = checkExpr(field.expr, env);
    if (spreadType.tag != TypeTags.RECORD) {
        return Collections.emptyList();
    }
    List<String> requiredNames = new ArrayList<>();
    ((BRecordType) spreadType).getFields().values().forEach(bField -> {
        if (!Symbols.isOptional(bField.symbol)) {
            requiredNames.add(bField.name.value);
        }
    });
    return requiredNames;
}
/**
 * Type-checks a worker flush expression. An explicit worker name must resolve to an existing
 * worker; the expression's actual type is {@code error?}.
 */
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    if (workerFlushExpr.workerIdentifier != null) {
        String workerName = workerFlushExpr.workerIdentifier.getValue();
        if (!this.workerExists(this.env, workerName)) {
            this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
        } else {
            BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName));
            if (symbol != symTable.notFoundSymbol) {
                workerFlushExpr.workerSymbol = symbol;
            }
        }
    }
    // flush evaluates to error|().
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, actualType, expType);
}
/**
 * Type-checks a synchronous send (`->>`): resolves the target worker (which must exist) and
 * requires the sent value to be cloneable. With no expected type the expression types as nil.
 */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier));
    if (symTable.notFoundSymbol.equals(symbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerType = symbol.type;
        syncSendExpr.workerSymbol = symbol;
    }
    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);
    // Only cloneable values may be sent between workers.
    if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND,
                syncSendExpr.expr.getBType());
    }
    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(this.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
    }
    syncSendExpr.expectedType = expType;
    resultType = expType == symTable.noType ? symTable.nilType : expType;
}
/**
 * Type-checks a worker receive (`<-`): resolves the sending worker and takes the expected type
 * as the receive's type. A receive with no expected type is an error — its type cannot be
 * inferred.
 */
@Override
public void visit(BLangWorkerReceive workerReceiveExpr) {
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier));
    workerReceiveExpr.env = this.env;
    if (symTable.notFoundSymbol.equals(symbol)) {
        workerReceiveExpr.workerType = symTable.semanticError;
    } else {
        workerReceiveExpr.workerType = symbol.type;
        workerReceiveExpr.workerSymbol = symbol;
    }
    // The receive's type comes from the context; with none it cannot be used.
    if (symTable.noType == this.expType) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.setBType(this.expType);
    resultType = this.expType;
}
/**
 * Returns true when a worker with the given name is visible in the environment: either the
 * default worker, or a symbol whose type is a worker-derived future.
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (DEFAULT_WORKER_NAME.equals(workerName)) {
        return true;
    }
    BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName));
    if (symbol == this.symTable.notFoundSymbol || symbol.type.tag != TypeTags.FUTURE) {
        return false;
    }
    return ((BFutureType) symbol.type).workerDerivative;
}
@Override
public void visit(BLangConstRef constRef) {
    // Resolves the referenced constant (possibly module-qualified) and adds an
    // implicit cast to the expected type when one is required.
    constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env,
            names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName));

    types.setImplicitCastExpr(constRef, constRef.getBType(), expType);
    resultType = constRef.getBType();
}
public void visit(BLangSimpleVarRef varRefExpr) {
    // Type-checks a simple variable reference, which may name a variable, a
    // type (yielding a typedesc), a constant, or an xmlns prefix.
    BType actualType = symTable.semanticError;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    // '_' may only be written to; reading it is an error.
    if (varName == Names.IGNORE) {
        if (varRefExpr.isLValue) {
            varRefExpr.setBType(this.symTable.anyType);
        } else {
            varRefExpr.setBType(this.symTable.semanticError);
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDERSCORE_NOT_ALLOWED);
        }
        // Synthesize a symbol so later phases see a resolved reference.
        varRefExpr.symbol = new BVarSymbol(0, true, varName, env.enclPkg.symbol.pkgID, varRefExpr.getBType(),
                env.scope.owner, varRefExpr.pos, VIRTUAL);
        resultType = varRefExpr.getBType();
        return;
    }

    // Resolve the (possibly empty) module prefix first.
    Name compUnitName = getCurrentCompUnit(varRefExpr);
    varRefExpr.pkgSymbol =
            symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName);
    if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) {
        varRefExpr.symbol = symTable.notFoundSymbol;
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias);
    }

    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // An xmlns prefix reference evaluates to the namespace URI string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName);
        // Inside an object body, also try the mangled attached-function name.
        if (symbol == symTable.notFoundSymbol && env.enclType != null) {
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclType.getBType().tsymbol);
        }

        if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSelfReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            // Referencing an outer-scope variable from a closure captures it.
            markAndRegisterClosureVariable(symbol, varRefExpr.pos, env);
        } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) {
            // A bare type reference is a typedesc value.
            actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null);
            varRefExpr.symbol = symbol;
        } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) symbol;
            varRefExpr.symbol = constSymbol;
            BType symbolType = symbol.type;
            // Keep the finite (singleton) type when the context expects one;
            // otherwise fall back to the constant's broad literal type.
            if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE ||
                    (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream()
                            .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                    types.isAssignable(symbolType, memType)))) {
                actualType = symbolType;
            } else {
                actualType = constSymbol.literalType;
            }

            // Constants can never appear on the LHS of an assignment.
            if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) {
                actualType = symTable.semanticError;
                dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE);
            }
        } else {
            varRefExpr.symbol = symbol;
            logUndefinedSymbolError(varRefExpr.pos, varName.value);
        }
    }

    // An array with an inferred (open sealed) dimension cannot take its size
    // from a variable reference.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }

    resultType = types.checkType(varRefExpr, actualType, expType);
}
@Override
public void visit(BLangRecordVarRef varRefExpr) {
    // Builds an anonymous record type describing the record binding pattern on
    // the LHS of a destructuring assignment, checking each field reference
    // (and the optional rest reference) in lvalue mode.
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();

    String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID);
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName),
                                                                env.enclPkg.symbol.pkgID, null, env.scope.owner,
                                                                varRefExpr.pos, SOURCE);
    symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env);

    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference;
        bLangVarReference.isLValue = true;
        checkExpr(recordRefField.variableReference, env);
        if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol;
        BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                                  new BVarSymbol(0, names.fromIdNode(recordRefField.variableName),
                                                 env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol,
                                                 varRefExpr.pos, SOURCE));
        fields.put(field.name.value, field);
    }

    BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
    if (restParam != null) {
        checkExpr(restParam, env);
        // BUGFIX: OR with the accumulated flag instead of overwriting it, so a
        // valid rest reference cannot mask unresolved field references found in
        // the loop above (mirrors the handling in visit(BLangErrorVarRef)).
        unresolvedReference = unresolvedReference || !isValidVariableReference(restParam);
    }

    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }

    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name,
                                       env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos,
                                       SOURCE);

    // The rest reference determines the record's rest-field type: closed when
    // absent, otherwise derived from the rest value's record/map/other type.
    if (restParam == null) {
        bRecordType.sealed = true;
        bRecordType.restFieldType = symTable.noType;
    } else if (restParam.getBType() == symTable.semanticError) {
        bRecordType.restFieldType = symTable.mapType;
    } else {
        BType restFieldType;
        if (restParam.getBType().tag == TypeTags.RECORD) {
            restFieldType = ((BRecordType) restParam.getBType()).restFieldType;
        } else if (restParam.getBType().tag == TypeTags.MAP) {
            restFieldType = ((BMapType) restParam.getBType()).constraint;
        } else {
            restFieldType = restParam.getBType();
        }
        bRecordType.restFieldType = restFieldType;
    }

    resultType = bRecordType;
}
@Override
public void visit(BLangErrorVarRef varRefExpr) {
    // Type-checks an error binding pattern used as a variable reference.
    // With an explicit type reference the indirect path applies instead.
    if (varRefExpr.typeNode != null) {
        BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env);
        varRefExpr.setBType(bType);
        checkIndirectErrorVarRef(varRefExpr);
        resultType = bType;
        return;
    }

    // The message reference must accept a string value.
    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
        if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) {
            dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType,
                    varRefExpr.message.getBType());
        }
    }

    // The cause reference must accept error? .
    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
        if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) {
            dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType,
                    varRefExpr.cause.getBType());
        }
    }

    // Each detail entry must be a plain, resolvable variable reference; field
    // and index accesses are not allowed in binding patterns.
    boolean unresolvedReference = false;
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
        refItem.isLValue = true;
        checkExpr(refItem, env);

        if (!isValidVariableReference(refItem)) {
            unresolvedReference = true;
            continue;
        }

        if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN,
                    refItem);
            unresolvedReference = true;
            continue;
        }

        if (refItem.symbol == null) {
            unresolvedReference = true;
        }
    }

    if (varRefExpr.restVar != null) {
        varRefExpr.restVar.isLValue = true;
        if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            checkExpr(varRefExpr.restVar, env);
            unresolvedReference = unresolvedReference
                    || varRefExpr.restVar.symbol == null
                    || !isValidVariableReference(varRefExpr.restVar);
        }
    }

    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }

    // Derive the rest-field constraint of the resulting error's detail type
    // from the rest reference ('_' and absence both mean anydata|readonly).
    BType errorRefRestFieldType;
    if (varRefExpr.restVar == null) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        errorRefRestFieldType = varRefExpr.restVar.getBType();
    } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) {
        errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint;
    } else {
        dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                varRefExpr.restVar.getBType(), symTable.detailType);
        resultType = symTable.semanticError;
        return;
    }

    BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
            ? symTable.errorType.detailType
            : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
    resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType);
}
// Checks an error var-ref that carries an explicit error-type reference:
// each detail expression is checked against its own type, and the optional
// rest/message/cause references are checked in place (message/cause as
// lvalues). No result type is produced here; the caller sets it.
private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) {
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        checkExpr(detailItem.expr, env);
        checkExpr(detailItem, env, detailItem.expr.getBType());
    }

    if (varRefExpr.restVar != null) {
        checkExpr(varRefExpr.restVar, env);
    }

    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
    }

    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
    }
}
@Override
public void visit(BLangTupleVarRef varRefExpr) {
    // Check every member reference in lvalue mode and gather the member types
    // to build the tuple type of the whole binding pattern.
    List<BType> memberTypes = new ArrayList<>();
    for (BLangExpression memberRef : varRefExpr.expressions) {
        ((BLangVariableReference) memberRef).isLValue = true;
        memberTypes.add(checkExpr(memberRef, env, symTable.noType));
    }
    BTupleType tupleType = new BTupleType(memberTypes);

    if (varRefExpr.restParam != null) {
        BLangExpression restRef = (BLangExpression) varRefExpr.restParam;
        ((BLangVariableReference) restRef).isLValue = true;
        BType restRefType = checkExpr(restRef, env, symTable.noType);
        // The rest reference must itself be a list (array or tuple).
        boolean isListRef = restRefType.tag == TypeTags.ARRAY || restRefType.tag == TypeTags.TUPLE;
        if (!isListRef) {
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, restRefType);
            resultType = symTable.semanticError;
            return;
        }
        // Arrays contribute their element type; tuples are used as-is.
        tupleType.restType = restRefType.tag == TypeTags.ARRAY
                ? ((BArrayType) restRefType).eType
                : restRefType;
    }

    resultType = types.checkType(varRefExpr, tupleType, expType);
}
/**
 * Recursively determines whether a (possibly multidimensional) array type has
 * at least one dimension whose size is still to be inferred (open sealed).
 *
 * @param arrayType the array type to inspect
 * @return true if any dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    if (arrayType.state == BArrayState.INFERRED) {
        return true;
    }
    BType elementType = arrayType.eType;
    return elementType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) elementType);
}
/**
 * Walks outwards from {@code env} to the symbol environment just outside the
 * given invokable, which is where closure variable lookup must start. Arrow
 * expressions and on-fail clauses short-circuit the walk: their immediately
 * enclosing env is returned directly.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    BLangNode enclosingNode = env.enclEnv.node;
    if (enclosingNode != null
            && (enclosingNode.getKind() == NodeKind.ARROW_EXPR
                    || enclosingNode.getKind() == NodeKind.ON_FAIL)) {
        return env.enclEnv;
    }

    // Keep unwrapping while still inside the given invokable.
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}
/**
 * Overload of {@code findEnclosingInvokableEnv} for lookups anchored on a
 * record type node rather than an invokable: walks outwards while still inside
 * the given record type, short-circuiting at arrow-expression or on-fail envs.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
    BLangNode enclosingNode = env.enclEnv.node;
    if (enclosingNode != null
            && (enclosingNode.getKind() == NodeKind.ARROW_EXPR
                    || enclosingNode.getKind() == NodeKind.ON_FAIL)) {
        return env.enclEnv;
    }

    if (env.enclType != null && env.enclType == recordTypeNode) {
        return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode);
    }
    return env;
}
// True when the symbol matches a function parameter by both name and type tag.
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    markLeafNode(fieldAccessExpr);

    // Propagate lvalue-ness to the container so nested accesses are also
    // checked in assignment mode.
    BLangExpression containerExpression = fieldAccessExpr.expr;
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                fieldAccessExpr.isCompoundAssignmentLValue;
    }
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env));

    // ns:field access is only meaningful on xml values.
    if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess
            && !isXmlAccess(fieldAccessExpr)) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION);
        resultType = symTable.semanticError;
        return;
    }

    BType actualType;
    if (fieldAccessExpr.optionalFieldAccess) {
        // x?.f cannot appear on the LHS of an assignment.
        if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
            resultType = symTable.semanticError;
            return;
        }
        actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
                names.fromIdNode(fieldAccessExpr.field));
    } else {
        actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));

        // Updates through this access must not target readonly values or
        // readonly record fields (object fields may still be set inside init).
        if (actualType != symTable.semanticError &&
                (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) {
            if (isAllReadonlyTypes(varRefType)) {
                if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) {
                    dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                            varRefType);
                    resultType = symTable.semanticError;
                    return;
                }
            } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) &&
                    isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) {
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                        fieldAccessExpr.field.value, varRefType);
                resultType = symTable.semanticError;
                return;
            }
        }
    }

    resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
// A type is "all readonly" when the READONLY flag is set; a union qualifies
// only if every member does.
private boolean isAllReadonlyTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Symbols.isFlagOn(type.flags, Flags.READONLY);
    }
    return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isAllReadonlyTypes);
}
// True when the enclosing invokable is exactly the object's init method, in
// which case fields may still be assigned even on a readonly object.
private boolean isInitializationInInit(BType type) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) ((BObjectType) type).tsymbol;
    BAttachedFunction initFunc = objectSymbol.initializerFunc;
    if (initFunc == null || env.enclInvokable == null) {
        return false;
    }
    return env.enclInvokable.symbol == initFunc.symbol;
}
// Determines whether assigning to {@code fieldName} on a record (or a union of
// records) is invalid because the value or the specific field is readonly.
private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) {
    if (type.tag == TypeTags.RECORD) {
        // A fully-readonly record can never be updated.
        if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
            return true;
        }
        BRecordType recordType = (BRecordType) type;
        for (BField field : recordType.fields.values()) {
            if (field.name.value.equals(fieldName)) {
                // Known field: invalid only if that field itself is readonly.
                return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
            }
        }
        // Unknown field: only a sealed (closed) record rejects the update.
        return recordType.sealed;
    }

    // For a union the update is invalid only if invalid for every member.
    boolean invalidForAllMembers = true;
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) {
            invalidForAllMembers = false;
        }
    }
    return invalidForAllMembers;
}
// True when the field access is applied to an xml value, either directly or
// through a nested lax field access whose union result still contains xml.
private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression accessedExpr = fieldAccessExpr.expr;
    BType accessedType = accessedExpr.getBType();
    int typeTag = accessedType.tag;

    if (typeTag == TypeTags.XML || typeTag == TypeTags.XML_ELEMENT) {
        return true;
    }

    boolean nestedLaxUnionAccess = accessedExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
            && hasLaxOriginalType((BLangFieldBasedAccess) accessedExpr)
            && typeTag == TypeTags.UNION;
    if (!nestedLaxUnionAccess) {
        return false;
    }

    Set<BType> memberTypes = ((BUnionType) accessedType).getMemberTypes();
    return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType);
}
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
    markLeafNode(indexBasedAccessExpr);

    // Member access is not defined on typedesc expressions.
    BLangExpression containerExpression = indexBasedAccessExpr.expr;
    if (containerExpression.getKind() ==  NodeKind.TYPEDESC_EXPRESSION) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                ((BLangTypedescExpr) containerExpression).typeNode);
        resultType = symTable.semanticError;
        return;
    }

    // Propagate lvalue-ness to the container so nested accesses are also
    // checked in assignment mode.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                indexBasedAccessExpr.isCompoundAssignmentLValue;
    }

    // A container already typed as string needs no re-check.
    boolean isStringValue = containerExpression.getBType() != null
            && containerExpression.getBType().tag == TypeTags.STRING;
    if (!isStringValue) {
        checkExpr(containerExpression, this.env, symTable.noType);
    }

    // Multi-key member access (e.g. t[k1, k2]) is only valid on tables.
    if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY &&
            containerExpression.getBType().tag != TypeTags.TABLE) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED,
                containerExpression.getBType());
        resultType = symTable.semanticError;
        return;
    }

    BType actualType = checkIndexAccessExpr(indexBasedAccessExpr);
    BType exprType = containerExpression.getBType();
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;

    // Updates through this access must not target readonly values or readonly
    // record fields addressed with a constant key.
    if (actualType != symTable.semanticError &&
            (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) {
        if (isAllReadonlyTypes(exprType)) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                    exprType);
            resultType = symTable.semanticError;
            return;
        } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) &&
                (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) &&
                isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                    getConstFieldName(indexExpr), exprType);
            resultType = symTable.semanticError;
            return;
        }
    }

    // In lvalue position the actual type is used as-is (no contextual check).
    if (indexBasedAccessExpr.isLValue) {
        indexBasedAccessExpr.originalType = actualType;
        indexBasedAccessExpr.setBType(actualType);
        resultType = actualType;
        return;
    }

    this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
public void visit(BLangInvocation iExpr) {
    // A plain call (no attached expression) is a function invocation.
    if (iExpr.expr == null) {
        checkFunctionInvocationExpr(iExpr);
        return;
    }

    // Method calls must not be qualified with a module prefix.
    if (invalidModuleAliasUsage(iExpr)) {
        return;
    }

    // Dispatch on the receiver's type: object method, record-field function
    // pointer, or a lang-lib method on any other type.
    checkExpr(iExpr.expr, this.env, symTable.noType);

    BType varRefType = iExpr.expr.getBType();

    switch (varRefType.tag) {
        case TypeTags.OBJECT:
            checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(iExpr, this.env);
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.SEMANTIC_ERROR:
            break;
        default:
            checkInLangLib(iExpr, varRefType);
    }
}
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Type-checks error(...) constructors: resolves the optional explicit type
    // reference, validates positional args (message, cause), infers the error
    // type by matching the named-arg detail mapping against each candidate's
    // detail type, then validates the detail args against the chosen type.
    BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef;
    if (userProvidedTypeRef != null) {
        symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR);
    }
    validateErrorConstructorPositionalArgs(errorConstructorExpr);

    List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr);

    List<BType> errorDetailTypes = new ArrayList<>();
    for (BType expandedCandidate : expandedCandidates) {
        BType detailType = ((BErrorType) expandedCandidate).detailType;
        errorDetailTypes.add(detailType);
    }

    BType detailCandidate;
    if (errorDetailTypes.size() == 1) {
        detailCandidate = errorDetailTypes.get(0);
    } else {
        detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes));
    }

    // Silently type-check the synthetic detail record literal to pick the
    // candidate whose detail type it matches.
    BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr);
    BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env);

    int index = errorDetailTypes.indexOf(inferredDetailType);
    BType selectedCandidate = index < 0 ? symTable.semanticError : expandedCandidates.get(index);

    if (selectedCandidate != symTable.semanticError
            && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) {
        checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType);
        resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType,
                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        return;
    }

    // Ambiguous inference: multiple candidates but nothing was selected.
    if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) {
        dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType);
    }

    // Fall back to the explicit reference, the single candidate, or error.
    BErrorType errorType;
    if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) {
        errorType = (BErrorType) userProvidedTypeRef.getBType();
    } else if (expandedCandidates.size() == 1) {
        errorType = (BErrorType) expandedCandidates.get(0);
    } else {
        errorType = symTable.errorType;
    }
    List<BLangNamedArgsExpression> namedArgs =
            checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType);

    BType detailType = errorType.detailType;

    if (detailType.tag == TypeTags.MAP) {
        // Map detail: every named arg must satisfy the map constraint.
        BType errorDetailTypeConstraint = ((BMapType) detailType).constraint;
        for (BLangNamedArgsExpression namedArgExpr: namedArgs) {
            if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) {
                dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                        namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType());
            }
        }
    } else if (detailType.tag == TypeTags.RECORD) {
        // Record detail: track required fields, reject unknown args on sealed
        // records, and check each arg against its field (or rest) type.
        BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType;

        LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream()
                .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED)
                .map(f -> f.name.value)
                .collect(Collectors.toCollection(LinkedList::new));

        LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields;
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            BField field = targetFields.get(namedArg.name.value);
            Location pos = namedArg.pos;
            if (field == null) {
                if (targetErrorDetailRec.sealed) {
                    dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC,
                            namedArg.name, targetErrorDetailRec);
                } else if (targetFields.isEmpty()
                        && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE,
                            namedArg.name, targetErrorDetailRec);
                }
            } else {
                missingRequiredFields.remove(namedArg.name.value);
                if (!types.isAssignable(namedArg.expr.getBType(), field.type)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                            namedArg.name, field.type, namedArg.expr.getBType());
                }
            }
        }

        for (String requiredField : missingRequiredFields) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField);
        }
    }

    if (userProvidedTypeRef != null) {
        errorConstructorExpr.setBType(userProvidedTypeRef.getBType());
    } else {
        errorConstructorExpr.setBType(errorType);
    }

    resultType = errorConstructorExpr.getBType();
}
// error(msg) / error(msg, cause): the first positional argument is the message
// (string); the optional second is the cause (error?).
private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) {
    var positionalArgs = errorConstructorExpr.positionalArgs;
    if (positionalArgs.isEmpty()) {
        return;
    }

    checkExpr(positionalArgs.get(0), this.env, symTable.stringType);

    if (positionalArgs.size() > 1) {
        checkExpr(positionalArgs.get(1), this.env, symTable.errorOrNilType);
    }
}
// Type-checks the record literal without emitting diagnostics: the log is
// muted and the error count restored afterwards. The mute is only lifted when
// this is not a nested silent check (prev flag was false), so nested silent
// checks compose correctly.
private BType checkExprSilent(BLangRecordLiteral recordLiteral, BType expType, SymbolEnv env) {
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int errorCount = this.dlog.errorCount();
    this.dlog.mute();

    BType type = checkExpr(recordLiteral, env, expType);

    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    dlog.setErrorCount(errorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }

    return type;
}
// Builds a synthetic record literal { name: expr, ... } from the error
// constructor's named args so the detail mapping can be type-checked as a
// record value.
private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangRecordLiteral mappingLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) {
        BLangLiteral keyLiteral = new BLangLiteral();
        keyLiteral.value = namedArg.getName().value;
        keyLiteral.setBType(symTable.stringType);

        BLangRecordKeyValueField keyValueField =
                (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
        keyValueField.key = new BLangRecordKey(keyLiteral);
        keyValueField.valueExpr = (BLangExpression) namedArg.getExpression();

        mappingLiteral.fields.add(keyValueField);
    }
    return mappingLiteral;
}
// Returns the candidate error types for an error(...) constructor: the
// explicit type reference when valid, otherwise candidates derived from the
// expected type, falling back to the plain error type.
private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef;
    if (errorTypeRef != null) {
        // An explicit reference wins when it actually denotes an error type.
        if (errorTypeRef.getBType().tag == TypeTags.ERROR) {
            return List.of(errorTypeRef.getBType());
        }
        // Avoid cascading diagnostics for already-erroneous references.
        if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) {
            dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef);
        }
        return List.of(symTable.errorType);
    }

    // No explicit reference: derive candidates from the expected type.
    if (expType.tag == TypeTags.ERROR) {
        return List.of(expType);
    }
    if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) {
        return expandExpectedErrorTypes(expType);
    }
    return List.of(symTable.errorType);
}
// Collects every error-assignable member of the candidate type, unwrapping
// intersections to their effective types.
private List<BType> expandExpectedErrorTypes(BType candidateType) {
    List<BType> candidates = new ArrayList<>();
    if (candidateType.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) {
            if (!types.isAssignable(memberType, symTable.errorType)) {
                continue;
            }
            candidates.add(memberType.tag == TypeTags.INTERSECTION
                    ? ((BIntersectionType) memberType).effectiveType
                    : memberType);
        }
    } else if (types.isAssignable(candidateType, symTable.errorType)) {
        candidates.add(candidateType.tag == TypeTags.INTERSECTION
                ? ((BIntersectionType) candidateType).effectiveType
                : candidateType);
    }
    return candidates;
}
public void visit(BLangInvocation.BLangActionInvocation aInv) {
    // A plain call (no attached expression) is a function invocation.
    if (aInv.expr == null) {
        checkFunctionInvocationExpr(aInv);
        return;
    }

    // Action calls must not be qualified with a module prefix.
    if (invalidModuleAliasUsage(aInv)) {
        return;
    }

    // Dispatch on the receiver's type; only objects (clients) and record
    // function-pointer fields support action-style invocation.
    checkExpr(aInv.expr, this.env, symTable.noType);
    BLangExpression varRef = aInv.expr;

    switch (varRef.getBType().tag) {
        case TypeTags.OBJECT:
            checkActionInvocation(aInv, (BObjectType) varRef.getBType());
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(aInv, this.env);
            break;
        case TypeTags.NONE:
            dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name);
            resultType = symTable.semanticError;
            break;
        case TypeTags.SEMANTIC_ERROR:
        default:
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType());
            resultType = symTable.semanticError;
            break;
    }
}
// Reports and returns true when a method/action invocation carries a module
// prefix, which is not allowed on receiver-qualified calls.
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
    if (names.fromIdNode(invocation.pkgAlias) == Names.EMPTY) {
        return false;
    }
    dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE);
    return true;
}
public void visit(BLangLetExpression letExpression) {
    // Create a dedicated symbol and scope for the let expression, analyze each
    // let-var declaration inside it, then check the result expression against
    // the expected type within that scope.
    BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())),
                                          new Name(String.format("$let_symbol_%d$", letCount++)),
                                          env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner,
                                          letExpression.pos);
    letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env);
    }
    BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType);
    types.checkType(letExpression, exprType, this.expType);
}
// Resolves the invocation as a lang-lib method on the receiver type, then
// rejects calls that would mutate readonly values or change the storage size
// of fixed-length lists.
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                iExpr.expr.getBType());
        resultType = symTable.semanticError;
        return;
    }

    if (checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) {
        return;
    }

    checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType);
}
// Reports and returns true when a mutating lang-lib function is invoked on a
// readonly value; returns false when the call is allowed.
private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                 BSymbol langLibMethodSymbol) {
    // Only readonly receivers are of interest.
    if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) {
        return false;
    }

    // The called function must be a known mutating function of its module.
    String moduleName = langLibMethodSymbol.pkgID.name.value;
    if (!modifierFunctions.containsKey(moduleName)) {
        return false;
    }
    String funcName = langLibMethodSymbol.name.value;
    if (!modifierFunctions.get(moduleName).contains(funcName)) {
        return false;
    }

    // mergeJson only mutates when the receiver is a map.
    if (funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP) {
        return false;
    }
    // strip does not mutate xml values.
    if (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag)) {
        return false;
    }

    dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
    resultType = symTable.semanticError;
    return true;
}
// A list type is fixed-length when it is a non-open array, a tuple without a
// rest type, or a union whose every member is fixed-length.
private boolean isFixedLengthList(BType type) {
    if (type.tag == TypeTags.ARRAY) {
        return ((BArrayType) type).state != BArrayState.OPEN;
    }
    if (type.tag == TypeTags.TUPLE) {
        return ((BTupleType) type).restType == null;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isFixedLengthList(memberType)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
// Flags lang-lib calls (push/pop/shift/...) that would change the length of a
// fixed-length list, or shift() calls that would break a tuple's shape.
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
    String invocationName = iExpr.name.getValue();
    if (!listLengthModifierFunctions.contains(invocationName)) {
        return;
    }

    if (isFixedLengthList(varRefType)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
    } else if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE, invocationName,
                varRefType);
        resultType = symTable.semanticError;
    }
}
// shift() is illegal on a tuple (or a union made up entirely of tuples) whose
// member types differ from its rest type, since removing the head would
// change the tuple's shape.
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
    if (invocationName.compareTo(FUNCTION_NAME_SHIFT) != 0) {
        return false;
    }

    if (varRefType.tag == TypeTags.TUPLE) {
        return hasDifferentTypeThanRest((BTupleType) varRefType);
    }

    if (varRefType.tag == TypeTags.UNION) {
        // Every member must be such a fixed-shape tuple.
        for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
            if (memberType.tag != TypeTags.TUPLE
                    || !hasDifferentTypeThanRest((BTupleType) memberType)) {
                return false;
            }
        }
        return true;
    }

    return false;
}
// True when the tuple has a rest type and at least one member type that is
// not the same type as the rest type.
private boolean hasDifferentTypeThanRest(BTupleType tupleType) {
    BType restType = tupleType.restType;
    if (restType == null) {
        return false;
    }
    return tupleType.getTupleTypes().stream()
            .anyMatch(memberType -> !types.isSameType(restType, memberType));
}
// Checks an invocation of a function-typed record field (function pointer).
// Falls back to a lang-lib method lookup when the field is missing or not a
// function. Returns true only when the call resolved to a field function.
private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) {
    BType type = checkExpr(iExpr.expr, env);

    BLangIdentifier invocationIdentifier = iExpr.name;

    if (type == symTable.semanticError) {
        return false;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier),
            type.tsymbol);

    if (fieldSymbol == symTable.notFoundSymbol) {
        // No such field: maybe a lang-lib method of the same name exists.
        checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD,
                invocationIdentifier, type);
        return false;
    }

    if (fieldSymbol.kind != SymbolKind.FUNCTION) {
        // Field exists but is not callable: again try the lang-lib fallback.
        checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD,
                fieldSymbol.type);
        return false;
    }

    iExpr.symbol = fieldSymbol;
    iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType);
    checkInvocationParamAndReturnType(iExpr);
    iExpr.functionPointerInvocation = true;
    return true;
}
/**
 * Falls back to a langlib method lookup for the given invocation. If a langlib method is
 * found, only immutable-value-update validity is checked; otherwise the supplied error is
 * logged and the result type is set to semantic-error.
 *
 * @param iExpr       the invocation being checked
 * @param varRefType  the receiver type used for the langlib lookup
 * @param pos         position to report the error at
 * @param errCode     diagnostic to log when no langlib method exists
 * @param diagMsgArgs arguments for the diagnostic message
 */
private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos,
                                        DiagnosticErrorCode errCode, Object... diagMsgArgs) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
    if (langLibMethodSymbol != symTable.notFoundSymbol) {
        checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol);
        return;
    }
    dlog.error(pos, errCode, diagMsgArgs);
    resultType = symTable.semanticError;
}
/**
 * Type-checks an object-constructor expression (`object { ... }`). When the constructor has
 * no explicit type reference, the type-id set of the anonymous class is inherited from the
 * expected type; checking is then delegated to the wrapped type-init expression.
 */
@Override
public void visit(BLangObjectConstructorExpression objectCtorExpression) {
    if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
        BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType();
        if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) {
            // Directly adopt the expected object type's type-id set.
            BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType;
            objectType.typeIdSet = expObjType.typeIdSet;
        } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) {
            // The expected type may be a union; it must contain exactly one definite object type.
            if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
                dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
                        objectCtorExpression.expectedType);
                resultType = symTable.semanticError;
                return;
            }
        }
    }
    visit(objectCtorExpression.typeInit);
}
/**
 * Returns true if the given type resolves to a single definite object type: it must be an
 * object or a union of objects, and collecting type-id sets across all members must yield
 * at most one distinct set. Collected sets are accumulated into {@code typeIdSets}.
 */
private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) {
    int tag = type.tag;
    if (tag != TypeTags.OBJECT && tag != TypeTags.UNION) {
        return false;
    }
    return collectObjectTypeIds(type, typeIdSets, new HashSet<>()) && typeIdSets.size() <= 1;
}
/**
 * Recursively collects the type-id sets of all object types reachable through the given
 * type. Returns false as soon as a non-object, non-union member is encountered.
 * {@code visitedTypes} guards against cyclic union definitions.
 */
private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) {
    switch (type.tag) {
        case TypeTags.OBJECT:
            typeIdSets.add(((BObjectType) type).typeIdSet);
            return true;
        case TypeTags.UNION:
            if (!visitedTypes.add(type)) {
                // Already visited this union: stop the cycle, nothing new to collect.
                return true;
            }
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (!collectObjectTypeIds(memberType, typeIdSets, visitedTypes)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Resolves the type-id set for {@code objectType} from the expected type {@code type}.
 * Succeeds only when {@code type} is a definite object type (an object, or a union whose
 * object members all share one type-id set); on success the set is copied onto
 * {@code objectType} (the empty set when no ids were collected).
 *
 * @return true if the type-id set could be determined and was loaded
 */
private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) {
    Set<BTypeIdSet> typeIdSets = new HashSet<>();
    if (!isDefiniteObjectType(type, typeIdSets)) {
        return false;
    }
    // isDefiniteObjectType guarantees at most one entry here, so the previous
    // iterator-with-hasNext dance (and its unreachable fall-through return) is unnecessary.
    if (typeIdSets.isEmpty()) {
        objectType.typeIdSet = BTypeIdSet.emptySet();
    } else {
        objectType.typeIdSet = typeIdSets.iterator().next();
    }
    return true;
}
/**
 * Type-checks a type-init expression (`new T(...)`, `new(...)`, `new stream<...>(...)`).
 * Resolves the constructed type from the explicit type reference or the expected type,
 * then dispatches on its kind: objects check the `init` function call, streams validate
 * the completion type and optional iterator argument, and unions infer the single member
 * whose `init` signature matches the arguments.
 */
public void visit(BLangTypeInit cIExpr) {
    // `new` with no type reference cannot be inferred from `any`, and records cannot be `new`-ed.
    if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
        dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType);
        resultType = symTable.semanticError;
        return;
    }

    BType actualType;
    if (cIExpr.userDefinedType != null) {
        actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
    } else {
        // No explicit type: infer the constructed type from the expected type.
        actualType = expType;
    }

    if (actualType == symTable.semanticError) {
        //TODO dlog error?
        resultType = symTable.semanticError;
        return;
    }

    if (actualType.tag == TypeTags.INTERSECTION) {
        actualType = ((BIntersectionType) actualType).effectiveType;
    }

    switch (actualType.tag) {
        case TypeTags.OBJECT:
            BObjectType actualObjectType = (BObjectType) actualType;

            if (isObjectConstructorExpr(cIExpr, actualObjectType)) {
                // Object-constructor expression: analyze the generated class definition,
                // applying read-only handling when the expected type or first type
                // reference is readonly.
                BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr,
                                                                                                        env);
                List<BLangType> typeRefs = classDefForConstructor.typeRefs;

                SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);

                if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) {
                    handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                                                      false);
                } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
                                                                  Flags.READONLY)) {
                    handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                                                      true);
                } else {
                    analyzeObjectConstructor(classDefForConstructor, pkgEnv);
                }

                markConstructedObjectIsolatedness(actualObjectType);
            }

            // Only classes can be instantiated; abstract object types cannot.
            if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                        actualType.tsymbol);
                // Still type-check the argument expressions so they carry types for later phases.
                cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                resultType = symTable.semanticError;
                return;
            }

            if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                checkInvocationParam(cIExpr.initInvocation);
                cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
            } else {
                // No `init` function: arguments are only valid if there are none.
                if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
                    return;
                }
            }
            break;
        case TypeTags.STREAM:
            // Stream constructors accept at most one argument (the iterator object).
            if (cIExpr.initInvocation.argExprs.size() > 1) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation);
                resultType = symTable.semanticError;
                return;
            }

            BStreamType actualStreamType = (BStreamType) actualType;
            if (actualStreamType.completionType != null) {
                // The stream's completion type must be nil or contain an error.
                BType completionType = actualStreamType.completionType;
                if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString());
                    resultType = symTable.semanticError;
                    return;
                }
            }

            if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0);
                BType constructType = checkExpr(iteratorExpr, env, symTable.noType);
                BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType);
                if (constructType.tag != TypeTags.OBJECT) {
                    dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                            expectedNextReturnType, constructType);
                    resultType = symTable.semanticError;
                    return;
                }
                // Iterators with a `close` method must satisfy the closeable-iterator
                // abstraction; others only the plain iterator abstraction.
                BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType,
                        BLangCompilerConstants.CLOSE_FUNC);
                if (closeFunc != null) {
                    BType closeableIteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, closeableIteratorType)) {
                        dlog.error(iteratorExpr.pos,
                                DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                } else {
                    BType iteratorType = symTable.langQueryModuleSymbol.scope
                            .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type;
                    if (!types.isAssignable(constructType, iteratorType)) {
                        dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                }
                // The iterator's `next` return type must match the stream's element/completion types.
                BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType);
                if (nextReturnType != null) {
                    types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType,
                            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                } else {
                    dlog.error(constructType.tsymbol.getPosition(),
                            DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType);
                }
            }
            if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) {
                dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                        actualType);
                resultType = symTable.semanticError;
                return;
            }
            resultType = actualType;
            return;
        case TypeTags.UNION:
            // Infer the single union member whose `init` signature matches the arguments.
            List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
            BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
            cIExpr.initInvocation.setBType(symTable.nilType);

            if (matchedType.tag == TypeTags.OBJECT) {
                if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                    cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                    checkInvocationParam(cIExpr.initInvocation);
                    cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                    actualType = matchedType;
                    break;
                } else {
                    if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) {
                        return;
                    }
                }
            }
            types.checkType(cIExpr, matchedType, expType);
            cIExpr.setBType(matchedType);
            resultType = matchedType;
            return;
        default:
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
            resultType = symTable.semanticError;
            return;
    }

    if (cIExpr.initInvocation.getBType() == null) {
        cIExpr.initInvocation.setBType(symTable.nilType);
    }
    // Combine the object type with the init function's error returns, if any.
    BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
    resultType = types.checkType(cIExpr, actualTypeInitType, expType);
}
/**
 * Builds the expected return type of a stream iterator's `next` method:
 * {@code record {| T value; |} | C | ()} where T is the stream's constraint and C its
 * completion type members.
 *
 * @param pos        position used for the synthesized symbols
 * @param streamType the stream whose constraint/completion types drive the record shape
 * @return the synthesized union type
 */
private BUnionType createNextReturnType(Location pos, BStreamType streamType) {
    // Closed record with a single required `value` field of the stream's constraint type.
    BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS);
    recordType.restFieldType = symTable.noType;
    recordType.sealed = true;

    Name fieldName = Names.VALUE;
    BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC,
            fieldName, env.enclPkg.packageID,
            streamType.constraint, env.scope.owner, pos, VIRTUAL));
    field.type = streamType.constraint;
    recordType.fields.put(field.name.value, field);

    recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID,
            recordType, env.scope.owner, pos, VIRTUAL);
    recordType.tsymbol.scope = new Scope(env.scope.owner);
    recordType.tsymbol.scope.define(fieldName, field.symbol);

    // Union: the record, the completion type's members, and nil (stream exhaustion).
    LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
    retTypeMembers.add(recordType);
    retTypeMembers.addAll(types.getAllTypes(streamType.completionType));
    retTypeMembers.add(symTable.nilType);

    BUnionType unionType = BUnionType.create(null);
    unionType.addAll(retTypeMembers);
    unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
            env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);

    return unionType;
}
/**
 * Validates a `new(...)` call against an object type that has no `init` function: such a
 * call must not pass any arguments. On violation, an error is logged, the arguments are
 * still type-checked (so they carry types), and the result type is set to semantic-error.
 *
 * @return true if the invocation is valid for the given object type
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
    boolean hasArgs = !cIExpr.initInvocation.argExprs.isEmpty();
    boolean hasInitFunction = ((BObjectTypeSymbol) objType.tsymbol).initializerFunc != null;
    if (!hasArgs || hasInitFunction) {
        return true;
    }
    dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL,
            cIExpr.initInvocation.name.value);
    for (BLangExpression argExpr : cIExpr.initInvocation.argExprs) {
        checkExpr(argExpr, env, symTable.noType);
    }
    resultType = symTable.semanticError;
    return false;
}
/**
 * Computes the type of a `new` expression from the object type and the `init` function's
 * return type: nil-returning init yields the object type itself; an error-bearing union
 * yields {@code objType | errors}; anything else is a semantic error.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.NIL) {
        return objType;
    }
    if (initRetType.tag != TypeTags.UNION) {
        return symTable.semanticError;
    }
    // Replace nil in the init return union with the constructed object type.
    LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
    retTypeMembers.add(objType);
    retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes());
    retTypeMembers.remove(symTable.nilType);

    BUnionType unionType = BUnionType.create(null, retTypeMembers);
    unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
            Names.EMPTY, env.enclPkg.symbol.pkgID, unionType,
            env.scope.owner, symTable.builtinPos, VIRTUAL);
    return unionType;
}
/**
 * Finds the members of a union expected type that could be constructed by the given
 * `new(...)` expression, matching the arguments against each member's `init` signature.
 * When the union contains exactly one object-like member, it is returned unconditionally.
 *
 * @return the candidate member types (possibly empty)
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
    int objectCount = 0;

    // Count object-like members, including objects reached through intersections.
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        int tag = memberType.tag;

        if (tag == TypeTags.OBJECT) {
            objectCount++;
            continue;
        }

        if (tag != TypeTags.INTERSECTION) {
            continue;
        }

        if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) {
            objectCount++;
        }
    }

    boolean containsSingleObject = objectCount == 1;

    List<BType> matchingLhsMemberTypes = new ArrayList<>();
    // NOTE(review): intersection members are counted above but skipped here, so an
    // object reached only through an intersection never becomes a candidate — confirm
    // whether that is intentional.
    for (BType memberType : lhsUnionType.getMemberTypes()) {
        if (memberType.tag != TypeTags.OBJECT) {
            // member is not an object.
            continue;
        }
        if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
            dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                    lhsUnionType.tsymbol);
        }

        if (containsSingleObject) {
            return Collections.singletonList(memberType);
        }

        BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
        if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
            matchingLhsMemberTypes.add(memberType);
        }
    }
    return matchingLhsMemberTypes;
}
/**
 * Resolves the single constructable type from the matching union members: exactly one
 * match yields that member's type; zero or multiple matches log an error (cannot infer /
 * ambiguous, respectively) and yield semantic-error.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
    if (matchingLhsMembers.size() == 1) {
        // Exactly one member is compatible with the `new(...)` arguments.
        return matchingLhsMembers.get(0).tsymbol.type;
    }
    DiagnosticErrorCode errorCode = matchingLhsMembers.isEmpty()
            ? DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
            : DiagnosticErrorCode.AMBIGUOUS_TYPES;
    dlog.error(cIExpr.pos, errorCode, lhsUnion);
    resultType = symTable.semanticError;
    return symTable.semanticError;
}
/**
 * Checks whether the given invocation arguments can bind to the parameters of the given
 * `init` function; used to infer which member of a union an inferred `new(...)` constructs.
 * All argument expressions are type-checked as a side effect.
 *
 * @param invocationArguments positional and named arguments of the `new(...)` call
 * @param function            the candidate `init` function, or null if the object has none
 * @return true if the arguments are compatible with the function's signature
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
    invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType));

    if (function == null) {
        // No init function: only a bare `new()` can match.
        return invocationArguments.isEmpty();
    }

    if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
        return true;
    }

    // Split the arguments into positional and named.
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    List<BLangExpression> positionalArgs = new ArrayList<>();
    for (BLangExpression argument : invocationArguments) {
        if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            namedArgs.add((BLangNamedArgsExpression) argument);
        } else {
            positionalArgs.add(argument);
        }
    }

    List<BVarSymbol> requiredParams = function.symbol.params.stream()
            .filter(param -> !param.isDefaultable)
            .collect(Collectors.toList());
    // Fewer arguments than required parameters can never match.
    if (requiredParams.size() > invocationArguments.size()) {
        return false;
    }

    List<BVarSymbol> defaultableParams = function.symbol.params.stream()
            .filter(param -> param.isDefaultable)
            .collect(Collectors.toList());

    int givenRequiredParamCount = 0;
    for (int i = 0; i < positionalArgs.size(); i++) {
        if (function.symbol.params.size() > i) {
            // Positional argument binds to the parameter at the same index.
            givenRequiredParamCount++;
            BVarSymbol functionParam = function.symbol.params.get(i);
            if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
            continue;
        }

        if (function.symbol.restParam != null) {
            // Extra positional arguments must fit the rest parameter's element type.
            BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
            if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) {
                return false;
            }
            continue;
        }

        // Extra positional argument with no matching parameter and no rest parameter.
        return false;
    }

    for (BLangNamedArgsExpression namedArg : namedArgs) {
        boolean foundNamedArg = false;
        // Named arguments may only bind parameters after the positionally bound ones.
        List<BVarSymbol> params = function.symbol.params;
        for (int i = givenRequiredParamCount; i < params.size(); i++) {
            BVarSymbol functionParam = params.get(i);
            if (!namedArg.name.value.equals(functionParam.name.value)) {
                continue;
            }
            foundNamedArg = true;
            BType namedArgExprType = checkExpr(namedArg.expr, env);
            // Fix: the isAssignable operands were reversed here (parameter type was
            // checked against the argument type). The argument's type must be
            // assignable TO the parameter's type, mirroring the positional check above.
            if (!types.isAssignable(namedArgExprType, functionParam.type)) {
                return false;
            }
            requiredParams.remove(functionParam);
            defaultableParams.remove(functionParam);
        }
        if (!foundNamedArg) {
            return false;
        }
    }

    // Every required parameter must have been bound by some argument.
    return requiredParams.isEmpty();
}
/**
 * Type-checks a multiple-wait expression (`wait {a: f1, b: f2}`). Against a record
 * expected type the fields are matched individually; against a map (or no/any expected
 * type) a map type is synthesized from the futures' constraint types.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    switch (expType.tag) {
        case TypeTags.RECORD:
            checkTypesForRecords(waitForAllExpr);
            break;
        case TypeTags.MAP:
            // Each key-value's future constraint must fit the map's constraint.
            checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint);
            LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypesForMap.size() == 1) {
                resultType = new BMapType(TypeTags.MAP,
                        memberTypesForMap.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap);
            resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol);
            break;
        case TypeTags.NONE:
        case TypeTags.ANY:
            // No concrete expected type: synthesize a map whose constraint is the
            // union of all the futures' constraint types.
            checkTypesForMap(waitForAllExpr, expType);
            LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypes.size() == 1) {
                resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintType = BUnionType.create(null, memberTypes);
            resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol);
            break;
        default:
            dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos));
            resultType = symTable.semanticError;
            break;
    }
    waitForAllExpr.setBType(resultType);

    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType);
    }
}
/**
 * Builds the record type a multiple-wait expression would naturally produce (one field per
 * key-value pair, typed by the referenced future's constraint); used for error reporting
 * when the expected type is incompatible.
 */
private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr,
                                                Location pos) {
    BRecordType retType = new BRecordType(null, Flags.ANONYMOUS);
    List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs;

    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangIdentifier fieldName;
        if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand form `{f}`: the key itself names the future variable.
            fieldName = keyVal.key;
        } else {
            fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName;
        }

        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName));
        // Unwrap the future's constraint; non-future symbols keep their own type.
        BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type;
        BField field = new BField(names.fromIdNode(keyVal.key), null,
                new BVarSymbol(0, names.fromIdNode(keyVal.key), env.enclPkg.packageID,
                        fieldType, null, keyVal.pos, VIRTUAL));
        retType.fields.put(field.name.value, field);
    }

    retType.restFieldType = symTable.noType;
    retType.sealed = true;
    retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null,
            pos, VIRTUAL);
    return retType;
}
/**
 * Collects the result types contributed by each wait key-value pair, unwrapping future
 * constraints. Insertion order is preserved for deterministic union member ordering.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BLangExpression expr = keyVal.keyExpr != null ? keyVal.keyExpr : keyVal.valueExpr;
        BType exprType = expr.getBType();
        memberTypes.add(exprType.tag == TypeTags.FUTURE ? ((BFutureType) exprType).constraint : exprType);
    }
    return memberTypes;
}
/**
 * Checks every key-value pair of a multiple-wait expression against the same expected
 * constraint type.
 */
private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : waitForAllExpr.keyValuePairs) {
        checkWaitKeyValExpr(keyVal, expType);
    }
}
/**
 * Checks a multiple-wait expression against a record expected type: each key-value pair
 * must match a record field (or the rest field type for open records), sealed records
 * reject unknown keys, and required fields must all be present.
 */
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
    List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
    Map<String, BField> lhsFields = ((BRecordType) expType).fields;

    // A sealed record cannot accept more pairs than it has fields.
    if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) {
        dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                getWaitForAllExprReturnType(waitExpr, waitExpr.pos));
        resultType = symTable.semanticError;
        return;
    }

    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
        String key = keyVal.key.value;
        if (!lhsFields.containsKey(key)) {
            // Unknown key: error for sealed records; otherwise check against the rest field type.
            if (((BRecordType) expType).sealed) {
                dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
                resultType = symTable.semanticError;
            } else {
                BType restFieldType = ((BRecordType) expType).restFieldType;
                checkWaitKeyValExpr(keyVal, restFieldType);
            }
        } else {
            checkWaitKeyValExpr(keyVal, lhsFields.get(key).type);
        }
    }
    // If all fields are validated, check that no required field is missing.
    checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos);

    if (symTable.semanticError != resultType) {
        resultType = expType;
    }
}
/**
 * Reports an error for every required field of the record type that has no corresponding
 * key in the given wait key-value pairs.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                          Location pos) {
    for (BField field : type.fields.values()) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            continue;
        }
        boolean hasField = false;
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValPairs) {
            if (field.name.value.equals(keyVal.key.value)) {
                hasField = true;
                break;
            }
        }
        if (!hasField) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Checks a single wait key-value pair against the expected constraint type by wrapping it
 * in a future type (`future<type>`) and then widening the constraint with `error`.
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
    BLangExpression expr;
    if (keyVal.keyExpr != null) {
        // Shorthand form: the key itself is the variable reference to wait on.
        BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode
                (((BLangSimpleVarRef) keyVal.keyExpr).variableName));
        keyVal.keyExpr.setBType(symbol.type);
        expr = keyVal.keyExpr;
    } else {
        expr = keyVal.valueExpr;
    }
    BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null);
    checkExpr(expr, env, futureType);
    setEventualTypeForExpression(expr, type);
}
/**
 * Widens the future constraint of a wait key-value expression with `error`, since waiting
 * on a non-worker future may yield an error; validates the widened type against the
 * current expected type. Simple worker references are left untouched.
 */
private void setEventualTypeForExpression(BLangExpression expression,
                                          BType currentExpectedType) {
    if (expression == null) {
        return;
    }
    if (isSimpleWorkerReference(expression)) {
        return;
    }

    // NOTE(review): assumes checkWaitKeyValExpr already set a future expected type on
    // this expression — confirm all callers guarantee that.
    BFutureType futureType = (BFutureType) expression.expectedType;
    BType currentType = futureType.constraint;
    if (types.containsErrorType(currentType)) {
        // Error is already accounted for; nothing to widen.
        return;
    }

    BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType);
    if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) &&
            !types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR,
                currentExpectedType, eventualType, expression);
    }
    futureType.constraint = eventualType;
}
/**
 * Widens the result type of a single-future wait expression with `error` (a non-worker
 * future may complete with an error), then validates the widened type against the expected
 * future constraint. Simple worker references and already-error-bearing results are skipped.
 */
private void setEventualTypeForWaitExpression(BLangExpression expression,
                                              Location pos) {
    if ((resultType == symTable.semanticError) ||
            (types.containsErrorType(resultType))) {
        return;
    }
    if (isSimpleWorkerReference(expression)) {
        return;
    }
    BType currentExpectedType = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        // No concrete expectation: the widened type simply becomes the result.
        resultType = eventualType;
        return;
    }

    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * Widens the result type of an alternate wait expression (`wait f1 | f2`) with `error`
 * when any alternative references a non-worker future, then validates the widened type
 * against the expected future constraint.
 *
 * @param expression the wait operand (only binary `|` expressions are processed)
 * @param pos        position used for error reporting
 */
private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) {
    // Skip if checking already failed, the operand is not an alternate (`|`) wait, or
    // the result type already accounts for errors. (A duplicated
    // types.containsErrorType(resultType) check that followed this guard was dead code
    // and has been removed.)
    if ((resultType == symTable.semanticError) ||
            (expression.getKind() != NodeKind.BINARY_EXPR) ||
            (types.containsErrorType(resultType))) {
        return;
    }
    if (!isReferencingNonWorker((BLangBinaryExpr) expression)) {
        // Every alternative is a simple worker reference; no widening needed.
        return;
    }

    BType currentExpectedType = ((BFutureType) expType).constraint;
    BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
    if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
        resultType = eventualType;
        return;
    }

    if (!types.isAssignable(eventualType, currentExpectedType)) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                eventualType, expression);
        resultType = symTable.semanticError;
        return;
    }
    if (resultType.tag == TypeTags.FUTURE) {
        ((BFutureType) resultType).constraint = eventualType;
    } else {
        resultType = eventualType;
    }
}
/**
 * Returns true if the expression is a plain variable reference that resolves to a worker
 * declared in the current environment.
 */
private boolean isSimpleWorkerReference(BLangExpression expression) {
    if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
    // An unresolved reference cannot be a worker.
    return simpleVarRef.symbol != null && workerExists(env, simpleVarRef.variableName.value);
}
/**
 * Returns true if either side of the alternate-wait binary expression references
 * something other than a worker.
 */
private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) {
    return isReferencingNonWorker(binaryExpr.lhsExpr) || isReferencingNonWorker(binaryExpr.rhsExpr);
}
/**
 * Returns true unless the expression is (recursively, for `|` chains) a variable
 * reference that resolves to a worker in the current environment.
 */
private boolean isReferencingNonWorker(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.BINARY_EXPR) {
        return isReferencingNonWorker((BLangBinaryExpr) expression);
    }
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol varRefSymbol = ((BLangSimpleVarRef) expression).symbol;
        return !workerExists(env, varRefSymbol.getName().value);
    }
    // Any other expression form is, by definition, not a worker reference.
    return true;
}
/**
 * Type-checks a ternary conditional (`c ? t : e`). The condition narrows types in the
 * then/else branches; with no expected type the result is whichever branch type the other
 * is assignable to, otherwise the expected type itself.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
    BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);

    // The condition's truth/falsity narrows variable types inside each branch.
    SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
    BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);

    SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env);
    BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);

    if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
            elseType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No expected type: pick the more general of the two branch types.
        if (types.isAssignable(elseType, thenType)) {
            resultType = thenType;
        } else if (types.isAssignable(thenType, elseType)) {
            resultType = elseType;
        } else {
            dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType);
            resultType = symTable.semanticError;
        }
    } else {
        resultType = expType;
    }
}
/**
 * Type-checks a `wait` expression on a single future or an alternate (`f1 | f2`) group.
 * The operand is checked against `future<expType>`, the future constraint(s) are unwrapped
 * into the result type, and the result is widened with `error` where needed.
 */
public void visit(BLangWaitExpr waitExpr) {
    // Note: expType is deliberately replaced with future<expType> for checking the operand.
    expType = new BFutureType(TypeTags.FUTURE, expType, null);
    checkExpr(waitExpr.getExpression(), env, expType);
    // Handle union types in lhs
    if (resultType.tag == TypeTags.UNION) {
        // Unwrap each member's future constraint and collapse singleton unions.
        LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
        if (memberTypes.size() == 1) {
            resultType = memberTypes.toArray(new BType[0])[0];
        } else {
            resultType = BUnionType.create(null, memberTypes);
        }
    } else if (resultType != symTable.semanticError) {
        // Handle other types except error
        resultType = ((BFutureType) resultType).constraint;
    }

    BLangExpression waitFutureExpression = waitExpr.getExpression();
    if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) {
        setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos);
    } else {
        setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos);
    }
    waitExpr.setBType(resultType);

    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint);
    }
}
/**
 * Adds each member of the union to the accumulator, unwrapping future constraints, and
 * returns the accumulator.
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : unionType.getMemberTypes()) {
        memberTypes.add(memberType.tag == TypeTags.FUTURE ? ((BFutureType) memberType).constraint : memberType);
    }
    return memberTypes;
}
/**
 * Type-checks a `trap` expression: the result is the trapped expression's type widened
 * with `error`. Worker-receive operands use a two-pass protocol (the first visit defers
 * checking until the receive's type is known).
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    boolean firstVisit = trapExpr.expr.getBType() == null;
    BType actualType;
    BType exprType = checkExpr(trapExpr.expr, env, expType);
    boolean definedWithVar = expType == symTable.noType;

    if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Defer: the worker receive's type is not yet known on the first pass.
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = trapExpr.getBType();
            exprType = trapExpr.expr.getBType();
        }
    }

    if (expType == symTable.semanticError || exprType == symTable.semanticError) {
        actualType = symTable.semanticError;
    } else {
        // Result type = expression type union error (flattening an existing union).
        LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
        if (exprType.tag == TypeTags.UNION) {
            resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
        } else {
            resultTypes.add(exprType);
        }
        resultTypes.add(symTable.errorType);
        actualType = BUnionType.create(null, resultTypes);
    }

    resultType = types.checkType(trapExpr, actualType, expType);
    if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType);
    }
}
/**
 * Type-checks a binary expression. Handles the special cases of alternate-wait futures
 * (`f1 | f2` against a future expected type) and decimal literal arithmetic, narrows types
 * for `&&`/`||` right operands, then resolves the operator symbol across the various
 * operator categories (XML concat, bitwise shift, arithmetic, equality, comparison).
 */
public void visit(BLangBinaryExpr binaryExpr) {
    // `f1 | f2` under a future expected type is an alternate wait, not bitwise-or.
    if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
        BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
        BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
        // Return if both or atleast one of lhs and rhs types are errors
        if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        resultType = BUnionType.create(null, lhsResultType, rhsResultType);
        return;
    }

    checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);

    SymbolEnv rhsExprEnv;
    BType lhsType;
    if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) {
        // Try checking the operand against the expected numeric type first (see checkAndGetType).
        lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr);
    } else {
        lhsType = checkExpr(binaryExpr.lhsExpr, env);
    }

    if (binaryExpr.opKind == OperatorKind.AND) {
        // `lhs && rhs`: rhs sees types narrowed by lhs being true.
        rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
    } else if (binaryExpr.opKind == OperatorKind.OR) {
        // `lhs || rhs`: rhs sees types narrowed by lhs being false.
        rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
    } else {
        rhsExprEnv = env;
    }

    BType rhsType;

    if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) {
        rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr);
    } else {
        rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
    }

    // Set error type as the actual type.
    BType actualType = symTable.semanticError;

    switch (binaryExpr.opKind) {
        // Do not lookup operator symbol for xml sequence additions
        case ADD:
            BType leftConstituent = getXMLConstituents(lhsType);
            BType rightConstituent = getXMLConstituents(rhsType);
            if (leftConstituent != null && rightConstituent != null) {
                actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null);
                break;
            }
            // Deliberate fall-through: non-XML additions use regular operator resolution.
        default:
            if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
                // Look up operator symbol if both rhs and lhs types aren't error or parser error types
                BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);

                // Try each operator category in turn until one resolves.
                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                            binaryExpr);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                }

                if (opSymbol == symTable.notFoundSymbol) {
                    dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind,
                            lhsType, rhsType);
                } else {
                    // Table equality is not yet supported; reject ==/!= on table-capable types.
                    if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL) &&
                            (couldHoldTableValues(lhsType, new ArrayList<>()) &&
                                    couldHoldTableValues(rhsType, new ArrayList<>()))) {
                        dlog.error(binaryExpr.pos, DiagnosticErrorCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME);
                    }

                    binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
                    actualType = opSymbol.type.getReturnType();
                }
            }
    }

    resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * Speculatively checks the expression against the binary expression's expected type on a
 * clone, with diagnostics muted; if that succeeds cleanly, the real expression is checked
 * with the expected type, otherwise without it. Restores the diagnostic log's error count
 * and mute state before returning.
 */
private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) {
    // Silence diagnostics for the speculative pass on a cloned expression.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();

    expr.cloneAttempt++;
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType);

    // Restore the previous diagnostic state.
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }

    if (errorCount == 0 && exprCompatibleType != symTable.semanticError) {
        return checkExpr(expr, env, binaryExpr.expectedType);
    } else {
        return checkExpr(expr, env);
    }
}
/**
 * Walks outward from {@code env} to the environment whose node is {@code node} and returns
 * a clone of its enclosing environment; falls back to a fresh environment for the node
 * when no such enclosing environment exists.
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
    SymbolEnv current = env;
    while (current != null && current.node != node) {
        current = current.enclEnv;
    }
    if (current == null || current.enclEnv == null) {
        return new SymbolEnv(node, null);
    }
    return current.enclEnv.createClone();
}
/**
 * Builds the environment visible after a query `join` clause: a clone of the chain up to
 * the join node, re-parented so the enclosing scope is the one preceding the last
 * input (`from`/`join`) clause. Falls back to a fresh environment when the node is not
 * found in the chain.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
    SymbolEnv clone = env.createClone();
    while (clone != null && clone.node != node) {
        clone = clone.enclEnv;
    }
    if (clone != null) {
        // Cut the chain just before the most recent input clause.
        clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
    } else {
        clone = new SymbolEnv(node, null);
    }
    return clone;
}
/**
 * Walks outwards through the env chain and returns the nearest enclosing input-clause node
 * (a FROM or JOIN), or {@code null} when none exists.
 */
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        NodeKind kind = current.node.getKind();
        if (kind == NodeKind.FROM || kind == NodeKind.JOIN) {
            return current.node;
        }
    }
    return null;
}
/** A `transactional` expression always evaluates to a boolean. */
public void visit(BLangTransactionalExpr transactionalExpr) {
    resultType = types.checkType(transactionalExpr, symTable.booleanType, expType);
}
/** A `commit` expression produces `error?` (i.e. error|()); check that union against expType. */
public void visit(BLangCommitExpr commitExpr) {
    BType commitResultType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(commitExpr, commitResultType, expType);
}
/**
 * Returns the constituent item type of an XML type: the constraint of an `xml<T>` sequence,
 * the type itself for a non-sequence XML subtype, or {@code null} otherwise.
 */
private BType getXMLConstituents(BType type) {
    if (type.tag == TypeTags.XML) {
        return ((BXMLType) type).constraint;
    }
    return TypeTags.isXMLNonSequenceType(type.tag) ? type : null;
}
/**
 * When the contextual type is decimal, pre-checks both operands of the four basic arithmetic
 * operators against it so numeric literals are inferred as decimal. A no-op otherwise.
 */
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    OperatorKind op = binaryExpr.opKind;
    boolean isBasicArithmetic = op == OperatorKind.ADD || op == OperatorKind.SUB
            || op == OperatorKind.MUL || op == OperatorKind.DIV;
    if (isBasicArithmetic) {
        checkExpr(binaryExpr.lhsExpr, env, expType);
        checkExpr(binaryExpr.rhsExpr, env, expType);
    }
}
/**
 * Type-checks an elvis expression `lhs ?: rhs`. The LHS must be a nullable union; its non-nil
 * part is the LHS contribution, the RHS is checked against the contextual type, and the two
 * must agree when no contextual type is available.
 */
public void visit(BLangElvisExpr elvisExpr) {
    BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
    BType actualType = symTable.semanticError;
    if (lhsType != symTable.semanticError) {
        if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
            // Strip nil from the LHS union: that's the value when the LHS is non-nil.
            BUnionType unionType = (BUnionType) lhsType;
            LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
                    .filter(type -> type.tag != TypeTags.NIL)
                    .collect(Collectors.toCollection(LinkedHashSet::new));
            if (memberTypes.size() == 1) {
                actualType = memberTypes.toArray(new BType[0])[0];
            } else {
                actualType = BUnionType.create(null, memberTypes);
            }
        } else {
            // `?:` is only meaningful when the LHS can be nil.
            dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS,
                    lhsType);
        }
    }
    BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
    BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
            DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else if (expType == symTable.noType) {
        // No contextual type: both branches must have exactly the same type.
        if (types.isSameType(rhsReturnType, lhsReturnType)) {
            resultType = lhsReturnType;
        } else {
            dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType,
                    rhsReturnType);
            resultType = symTable.semanticError;
        }
    } else {
        resultType = expType;
    }
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression has exactly the type of its inner expression.
    resultType = checkExpr(groupExpr.expression, env, expType);
}
/**
 * Type-checks a type-descriptor expression. Resolves the referenced type lazily, then wraps it
 * in typedesc&lt;T&gt; unless the resolved type is already a typedesc (or unresolved/NONE).
 */
public void visit(BLangTypedescExpr accessExpr) {
    if (accessExpr.resolvedType == null) {
        accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    }
    int tag = accessExpr.resolvedType.tag;
    BType actualType = (tag == TypeTags.TYPEDESC || tag == TypeTags.NONE)
            ? accessExpr.resolvedType
            : new BTypedescType(accessExpr.resolvedType, null);
    resultType = types.checkType(accessExpr, actualType, expType);
}
/**
 * Type-checks a unary expression. `untaint` and `typeof` derive the result type from the
 * operand directly; every other operator is resolved against the operand type through the
 * symbol resolver.
 */
public void visit(BLangUnaryExpr unaryExpr) {
    BType exprType;
    BType actualType = symTable.semanticError;
    if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
        // `untaint` does not change the static type.
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = exprType;
        }
    } else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
        // `typeof e` has type typedesc<T> where T is e's static type.
        exprType = checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            actualType = new BTypedescType(exprType, null);
        }
    } else {
        // Propagate the contextual type to the operand for `+`, and for `-` when a decimal
        // is expected (so numeric literals infer as decimal rather than int/float).
        boolean decimalNegation = OperatorKind.SUB.equals(unaryExpr.operator) && expType.tag == TypeTags.DECIMAL;
        boolean isAdd = OperatorKind.ADD.equals(unaryExpr.operator);
        exprType = (decimalNegation || isAdd) ? checkExpr(unaryExpr.expr, env, expType) :
                checkExpr(unaryExpr.expr, env);
        if (exprType != symTable.semanticError) {
            BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType);
            if (symbol == symTable.notFoundSymbol) {
                dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
                        unaryExpr.operator, exprType);
            } else {
                unaryExpr.opSymbol = (BOperatorSymbol) symbol;
                actualType = symbol.type.getReturnType();
            }
        }
    }
    resultType = types.checkType(unaryExpr, actualType, expType);
}
/**
 * Type-checks a cast/conversion expression `<T> e`. Annotation attachments are analysed first;
 * a cast with no target type node is annotation-only. The source expression is speculatively
 * checked against the target type (diagnostics muted, on a clone) to decide whether to
 * type-check it contextually or independently, then castability is verified.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
    BType actualType = symTable.semanticError;
    for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) {
        annAttachment.attachPoints.add(AttachPoint.Point.TYPE);
        semanticAnalyzer.analyzeNode(annAttachment, this.env);
    }
    BLangExpression expr = conversionExpr.expr;
    if (conversionExpr.typeNode == null) {
        // No target type: this is an annotation-only cast, just check the inner expression.
        if (!conversionExpr.annAttachments.isEmpty()) {
            resultType = checkExpr(expr, env, this.expType);
        }
        return;
    }
    BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos,
            symResolver.resolveTypeNode(conversionExpr.typeNode, env));
    conversionExpr.targetType = targetType;
    // --- speculative check: mute diagnostics and probe a clone against the target type ---
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType);
    // --- restore diagnostic state ---
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    int errorCount = this.dlog.errorCount();
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) {
        // Target type fits (or the expr needs inference): check contextually against it.
        checkExpr(expr, env, targetType);
    } else {
        // Otherwise type the expression on its own.
        checkExpr(expr, env, symTable.noType);
    }
    BType exprType = expr.getBType();
    if (types.isTypeCastable(expr, exprType, targetType, this.env)) {
        // Cast is legal; the expression's type is the target type.
        actualType = targetType;
    } else if (exprType != symTable.semanticError && exprType != symTable.noType) {
        dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType);
    }
    resultType = types.checkType(conversionExpr, actualType, this.expType);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // A lambda's type is the type of its underlying function node.
    BType functionType = bLangLambdaFunction.function.getBType();
    bLangLambdaFunction.setBType(functionType);
    // Capture the current scope for closure desugaring later.
    bLangLambdaFunction.capturedClosureEnv = env.createClone();
    if (!this.nonErrorLoggingCheck) {
        // Only register the lambda when this is a real (non-speculative) pass.
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    resultType = types.checkType(bLangLambdaFunction, functionType, expType);
}
/**
 * Type-checks an arrow function (`x => expr`). Arrow functions carry no declared types, so the
 * parameter and return types are inferred entirely from the contextual (LHS) type, which must
 * resolve to exactly one invokable type.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BType expectedType = expType;
    if (expectedType.tag == TypeTags.UNION) {
        // A union context is usable only if it contains exactly one invokable member.
        BUnionType unionType = (BUnionType) expectedType;
        BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE)
                .collect(Collectors.collectingAndThen(Collectors.toList(), list -> {
                            if (list.size() != 1) {
                                return null;
                            }
                            return list.get(0);
                        }
                ));
        if (invokableType != null) {
            expectedType = invokableType;
        }
    }
    if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) {
        // Cannot infer from `function` (any-function) or a non-invokable context.
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        resultType = symTable.semanticError;
        return;
    }
    BInvokableType expectedInvocation = (BInvokableType) expectedType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
    bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType));
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        // No declared return type in the context: adopt the body expression's inferred type.
        expectedInvocation.retType = bLangArrowFunction.body.expr.getBType();
    }
    resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name. Handles `xmlns` declarations inside attributes, rejects
 * an `xmlns` prefix elsewhere, and otherwise resolves the prefix to an in-scope XML namespace
 * symbol (possibly via a string constant in an imported package).
 */
public void visit(BLangXMLQName bLangXMLQName) {
    String prefix = bLangXMLQName.prefix.value;
    resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
    // `xmlns="..."` (default-namespace declaration) inside an attribute.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
            && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns:p="..."` (prefixed namespace declaration) inside an attribute.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // `xmlns` used as an ordinary prefix anywhere else is illegal.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    // Unprefixed names need no namespace resolution.
    if (bLangXMLQName.prefix.value.isEmpty()) {
        return;
    }
    BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix));
    if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        return;
    }
    if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
        logUndefinedSymbolError(bLangXMLQName.pos, prefix);
        bLangXMLQName.setBType(symTable.semanticError);
        return;
    }
    if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) {
        // Prefix names an imported module: the localname must be a string constant holding the URI.
        xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value,
                (BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos);
    }
    if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) {
        resultType = symTable.semanticError;
        return;
    }
    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
    bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI;
}
/**
 * Resolves an XML namespace from a string constant in an imported package. The constant's
 * value is expected in expanded-QName form "{namespaceURI}localName"; a synthetic XMLNS
 * symbol is built from it. Returns {@code null} (after logging) when the constant is missing,
 * not a string, or not in the expected form.
 */
private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix,
                                                 BPackageSymbol pkgSymbol, Location pos) {
    BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env,
            names.fromString(localname), SymTag.CONSTANT);
    if (constSymbol == symTable.notFoundSymbol) {
        // Suppress the error for parser-recovered (missing) nodes to avoid cascading diagnostics.
        if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) {
            dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname);
        }
        return null;
    }
    BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol;
    if (constantSymbol.literalType.tag != TypeTags.STRING) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
        return null;
    }
    String constVal = (String) constantSymbol.value.value;
    int s = constVal.indexOf('{');
    int e = constVal.lastIndexOf('}');
    if (e > s + 1) {
        pkgSymbol.isUsed = true;
        String nsURI = constVal.substring(s + 1, e);
        // NOTE(review): substring(e) keeps the '}' in the symbol name; substring(e + 1) would be
        // the bare local name — confirm whether the brace is intentionally retained.
        String local = constVal.substring(e);
        return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos,
                SOURCE);
    }
    dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
    return null;
}
/**
 * Type-checks an XML attribute: both the name (a QName) and the value must be strings, and the
 * attribute is entered into the enclosing scope.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv attributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    BLangXMLQName attributeName = (BLangXMLQName) bLangXMLAttribute.name;
    checkExpr(attributeName, attributeEnv, symTable.stringType);
    if (attributeName.prefix.value.isEmpty()) {
        // Unprefixed attributes are not in any namespace.
        attributeName.namespaceURI = null;
    }
    checkExpr(bLangXMLAttribute.value, attributeEnv, symTable.stringType);
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
/**
 * Type-checks an XML element literal. Namespace-declaring attributes are checked first (so
 * their symbols exist before ordinary attributes and child tags are resolved), in-scope
 * namespaces actually referenced by a prefix are recorded on the node, and children are
 * normalised by concatenating adjacent nodes of the same kind.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
    // Prefixes referenced anywhere in this element (tag name or attributes).
    Set<String> usedPrefixes = new HashSet<>();
    BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
    if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
        usedPrefixes.add(elemNamePrefix.value);
    }
    // Pass 1: check xmlns attributes first so the namespaces they declare are in scope.
    for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
        if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
            BLangXMLQuotedString value = attribute.value;
            // A namespace URI must be a single literal; interpolation is disallowed.
            if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
                dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION);
            }
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
        BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
        if (prefix != null && !prefix.value.isEmpty()) {
            usedPrefixes.add(prefix.value);
        }
    }
    // Pass 2: check the remaining (non-namespace) attributes.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    if (namespaces.containsKey(defaultNs)) {
        // The default namespace is tracked separately from prefixed ones.
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    // Record only the namespaces whose prefix is actually used in this element.
    for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) {
        if (usedPrefixes.contains(nsEntry.getKey().value)) {
            bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue());
        }
    }
    validateTags(bLangXMLElementLiteral, xmlElementEnv);
    // Merge adjacent children of the same XML kind (e.g. consecutive text fragments).
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType);
        return;
    }
    resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType,
            this.expType);
    if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) {
        // A readonly element requires all of its children to be immutable too.
        markChildrenAsImmutable(bLangXMLElementLiteral);
    }
}
/** True when the attribute declares a namespace: either `xmlns="..."` or `xmlns:p="..."`. */
private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) {
    BLangXMLQName qname = (BLangXMLQName) attribute.name;
    String prefix = qname.prefix.value;
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        return true;
    }
    return prefix.isEmpty() && qname.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
}
/** Maps an XML literal node kind to its XML subtype; anything unrecognised is a comment. */
public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) {
    switch (childXMLExpressions.getKind()) {
        case XML_ELEMENT_LITERAL:
            return symTable.xmlElementType;
        case XML_TEXT_LITERAL:
            return symTable.xmlTextType;
        case XML_PI_LITERAL:
            return symTable.xmlPIType;
        default:
            return symTable.xmlCommentType;
    }
}
/** Enters speculative type-checking mode: diagnostics are suppressed until unMuteErrorLog. */
public void muteErrorLog() {
    this.nonErrorLoggingCheck = true;
    this.dlog.mute();
}
/**
 * Leaves speculative mode, restoring the saved flag and error count. Unmutes the log only if
 * logging was active before — nested speculative checks stay muted.
 */
public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) {
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(errorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
}
/**
 * Wraps an XML item subtype into its sequence type: element/comment/PI become `xml&lt;T&gt;`,
 * while text stays xml:Text (concatenating text yields text, not a sequence).
 */
public BType getXMLSequenceType(BType xmlSubType) {
    int tag = xmlSubType.tag;
    if (tag == TypeTags.XML_ELEMENT) {
        return new BXMLType(symTable.xmlElementType, null);
    }
    if (tag == TypeTags.XML_COMMENT) {
        return new BXMLType(symTable.xmlCommentType, null);
    }
    if (tag == TypeTags.XML_PI) {
        return new BXMLType(symTable.xmlPIType, null);
    }
    return symTable.xmlTextType;
}
/**
 * Type-checks an XML sequence literal. The contextual type must be an XML type, xml:Text, a
 * union of those, or absent. Each item is checked against the contextual type, and the result
 * is narrowed to a single sequence type only when all items share one subtype.
 */
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
    if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT
            && expType != symTable.noType) {
        dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                "XML Sequence");
        resultType = symTable.semanticError;
        return;
    }
    // Collect the distinct item types appearing in the sequence.
    List<BType> xmlTypesInSequence = new ArrayList<>();
    for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) {
        resultType = checkExpr(expressionItem, env, expType);
        if (!xmlTypesInSequence.contains(resultType)) {
            xmlTypesInSequence.add(resultType);
        }
    }
    if (expType.tag == TypeTags.XML || expType == symTable.noType) {
        if (xmlTypesInSequence.size() == 1) {
            // Homogeneous sequence: use the precise sequence type for that item kind.
            resultType = getXMLSequenceType(xmlTypesInSequence.get(0));
            return;
        }
        resultType = symTable.xmlType;
        return;
    }
    if (expType.tag == TypeTags.XML_TEXT) {
        // Text concatenation flattens: the whole sequence is xml:Text.
        resultType = symTable.xmlTextType;
        return;
    }
    // Union context: every member must be xml or xml:Text for the sequence to be valid.
    for (BType item : ((BUnionType) expType).getMemberTypes()) {
        if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) {
            dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                    expType, symTable.xmlType);
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = symTable.xmlType;
}
/**
 * Type-checks an XML text literal: a single empty literal fragment is xml:never (the empty
 * sequence); anything else is xml:Text.
 */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    List<BLangExpression> fragments = bLangXMLTextLiteral.textFragments;
    checkStringTemplateExprs(fragments);
    BLangExpression firstFragment = fragments.get(0);
    boolean isEmptyLiteral = fragments.size() == 1
            && firstFragment.getKind() == NodeKind.LITERAL
            && ((String) ((BLangLiteral) firstFragment).value).isEmpty();
    BType textType = isEmptyLiteral ? symTable.xmlNeverType : symTable.xmlTextType;
    resultType = types.checkType(bLangXMLTextLiteral, textType, expType);
}
/** Type-checks an XML comment literal against xml:Comment (or the contextual XML subtype). */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
    } else {
        resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType,
                this.expType);
    }
}
/** Type-checks a processing-instruction literal: string target/data, xml:PI result. */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments);
    if (expType == symTable.noType) {
        resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
    } else {
        resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType,
                this.expType);
    }
}
/** A quoted XML attribute value is always a string. */
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments);
    resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
/** The `x@[...]` attribute-access syntax is no longer supported; always an error. */
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    dlog.error(xmlAttributeAccessExpr.pos,
            DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS);
    resultType = symTable.semanticError;
}
/** A string template literal always produces a string. */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
/**
 * Type-checks a raw template literal. The contextual type must resolve to a RawTemplate-shaped
 * object; the literal's string parts are checked against its `strings` field type and the
 * interpolations against its `insertions` field type.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType);
    if (type == symTable.semanticError) {
        resultType = type;
        return;
    }
    // determineRawTemplateLiteralType guarantees an object type here.
    BObjectType literalType = (BObjectType) type;
    BType stringsType = literalType.fields.get("strings").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    BType insertionsType = literalType.fields.get("insertions").type;
    if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS,
            rawTemplateLiteral.pos)) {
        type = symTable.semanticError;
    }
    resultType = type;
}
/**
 * Determines the object type a raw template literal should conform to. Without a usable
 * contextual type the default RawTemplate type applies; otherwise the contextual type must be
 * a non-class object with at most two fields and no methods.
 */
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
    if (expType == symTable.noType || containsAnyType(expType)) {
        return symTable.rawTemplateType;
    }
    BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
    BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
            DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE);
    if (type == symTable.semanticError) {
        return type;
    }
    // Raw templates can only be assigned to abstract object types, not classes.
    if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
        return symTable.semanticError;
    }
    BObjectType litObjType = (BObjectType) type;
    BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
    // Only `strings` and `insertions` are allowed as fields.
    if (litObjType.fields.size() > 2) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType);
        type = symTable.semanticError;
    }
    if (!objTSymbol.attachedFuncs.isEmpty()) {
        dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType);
        type = symTable.semanticError;
    }
    return type;
}
/**
 * Checks a raw template's fragment expressions against the list type of the corresponding
 * field (`strings` or `insertions`). Supports array and tuple field types, validating the
 * fragment count against closed-array size / tuple arity first.
 *
 * @return true when at least one fragment failed type-checking (but note: a pure count
 *         mismatch is reported and returns false)
 */
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
                                         DiagnosticCode code, Location pos) {
    // Unwrap readonly intersections to the effective list type.
    BType listType = fieldType.tag != TypeTags.INTERSECTION ? fieldType :
            ((BIntersectionType) fieldType).effectiveType;
    boolean errored = false;
    if (listType.tag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) listType;
        if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) {
            dlog.error(pos, code, arrayType.size, exprs.size());
            return false;
        }
        for (BLangExpression expr : exprs) {
            errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
        }
    } else if (listType.tag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) listType;
        final int size = exprs.size();
        final int requiredItems = tupleType.tupleTypes.size();
        // Too few fragments, or too many without a rest type, is an arity error.
        if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
            dlog.error(pos, code, requiredItems, size);
            return false;
        }
        int i;
        List<BType> memberTypes = tupleType.tupleTypes;
        for (i = 0; i < requiredItems; i++) {
            errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored;
        }
        if (size > requiredItems) {
            // Remaining fragments are checked against the tuple's rest type.
            for (; i < size; i++) {
                errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored;
            }
        }
    } else {
        // Callers guarantee a list type (array/tuple); anything else is a compiler bug.
        throw new IllegalStateException("Expected a list type, but found: " + listType);
    }
    return errored;
}
/** True when the type is `any`, or a union with `any` as a direct member. */
private boolean containsAnyType(BType type) {
    if (type == symTable.anyType) {
        return true;
    }
    return type.tag == TypeTags.UNION
            && ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
}
/**
 * From a (possibly union) contextual type, picks the single member assignable to the
 * RawTemplate type. Zero matches returns the contextual type unchanged (a later check will
 * report it); more than one match is ambiguous and reported here.
 */
private BType getCompatibleRawTemplateType(BType expType, Location pos) {
    if (expType.tag != TypeTags.UNION) {
        return expType;
    }
    List<BType> matches = new ArrayList<>();
    for (BType member : ((BUnionType) expType).getMemberTypes()) {
        if (types.isAssignable(member, symTable.rawTemplateType)) {
            matches.add(member);
        }
    }
    if (matches.isEmpty()) {
        return expType;
    }
    if (matches.size() == 1) {
        return matches.get(0);
    }
    dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
            expType);
    return symTable.semanticError;
}
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Both bounds must be ints; the range itself is an int array.
    checkExpr(intRangeExpression.startExpr, env, symTable.intType);
    checkExpr(intRangeExpression.endExpr, env, symTable.intType);
    resultType = new BArrayType(symTable.intType);
}
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    // A rest arg (`...e`) simply takes the type of the spread expression.
    resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
/** An inferred typedesc default (`<>`) is only valid where a typedesc is expected. */
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
    if (expType.tag == TypeTags.TYPEDESC) {
        resultType = expType;
        return;
    }
    dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc);
    resultType = symTable.semanticError;
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named argument's type is that of its underlying expression.
    resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType());
}
/**
 * Type-checks a (deprecated-style) match expression. Pattern variables are defined in a
 * synthetic block env, each clause expression is checked against the contextual type, and the
 * result is the union of all possible result types.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        // `_`-suffixed variables are ignore patterns and are not entered into the scope.
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, expType);
        pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv));
    });
    LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.semanticError)) {
        actualType = symTable.semanticError;
    } else if (matchExprTypes.size() == 1) {
        actualType = matchExprTypes.toArray(new BType[0])[0];
    } else {
        actualType = BUnionType.create(null, matchExprTypes);
    }
    resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // Remember whether this `check` sits inside a query before delegating to the shared logic.
    checkWithinQueryExpr = isWithinQuery();
    visitCheckAndCheckPanicExpr(checkedExpr);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic` shares all typing logic with `check`.
    visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Type-checks a query expression. Maintains the query env / final-clause stacks while visiting
 * each clause, then resolves the overall type from the select expression and the contextual
 * type. Table results additionally have their constraint validated.
 */
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Seed prevEnvs when this is the outermost query so nested queries can reach it.
    boolean cleanPrevEnvs = false;
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
        cleanPrevEnvs = true;
    }
    if (breakToParallelQueryEnv) {
        // Nested inside a parallel construct: anchor to the outer env rather than the local one.
        queryEnvs.push(prevEnvs.peek());
    } else {
        queryEnvs.push(env);
    }
    queryFinalClauses.push(queryExpr.getSelectClause());
    List<BLangNode> clauses = queryExpr.getQueryClauses();
    BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection();
    clauses.forEach(clause -> clause.accept(this));
    BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression,
            collectionNode.getBType(), expType, queryExpr);
    actualType = (actualType == symTable.semanticError) ? actualType :
            types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    // Unwind the stacks pushed above (pops must mirror the pushes exactly).
    queryFinalClauses.pop();
    queryEnvs.pop();
    if (cleanPrevEnvs) {
        prevEnvs.pop();
    }
    if (actualType.tag == TypeTags.TABLE) {
        BTableType tableType = (BTableType) actualType;
        tableType.constraintPos = queryExpr.pos;
        tableType.isTypeInlineDefined = true;
        if (!validateTableType(tableType, null)) {
            resultType = symTable.semanticError;
            return;
        }
    }
    checkWithinQueryExpr = false;
    resultType = actualType;
}
/** We are inside a query iff both the query-env stack and the final-clause stack are non-empty. */
private boolean isWithinQuery() {
    return !(queryEnvs.isEmpty() || queryFinalClauses.isEmpty());
}
/**
 * Resolves the result type of a query expression by checking the select expression against
 * each non-error, non-nil candidate from the contextual type. Exactly one viable candidate
 * yields the result (combined with the collection's completion/error type for streams);
 * multiple candidates are ambiguous.
 */
private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
                               BType targetType, BLangQueryExpr queryExpr) {
    // Candidate result types: the contextual type minus error and nil members.
    List<BType> resultTypes = types.getAllTypes(targetType).stream()
            .filter(t -> !types.isAssignable(t, symTable.errorType))
            .filter(t -> !types.isAssignable(t, symTable.nilType))
            .collect(Collectors.toList());
    if (resultTypes.isEmpty()) {
        // No contextual hint at all: infer freely.
        resultTypes.add(symTable.noType);
    }
    BType actualType = symTable.semanticError;
    List<BType> selectTypes = new ArrayList<>();
    List<BType> resolvedTypes = new ArrayList<>();
    BType selectType, resolvedType;
    for (BType type : resultTypes) {
        switch (type.tag) {
            case TypeTags.ARRAY:
                selectType = checkExpr(selectExp, env, ((BArrayType) type).eType);
                resolvedType = new BArrayType(selectType);
                break;
            case TypeTags.TABLE:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint,
                        true, true));
                resolvedType = symTable.tableType;
                break;
            case TypeTags.STREAM:
                selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint,
                        true, true));
                resolvedType = symTable.streamType;
                break;
            case TypeTags.STRING:
            case TypeTags.XML:
                selectType = checkExpr(selectExp, env, type);
                resolvedType = selectType;
                break;
            case TypeTags.NONE:
            default:
                // No specific context: derive the result shape from the collection's basic type.
                selectType = checkExpr(selectExp, env, type);
                resolvedType = getNonContextualQueryType(selectType, collectionType);
                break;
        }
        if (selectType != symTable.semanticError) {
            if (resolvedType.tag == TypeTags.STREAM) {
                queryExpr.isStream = true;
            }
            if (resolvedType.tag == TypeTags.TABLE) {
                queryExpr.isTable = true;
            }
            selectTypes.add(selectType);
            resolvedTypes.add(resolvedType);
        }
    }
    if (selectTypes.size() == 1) {
        BType errorType = getErrorType(collectionType, queryExpr);
        selectType = selectTypes.get(0);
        if (queryExpr.isStream) {
            // Streams carry the error as their completion type rather than a union.
            return new BStreamType(TypeTags.STREAM, selectType, errorType, null);
        } else if (queryExpr.isTable) {
            actualType = getQueryTableType(queryExpr, selectType);
        } else {
            actualType = resolvedTypes.get(0);
        }
        if (errorType != null && errorType.tag != TypeTags.NIL) {
            // Iteration may fail: the result includes the collection's error type.
            return BUnionType.create(null, actualType, errorType);
        } else {
            return actualType;
        }
    } else if (selectTypes.size() > 1) {
        dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes);
        return actualType;
    } else {
        return actualType;
    }
}
/**
 * Builds the table type for a table-producing query. With a key specifier, keyed insertion can
 * conflict at runtime, so the result is `table|error`; without one it is just the table type.
 */
private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) {
    BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    if (queryExpr.fieldNameIdentifierList.isEmpty()) {
        return tableType;
    }
    tableType.fieldNameList = queryExpr.fieldNameIdentifierList.stream()
            .map(identifier -> ((BLangIdentifier) identifier).value)
            .collect(Collectors.toList());
    return BUnionType.create(null, tableType, symTable.errorType);
}
/**
 * Computes the error type a query over {@code collectionType} can produce: a stream's
 * completion type, or the error members of the iterator's `next()` result for objects and
 * other iterables. For a stream-producing query inside another query, nil and error are added
 * so `check` failures can propagate through the completion type.
 * Returns {@code null} when no error contribution exists (or the collection itself is erroneous).
 */
private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) {
    if (collectionType.tag == TypeTags.SEMANTIC_ERROR) {
        return null;
    }
    BType returnType = null, errorType = null;
    switch (collectionType.tag) {
        case TypeTags.STREAM:
            errorType = ((BStreamType) collectionType).completionType;
            break;
        case TypeTags.OBJECT:
            returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType);
            break;
        default:
            BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType,
                    names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
            if (itrSymbol == this.symTable.notFoundSymbol) {
                return null;
            }
            BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol;
            returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType);
    }
    List<BType> errorTypes = new ArrayList<>();
    if (returnType != null) {
        // Collect the error members of the iterator's `next()` return type.
        types.getAllTypes(returnType).stream()
                .filter(t -> types.isAssignable(t, symTable.errorType))
                .forEach(errorTypes::add);
    }
    if (checkWithinQueryExpr && queryExpr.isStream) {
        // A `check` inside a nested stream query surfaces through the completion type.
        if (errorTypes.isEmpty()) {
            errorTypes.add(symTable.nilType);
        }
        errorTypes.add(symTable.errorType);
    }
    if (!errorTypes.isEmpty()) {
        if (errorTypes.size() == 1) {
            errorType = errorTypes.get(0);
        } else {
            errorType = BUnionType.create(null, errorTypes.toArray(new BType[0]));
        }
    }
    return errorType;
}
/**
 * Picks the query result shape from the source collection's basic type when no contextual
 * type constrains it: table→table, stream→stream, xml→xml&lt;T&gt;, string→string,
 * everything else→T[].
 */
private BType getNonContextualQueryType(BType staticType, BType basicType) {
    switch (basicType.tag) {
        case TypeTags.TABLE:
            return symTable.tableType;
        case TypeTags.STREAM:
            return symTable.streamType;
        case TypeTags.XML:
            return new BXMLType(staticType, null);
        case TypeTags.STRING:
            return symTable.stringType;
        default:
            return new BArrayType(staticType);
    }
}
/**
 * Type-checks a query action (`from ... do { ... }`). Clauses are visited with the query env
 * stacks in place, the do-block body is analysed in its own block env, and the action's type
 * is `error?` checked against the contextual type.
 */
@Override
public void visit(BLangQueryAction queryAction) {
    // Keep prevEnvs one level deep per nesting: push env at the top level, duplicate otherwise.
    if (prevEnvs.empty()) {
        prevEnvs.push(env);
    } else {
        prevEnvs.push(prevEnvs.peek());
    }
    queryEnvs.push(prevEnvs.peek());
    BLangDoClause doClause = queryAction.getDoClause();
    queryFinalClauses.push(doClause);
    List<BLangNode> clauses = queryAction.getQueryClauses();
    clauses.forEach(clause -> clause.accept(this));
    // The do-block runs in a block env scoped under the (possibly narrowed) query env.
    semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek()));
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
    // Unwind the stacks pushed above.
    queryFinalClauses.pop();
    queryEnvs.pop();
    prevEnvs.pop();
}
/**
 * Type-checks a `from` clause: replaces the top query env with a narrowed env owned by this
 * clause, checks the source collection, and binds the clause's pattern variables in it.
 */
@Override
public void visit(BLangFromClause fromClause) {
    boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv;
    this.breakToParallelQueryEnv = true;
    SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop());
    fromClause.env = fromEnv;
    queryEnvs.push(fromEnv);
    checkExpr(fromClause.collection, fromEnv);
    types.setInputClauseTypedBindingPatternType(fromClause);
    handleInputClauseVariables(fromClause, fromEnv);
    this.breakToParallelQueryEnv = prevBreakToParallelEnv;
}
/**
 * Type-checks a `join` clause like a `from` clause (narrowed env, collection, pattern
 * variables), then visits its `on` condition if present.
 */
@Override
public void visit(BLangJoinClause joinClause) {
    boolean prevBreakEnv = this.breakToParallelQueryEnv;
    this.breakToParallelQueryEnv = true;
    SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop());
    joinClause.env = joinEnv;
    queryEnvs.push(joinEnv);
    checkExpr(joinClause.collection, joinEnv);
    types.setInputClauseTypedBindingPatternType(joinClause);
    handleInputClauseVariables(joinClause, joinEnv);
    if (joinClause.onClause != null) {
        ((BLangOnClause) joinClause.onClause).accept(this);
    }
    this.breakToParallelQueryEnv = prevBreakEnv;
}
/** A `let` clause narrows the query env and defines each declaration inside it. */
@Override
public void visit(BLangLetClause letClause) {
    SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop());
    letClause.env = letEnv;
    queryEnvs.push(letEnv);
    for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv);
    }
}
@Override
public void visit(BLangWhereClause whereClause) {
    // The filter condition may narrow types for subsequent clauses.
    whereClause.env = handleFilterClauses(whereClause.expression);
}
@Override
public void visit(BLangSelectClause selectClause) {
    // Narrow the current query env for the select clause and make it the active env.
    SymbolEnv narrowedEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop());
    queryEnvs.push(narrowedEnv);
    selectClause.env = narrowedEnv;
}
@Override
public void visit(BLangDoClause doClause) {
    // Narrow the current query env for the do clause and make it the active env.
    SymbolEnv narrowedEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop());
    queryEnvs.push(narrowedEnv);
    doClause.env = narrowedEnv;
}
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    // The on-conflict expression must evaluate to an error value.
    BType conflictExprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
    boolean isErrorTyped = types.isAssignable(conflictExprType, symTable.errorType);
    if (!isErrorTyped) {
        dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED,
                symTable.errorType, conflictExprType);
    }
}
@Override
public void visit(BLangLimitClause limitClause) {
    // The limit expression must be assignable to int.
    BType limitExprType = checkExpr(limitClause.expression, queryEnvs.peek());
    boolean isIntTyped = types.isAssignable(limitExprType, symTable.intType);
    if (!isIntTyped) {
        dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.intType, limitExprType);
    }
}
@Override
public void visit(BLangOnClause onClause) {
    // The on-condition of a join: the LHS is checked against the env visible before
    // the join's input node, the RHS against the env introduced after it; the two
    // sides must have assignable types.
    BType lhsType, rhsType;
    BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
    onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
    lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv);
    onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
    // rhsEnv may be null; fall back to the current query env in that case.
    rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek());
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}
@Override
public void visit(BLangOrderByClause orderByClause) {
    // Order-by keys are checked against the current query env and must be of an
    // ordered type.
    orderByClause.env = queryEnvs.peek();
    for (OrderKeyNode keyNode : orderByClause.getOrderKeyList()) {
        BLangOrderKey orderKey = (BLangOrderKey) keyNode;
        BType keyType = checkExpr((BLangExpression) keyNode.getOrderKey(), orderByClause.env);
        if (!types.isOrderedType(keyType, false)) {
            dlog.error(orderKey.expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}
@Override
public void visit(BLangDo doNode) {
    // Only the optional on-fail clause needs checking here.
    BLangOnFailClause onFail = doNode.onFailClause;
    if (onFail != null) {
        onFail.accept(this);
    }
}
public void visit(BLangOnFailClause onFailClause) {
    // Visit each statement of the on-fail body in order.
    for (var stmt : onFailClause.body.stmts) {
        stmt.accept(this);
    }
}
private SymbolEnv handleFilterClauses (BLangExpression filterExpression) {
    // Type-check the filter expression against boolean.
    checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType);
    BType actualType = filterExpression.getBType();
    // Tuple-typed filters are explicitly reported as incompatible with boolean.
    if (TypeTags.TUPLE == actualType.tag) {
        dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.booleanType, actualType);
    }
    // Narrow the env assuming the filter holds, and make it the active query env.
    SymbolEnv filterEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop());
    queryEnvs.push(filterEnv);
    return filterEnv;
}
private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) {
    // Defines the binding variable(s) of a from/join clause in the given env.
    if (bLangInputClause.variableDefinitionNode == null) {
        // No variable to define.
        return;
    }
    BLangVariable variableNode = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable();
    // With `var`, the variable simply takes the element type of the collection.
    if (bLangInputClause.isDeclaredWithVar) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    // With an explicit type, the element type must be assignable to it.
    BType typeNodeType = symResolver.resolveTypeNode(variableNode.typeNode, blockEnv);
    if (types.isAssignable(bLangInputClause.varType, typeNodeType)) {
        semanticAnalyzer.handleDeclaredVarInForeach(variableNode, bLangInputClause.varType, blockEnv);
        return;
    }
    // Avoid cascading diagnostics when the declared type itself failed to resolve.
    if (typeNodeType != symTable.semanticError) {
        dlog.error(variableNode.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                bLangInputClause.varType, typeNodeType);
    }
    semanticAnalyzer.handleDeclaredVarInForeach(variableNode, typeNodeType, blockEnv);
}
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
    // Type-checks `check`/`checkpanic` expressions: the sub-expression must be able
    // to produce an error, and the operator removes the error component(s) from the
    // resulting type.
    String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
    BLangExpression exprWithCheckingKeyword = checkedExpr.expr;
    boolean firstVisit = exprWithCheckingKeyword.getBType() == null;
    // Derive a candidate type for the sub-expression: the expected type widened
    // with an error component when it has none.
    BType checkExprCandidateType;
    if (expType == symTable.noType) {
        checkExprCandidateType = symTable.noType;
    } else {
        BType exprType = getCandidateType(checkedExpr, expType);
        if (exprType == symTable.semanticError) {
            checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType);
        } else {
            checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType);
        }
    }
    if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) {
        rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType);
    }
    BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType);
    if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Worker receive needs a second pass; defer typing until then.
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = checkedExpr.getBType();
            exprType = checkedExpr.expr.getBType();
        }
    }
    boolean isErrorType = types.isAssignable(exprType, symTable.errorType);
    if (exprType.tag != TypeTags.UNION && !isErrorType) {
        if (exprType.tag == TypeTags.READONLY) {
            // FIX: plain list creation instead of double-brace initialization, which
            // allocated an anonymous ArrayList subclass capturing the enclosing
            // TypeChecker instance.
            List<BType> equivalentErrorTypes = new ArrayList<>(1);
            equivalentErrorTypes.add(symTable.errorType);
            checkedExpr.equivalentErrorTypeList = equivalentErrorTypes;
            resultType = symTable.anyAndReadonly;
            return;
        } else if (exprType != symTable.semanticError) {
            // `check` on an expression that can never produce an error is invalid.
            dlog.error(checkedExpr.expr.pos,
                    DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS,
                    operatorType);
        }
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // Split the (union) type's members into error and non-error components.
    List<BType> errorTypes = new ArrayList<>();
    List<BType> nonErrorTypes = new ArrayList<>();
    if (!isErrorType) {
        for (BType memberType : ((BUnionType) exprType).getMemberTypes()) {
            if (memberType.tag == TypeTags.READONLY) {
                errorTypes.add(symTable.errorType);
                nonErrorTypes.add(symTable.anyAndReadonly);
                continue;
            }
            if (types.isAssignable(memberType, symTable.errorType)) {
                errorTypes.add(memberType);
                continue;
            }
            nonErrorTypes.add(memberType);
        }
    } else {
        errorTypes.add(exprType);
    }
    checkedExpr.equivalentErrorTypeList = errorTypes;
    if (errorTypes.isEmpty()) {
        dlog.error(checkedExpr.expr.pos,
                DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // The checked expression's actual type is the union of the non-error members.
    BType actualType;
    if (nonErrorTypes.size() == 0) {
        actualType = symTable.neverType;
    } else if (nonErrorTypes.size() == 1) {
        actualType = nonErrorTypes.get(0);
    } else {
        actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes));
    }
    if (actualType.tag == TypeTags.NEVER) {
        dlog.error(checkedExpr.pos, DiagnosticErrorCode.NEVER_TYPE_NOT_ALLOWED_WITH_CHECKED_EXPR,
                operatorType);
    }
    resultType = types.checkType(checkedExpr, actualType, expType);
}
private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) {
    // For lax-typed sub-expressions, rewrites `check <expr>` into
    // `check <expr>.ensureType(<typedesc>)` (see FUNCTION_NAME_ENSURE_TYPE) so the
    // expected type is enforced.
    BType rhsType = getCandidateType(checkedExpr, type);
    if (rhsType == symTable.semanticError) {
        rhsType = getCandidateType(checkedExpr, rhsType);
    }
    BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType);
    if (!types.isLax(candidateLaxType)) {
        return;
    }
    // Build the typedesc argument for ensureType from the expected type.
    ArrayList<BLangExpression> argExprs = new ArrayList<>();
    BType typedescType = new BTypedescType(expType, null);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = expType;
    typedescExpr.setBType(typedescType);
    argExprs.add(typedescExpr);
    BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE,
            argExprs, checkedExpr.expr, checkedExpr.pos);
    invocation.symbol = symResolver.lookupLangLibMethod(type,
            names.fromString(invocation.name.value));
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    checkedExpr.expr = invocation;
}
private BType getCandidateLaxType(BLangNode expr, BType rhsType) {
    // Field-based access needs the safe (nil-lifted) variant of the RHS type.
    boolean isFieldAccess = expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR;
    return isFieldAccess ? types.getSafeType(rhsType, false, true) : rhsType;
}
private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) {
    // Speculatively types a clone of the checked sub-expression with diagnostics
    // muted, restoring the previous error count afterwards. Used to probe a
    // candidate type without emitting user-visible errors.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();
    checkedExpr.expr.cloneAttempt++;
    BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr);
    BType rhsType;
    if (checkExprCandidateType == symTable.semanticError) {
        rhsType = checkExpr(clone, env);
    } else {
        rhsType = checkExpr(clone, env, checkExprCandidateType);
    }
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(prevErrorCount);
    // Only unmute when we were not already inside another speculative check.
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    return rhsType;
}
private BType addDefaultErrorIfNoErrorComponentFound(BType type) {
    // Leave the type untouched when it already carries an error component;
    // otherwise union it with `error`.
    boolean hasErrorComponent = false;
    for (BType member : types.getAllTypes(type)) {
        if (types.isAssignable(member, symTable.errorType)) {
            hasErrorComponent = true;
            break;
        }
    }
    return hasErrorComponent ? type : BUnionType.create(null, type, symTable.errorType);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // The type of a service constructor is the declared service symbol's type.
    resultType = serviceConstructorExpr.serviceNode.symbol.type;
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    // Resolve the tested type node, check the tested expression, and type the
    // whole `is` expression as boolean against the expected type.
    BType resolvedType = symResolver.resolveTypeNode(typeTestExpr.typeNode, env);
    typeTestExpr.typeNode.setBType(resolvedType);
    checkExpr(typeTestExpr.expr, env);
    resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
}
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Type-checks an annotation access expression: resolves the annotation symbol
    // and types the result as `annot-type|()` since the annotation may be absent.
    checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
    BType actualType = symTable.semanticError;
    BSymbol symbol =
            this.symResolver.resolveAnnotation(annotAccessExpr.pos, env,
                    names.fromString(annotAccessExpr.pkgAlias.getValue()),
                    names.fromString(annotAccessExpr.annotationName.getValue()));
    if (symbol == this.symTable.notFoundSymbol) {
        this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION,
                annotAccessExpr.annotationName.getValue());
    } else {
        annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol;
        // An annotation without an attached type is typed as `true` when present.
        BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? symTable.trueType :
                ((BAnnotationSymbol) symbol).attachedType.type;
        actualType = BUnionType.create(null, annotType, symTable.nilType);
    }
    this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
}
private boolean isValidVariableReference(BLangExpression varRef) {
    // Only proper variable-reference kinds are accepted; anything else is
    // reported as an invalid record binding pattern.
    switch (varRef.getKind()) {
        case SIMPLE_VARIABLE_REF:
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
        case XML_ATTRIBUTE_ACCESS_EXPR:
        case RECORD_VARIABLE_REF:
        case TUPLE_VARIABLE_REF:
        case ERROR_VARIABLE_REF:
            return true;
        default:
            dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType());
            return false;
    }
}
private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) {
    // When the target type is (or contains) `readonly`, substitutes the immutable
    // intersection of the expected type, provided it is selectively immutable.
    if (origTargetType == symTable.readonlyType) {
        if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
            return origTargetType;
        }
        return ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
                (SelectivelyImmutableReferenceType) expType,
                env, symTable, anonymousModelHelper, names,
                new HashSet<>());
    }
    if (origTargetType.tag != TypeTags.UNION) {
        return origTargetType;
    }
    // For unions, separate out a `readonly` member so it can be replaced with the
    // immutable intersection.
    boolean hasReadOnlyType = false;
    LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
        if (memberType == symTable.readonlyType) {
            hasReadOnlyType = true;
            continue;
        }
        nonReadOnlyTypes.add(memberType);
    }
    if (!hasReadOnlyType) {
        return origTargetType;
    }
    if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
        return origTargetType;
    }
    BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
    nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
            (SelectivelyImmutableReferenceType)
                    expType,
            env, symTable, anonymousModelHelper,
            names, new HashSet<>()));
    return nonReadOnlyUnion;
}
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    // Define the params in a dedicated arrow-function env, then type-check the body
    // expression against the expected return type.
    SymbolEnv arrowEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    for (BLangSimpleVariable param : bLangArrowFunction.params) {
        symbolEnter.defineNode(param, arrowEnv);
    }
    return checkExpr(bLangArrowFunction.body.expr, arrowEnv, expectedRetType);
}
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    // Assigns the expected parameter types onto the arrow function's parameters;
    // reports an error when the arities do not match.
    if (paramTypes.size() != bLangArrowFunction.params.size()) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                paramTypes.size(), bLangArrowFunction.params.size());
        resultType = symTable.semanticError;
        bLangArrowFunction.params.forEach(param -> param.setBType(symTable.semanticError));
        return;
    }
    for (int i = 0; i < bLangArrowFunction.params.size(); i++) {
        BLangSimpleVariable paramIdentifier = bLangArrowFunction.params.get(i);
        BType bType = paramTypes.get(i);
        // Synthesize a type node, since arrow-function params carry no explicit type.
        BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        valueTypeNode.setTypeKind(bType.getKind());
        valueTypeNode.pos = symTable.builtinPos;
        paramIdentifier.setTypeNode(valueTypeNode);
        paramIdentifier.setBType(bType);
    }
}
private void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) {
    // A variable's initializer may not reference the variable being defined.
    if (varSymbol == env.enclVarSym) {
        dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name);
    }
}
public List<BType> getListWithErrorTypes(int count) {
    // Produce `count` semantic-error placeholder types.
    List<BType> errorTypes = new ArrayList<>(count);
    int remaining = count;
    while (remaining-- > 0) {
        errorTypes.add(symTable.semanticError);
    }
    return errorTypes;
}
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
    // Resolves the callee symbol (function, constructor, or function pointer) of a
    // plain - possibly module-qualified - call, then checks args and return type.
    Name funcName = names.fromIdNode(iExpr.name);
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    BSymbol funcSymbol = symTable.notFoundSymbol;
    BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr));
    if (pkgSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias);
    } else {
        if (funcSymbol == symTable.notFoundSymbol) {
            // First try the main symbol space (variables may hold function pointers).
            BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
                funcSymbol = symbol;
            }
            if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) &&
                    (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
                funcSymbol = symbol;
            }
        }
        if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) {
            // Fall back to the constructor symbol space.
            BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol;
        }
    }
    if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
        if (!missingNodesHelper.isMissingNode(funcName)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName);
        }
        // Still check the args so their own errors get reported.
        iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
        resultType = symTable.semanticError;
        return;
    }
    if (isFunctionPointer(funcSymbol)) {
        iExpr.functionPointerInvocation = true;
        markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID);
    if (langLibPackageID) {
        // Lang-lib calls are checked in a dedicated invocation env.
        this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
    if (langLibPackageID && !iExpr.argExprs.isEmpty()) {
        // Lang-lib methods must not mutate an immutable receiver.
        checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol);
    }
}
protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) {
    // Marks `symbol` as a closure variable when it is captured by an enclosing
    // lambda, arrow function, or record-type context, and registers it on the
    // capturing node.
    BLangInvokableNode encInvokable = env.enclInvokable;
    // Already a closure, or a package-owned symbol referenced outside any
    // lambda/arrow context: nothing to register.
    if (symbol.closure || (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
            env.node.getKind() != NodeKind.ARROW_EXPR && env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY &&
            encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA)) {
        return;
    }
    // Captured by an enclosing lambda (but not as one of its own parameters).
    if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA)
            && !isFunctionArgument(symbol, encInvokable.requiredParams)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
    // Captured by an arrow function (but not as one of its own parameters).
    if (env.node.getKind() == NodeKind.ARROW_EXPR
            && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol) {
            resolvedSymbol.closure = true;
            ((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
    // Captured inside a record type's context (e.g. a default value expression).
    if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null &&
                !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
}
private boolean isNotFunction(BSymbol funcSymbol) {
    // A symbol is invocable when it is a function, a constructor, or a function pointer.
    boolean invocable = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
            || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR
            || isFunctionPointer(funcSymbol);
    return !invocable;
}
private boolean isFunctionPointer(BSymbol funcSymbol) {
    // Actual functions are never pointers; a pointer is a non-native variable
    // symbol of function kind.
    if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
        return false;
    }
    boolean isVariable = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE;
    boolean isNative = (funcSymbol.flags & Flags.NATIVE) == Flags.NATIVE;
    return isVariable && funcSymbol.kind == SymbolKind.FUNCTION && !isNative;
}
private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr,
                                                                 BType expectedType) {
    // Type-checks each named detail arg of an error constructor. A clone is checked
    // first; when the clone fails against the target field type, the original is
    // re-checked without an expected type so errors refer to the arg itself.
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (BLangNamedArgsExpression namedArgsExpression : errorConstructorExpr.namedArgs) {
        BType target = getErrorCtorNamedArgTargetType(namedArgsExpression, expectedType);
        BLangNamedArgsExpression clone = nodeCloner.cloneNode(namedArgsExpression);
        BType type = checkExpr(clone, env, target);
        if (type == symTable.semanticError) {
            checkExpr(namedArgsExpression, env);
        } else {
            checkExpr(namedArgsExpression, env, target);
        }
        namedArgs.add(namedArgsExpression);
    }
    return namedArgs;
}
private BType getErrorCtorNamedArgTargetType(BLangNamedArgsExpression namedArgsExpression, BType expectedType) {
    // Determines the expected type of an error-constructor named detail arg from
    // the detail type (a map or a record).
    if (expectedType == symTable.semanticError) {
        return symTable.semanticError;
    }
    switch (expectedType.tag) {
        case TypeTags.MAP:
            return ((BMapType) expectedType).constraint;
        case TypeTags.RECORD:
            break;
        default:
            return symTable.semanticError;
    }
    BRecordType recordType = (BRecordType) expectedType;
    BField matchedField = recordType.fields.get(namedArgsExpression.name.value);
    if (matchedField != null) {
        return matchedField.type;
    }
    // An open record with declared fields does not accept extra rest detail args.
    if (!recordType.sealed && !recordType.fields.isEmpty()) {
        dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name,
                recordType);
    }
    return recordType.sealed ? symTable.noType : recordType.restFieldType;
}
private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
    // Type-checks a method call on an object value. Service methods may only be
    // called through `self`; falls back to lang-lib methods when the object type
    // has no matching method.
    if (objectType.getKind() == TypeKind.SERVICE &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION);
        return;
    }
    // Attached methods are looked up via their attached-function symbol name.
    Name funcName =
            names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
    BSymbol funcSymbol =
            symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
    if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
        if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
            dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value,
                    objectType);
            resultType = symTable.semanticError;
            return;
        }
    } else {
        iExpr.symbol = funcSymbol;
    }
    // The user-defined init method may only be invoked on `self`.
    if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    checkInvocationParamAndReturnType(iExpr);
}
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) {
    // Type-checks a remote/async action invocation on an object value.
    BLangValueExpression varRef = (BLangValueExpression) aInv.expr;
    // Non-async actions require an endpoint receiver.
    if (((varRef.symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType());
        this.resultType = symTable.semanticError;
        aInv.symbol = symTable.notFoundSymbol;
        return;
    }
    BVarSymbol epSymbol = (BVarSymbol) varRef.symbol;
    Name remoteMethodQName = names
            .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value));
    Name actionName = names.fromIdNode(aInv.name);
    BSymbol remoteFuncSymbol = symResolver
            .lookupMemberSymbol(aInv.pos, epSymbol.type.tsymbol.scope, env, remoteMethodQName, SymTag.FUNCTION);
    if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) {
        dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType);
        resultType = symTable.semanticError;
        return;
    }
    // Only remote methods may use action-call syntax (unless the call is async).
    if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName);
        this.resultType = symTable.semanticError;
        return;
    }
    // A client remote method may not return `never` (or a structure requiring it).
    if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) &&
            Symbols.isFlagOn(expType.flags, Flags.CLIENT) &&
            types.isNeverTypeOrStructureTypeWithARequiredNeverMember
                    ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL);
    }
    aInv.symbol = remoteFuncSymbol;
    checkInvocationParamAndReturnType(aInv);
}
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
    // True when a matching lang-lib method exists for the receiver type.
    BSymbol langLibMethod = getLangLibMethod(iExpr, bType);
    return langLibMethod != symTable.notFoundSymbol;
}
private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) {
    // Resolves a lang-lib method for the receiver type. On success the invocation
    // is rewritten: the receiver becomes the first argument, and the call is
    // checked in a dedicated invocation env (the previous env is restored after).
    Name funcName = names.fromString(iExpr.name.value);
    BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName);
    if (funcSymbol == symTable.notFoundSymbol) {
        return symTable.notFoundSymbol;
    }
    iExpr.symbol = funcSymbol;
    iExpr.langLibInvocation = true;
    SymbolEnv enclEnv = this.env;
    this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    iExpr.argExprs.add(0, iExpr.expr);
    checkInvocationParamAndReturnType(iExpr);
    this.env = enclEnv;
    return funcSymbol;
}
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    // Check the call's arguments, then the actual return type against the expected type.
    BType actualType = checkInvocationParam(iExpr);
    resultType = types.checkType(iExpr, actualType, this.expType);
}
private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams,
                                                       Set<String> requiredParamNames) {
    // Additional named args may only flow into an included-record param when there
    // is exactly one open candidate whose fields cover every required param name.
    if (openIncRecordParams.size() != 1) {
        return null;
    }
    BVarSymbol candidate = openIncRecordParams.get(0);
    LinkedHashMap<String, BField> candidateFields = ((BRecordType) candidate.type).fields;
    boolean coversAllNames = candidateFields.keySet().containsAll(requiredParamNames);
    return coversAllNames ? candidate : null;
}
private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol,
                                                               List<BVarSymbol> incRecordParams) {
    // Collects the individual non-`never` fields of included-record params into
    // `incRecordParams` (out-param) and returns the single open included-record
    // param - if any - that may absorb additional named args.
    Set<String> requiredParamNames = new HashSet<>();
    List<BVarSymbol> openIncRecordParams = new ArrayList<>();
    for (BVarSymbol paramSymbol : invokableSymbol.params) {
        if (Symbols.isFlagOn(Flags.asMask(paramSymbol.getFlags()), Flags.INCLUDED) &&
                paramSymbol.type.getKind() == TypeKind.RECORD) {
            boolean recordWithDisallowFieldsOnly = true;
            LinkedHashMap<String, BField> fields = ((BRecordType) paramSymbol.type).fields;
            for (String fieldName : fields.keySet()) {
                BField field = fields.get(fieldName);
                if (field.symbol.type.tag != TypeTags.NEVER) {
                    recordWithDisallowFieldsOnly = false;
                    incRecordParams.add(field.symbol);
                    requiredParamNames.add(fieldName);
                }
            }
            // A record with only `never` fields but an open rest type is a candidate
            // for absorbing arbitrary named args.
            if (recordWithDisallowFieldsOnly && ((BRecordType) paramSymbol.type).restFieldType != symTable.noType) {
                openIncRecordParams.add(paramSymbol);
            }
        } else {
            requiredParamNames.add(paramSymbol.name.value);
        }
    }
    return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames);
}
private BType checkInvocationParam(BLangInvocation iExpr) {
    // Buckets the call's arguments into positional/named/rest groups, validates
    // argument ordering, then delegates detailed checking to checkInvocationArgs.
    if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
        return symTable.semanticError;
    }
    if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
        return symTable.noType;
    }
    BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
    List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes();
    List<BVarSymbol> incRecordParams = new ArrayList<>();
    BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
            incRecordParams);
    int parameterCountForPositionalArgs = paramTypes.size();
    int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
    iExpr.requiredArgs = new ArrayList<>();
    // An included-record param with at least one non-`never` field is addressed
    // through its individual fields, so exclude it from the named-arg param count.
    for (BVarSymbol symbol : invokableSymbol.params) {
        if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
                symbol.type.tag != TypeTags.RECORD) {
            continue;
        }
        LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields;
        if (fields.isEmpty()) {
            continue;
        }
        for (String field : fields.keySet()) {
            if (fields.get(field).type.tag != TypeTags.NEVER) {
                parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
                break;
            }
        }
    }
    int i = 0;
    BLangExpression vararg = null;
    boolean foundNamedArg = false;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                foundNamedArg = true;
                if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                }
                i++;
                break;
            case REST_ARGS_EXPR:
                // A rest arg must not follow named args.
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                    continue;
                }
                vararg = expr;
                break;
            default:
                // Positional argument; must not follow named args.
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                }
                if (i < parameterCountForPositionalArgs) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }
    return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
            incRecordParamAllowAdditionalFields);
}
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg,
                                  List<BVarSymbol> incRecordParams,
                                  BVarSymbol incRecordParamAllowAdditionalFields) {
    // Checks each argument of the invocation against the corresponding parameter,
    // tracks missing required params, synthesizes tuple/record types for varargs
    // that must cover remaining params, and returns the call's actual return type.
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
    BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type;
    BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol;
    List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params);
    List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
    List<BVarSymbol> valueProvidedParams = new ArrayList<>();
    List<BVarSymbol> requiredParams = new ArrayList<>();
    List<BVarSymbol> requiredIncRecordParams = new ArrayList<>();
    for (BVarSymbol nonRestParam : nonRestParams) {
        if (nonRestParam.isDefaultable) {
            continue;
        }
        requiredParams.add(nonRestParam);
    }
    for (BVarSymbol incRecordParam : incRecordParams) {
        if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) {
            requiredIncRecordParams.add(incRecordParam);
        }
    }
    int i = 0;
    for (; i < nonRestArgs.size(); i++) {
        BLangExpression arg = nonRestArgs.get(i);
        // A lang-lib receiver arg is already type-checked; only validate/cast it.
        if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
            BType expectedType = paramTypes.get(i);
            types.checkType(arg.pos, arg.getBType(), expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            types.setImplicitCastExpr(arg, arg.getBType(), expectedType);
        }
        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional arg: match to the next non-rest param, if any remain.
            if (i < nonRestParams.size()) {
                BVarSymbol param = nonRestParams.get(i);
                checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation);
                valueProvidedParams.add(param);
                requiredParams.remove(param);
                continue;
            }
            break;
        }
        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            // Named arg: resolve the target parameter (or included-record field).
            BLangIdentifier argName = ((NamedArgNode) arg).getName();
            BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr,
                    nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields);
            if (varSym == null) {
                dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName);
                break;
            }
            requiredParams.remove(varSym);
            requiredIncRecordParams.remove(varSym);
            if (valueProvidedParams.contains(varSym)) {
                dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                continue;
            }
            checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation);
            valueProvidedParams.add(varSym);
        }
    }
    BVarSymbol restParam = invokableTypeSymbol.restParam;
    boolean errored = false;
    if (!requiredParams.isEmpty() && vararg == null) {
        // No vararg can supply the missing values; report each missing required param.
        for (BVarSymbol requiredParam : requiredParams) {
            if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) {
                dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name,
                        iExpr.name.value);
                errored = true;
            }
        }
    }
    if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) {
        for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) {
            for (BVarSymbol requiredParam : requiredParams) {
                if (requiredParam.type == requiredIncRecordParam.owner.type) {
                    dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER,
                            requiredIncRecordParam.name, iExpr.name.value);
                    errored = true;
                }
            }
        }
    }
    if (restParam == null &&
            (!iExpr.restArgs.isEmpty() ||
                    (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        errored = true;
    }
    if (errored) {
        return symTable.semanticError;
    }
    BType listTypeRestArg = restParam == null ? null : restParam.type;
    BRecordType mappingTypeRestArg = null;
    if (vararg != null && nonRestArgs.size() < nonRestParams.size()) {
        // The vararg must cover the remaining non-rest params: synthesize a tuple
        // type (positional view) and a record type (named view) for it.
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        List<BType> tupleMemberTypes = new ArrayList<>();
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL);
        mappingTypeRestArg = new BRecordType(recordSymbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        BType tupleRestType = null;
        BVarSymbol fieldSymbol;
        for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) {
            BType paramType = paramTypes.get(j);
            BVarSymbol nonRestParam = nonRestParams.get(j);
            Name paramName = nonRestParam.name;
            tupleMemberTypes.add(paramType);
            boolean required = requiredParams.contains(nonRestParam);
            // FIX: build the flag set with a plain HashSet instead of double-brace
            // initialization, which allocated an anonymous subclass capturing `this`.
            Set<Flag> fieldFlags = new HashSet<>();
            fieldFlags.add(required ? Flag.REQUIRED : Flag.OPTIONAL);
            fieldSymbol = new BVarSymbol(Flags.asMask(fieldFlags), paramName,
                    pkgID, paramType, recordSymbol, null, VIRTUAL);
            fields.put(paramName.value, new BField(paramName, null, fieldSymbol));
        }
        if (listTypeRestArg != null) {
            // The vararg may also spill into the rest param.
            if (listTypeRestArg.tag == TypeTags.ARRAY) {
                tupleRestType = ((BArrayType) listTypeRestArg).eType;
            } else if (listTypeRestArg.tag == TypeTags.TUPLE) {
                BTupleType restTupleType = (BTupleType) listTypeRestArg;
                tupleMemberTypes.addAll(restTupleType.tupleTypes);
                if (restTupleType.restType != null) {
                    tupleRestType = restTupleType.restType;
                }
            }
        }
        BTupleType tupleType = new BTupleType(tupleMemberTypes);
        tupleType.restType = tupleRestType;
        listTypeRestArg = tupleType;
        mappingTypeRestArg.sealed = true;
        mappingTypeRestArg.restFieldType = symTable.noType;
        mappingTypeRestArg.fields = fields;
        recordSymbol.type = mappingTypeRestArg;
        mappingTypeRestArg.tsymbol = recordSymbol;
    }
    if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        return symTable.semanticError;
    }
    BType restType = null;
    if (vararg != null && !iExpr.restArgs.isEmpty()) {
        // Both individual rest args and a vararg: check each against the element type.
        BType elementType = ((BArrayType) listTypeRestArg).eType;
        for (BLangExpression restArg : iExpr.restArgs) {
            checkTypeParamExpr(restArg, this.env, elementType, true);
        }
        checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        iExpr.restArgs.add(vararg);
        restType = this.resultType;
    } else if (vararg != null) {
        iExpr.restArgs.add(vararg);
        if (mappingTypeRestArg != null) {
            // The vararg may be either the tuple (positional) or record (named) form.
            LinkedHashSet<BType> restTypes = new LinkedHashSet<>();
            restTypes.add(listTypeRestArg);
            restTypes.add(mappingTypeRestArg);
            BType actualType = BUnionType.create(null, restTypes);
            checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation);
        } else {
            checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        }
        restType = this.resultType;
    } else if (!iExpr.restArgs.isEmpty()) {
        if (listTypeRestArg.tag == TypeTags.ARRAY) {
            BType elementType = ((BArrayType) listTypeRestArg).eType;
            for (BLangExpression restArg : iExpr.restArgs) {
                checkTypeParamExpr(restArg, this.env, elementType, true);
                // Remember the first semantic error, if any.
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        } else {
            BTupleType tupleType = (BTupleType) listTypeRestArg;
            List<BType> tupleMemberTypes = tupleType.tupleTypes;
            BType tupleRestType = tupleType.restType;
            int tupleMemCount = tupleMemberTypes.size();
            for (int j = 0; j < iExpr.restArgs.size(); j++) {
                BLangExpression restArg = iExpr.restArgs.get(j);
                BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType;
                checkTypeParamExpr(restArg, this.env, memType, true);
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        }
    }
    BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType());
    if (restType != symTable.semanticError &&
            Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) &&
            Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) {
        // Native functions with parameterized returns are unified with the expected type.
        retType = unifier.build(retType, expType, iExpr, types, symTable, dlog);
    }
    boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID);
    String sortFuncName = "sort";
    if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) {
        checkArrayLibSortFuncArgs(iExpr);
    }
    // Async action calls produce a future of the return type.
    if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) {
        return this.generateFutureType(invokableSymbol, retType);
    } else {
        return retType;
    }
}
/**
 * Validates the arguments of a lang-lib {@code sort()} invocation: the array's member type
 * must be ordered when no (or a nil) key function is given, and an explicit key function
 * must return an ordered type. Reports diagnostics via {@code dlog}; returns nothing.
 */
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) {
// With at most two args there is no key function, so the array member type itself must be ordered.
// NOTE(review): assumes argExprs is non-empty for a sort() call — TODO confirm at the call sites.
if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
iExpr.argExprs.get(0).getBType());
}
// Only the three-argument form carries an explicit key function (third argument).
if (iExpr.argExprs.size() != 3) {
return;
}
BLangExpression keyFunction = iExpr.argExprs.get(2);
BType keyFunctionType = keyFunction.getBType();
if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
// A nil key function behaves like the two-argument form: the member type must be ordered.
if (keyFunctionType.tag == TypeTags.NIL) {
if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
iExpr.argExprs.get(0).getBType());
}
return;
}
// Extract the key function's return type; the position used for the error depends on
// how the key function is written (var ref, arrow expression, or lambda).
Location pos;
BType returnType;
if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
pos = keyFunction.pos;
returnType = keyFunction.getBType().getReturnType();
} else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) {
BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction);
pos = arrowFunction.body.expr.pos;
returnType = arrowFunction.body.expr.getBType();
if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
} else {
BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction;
pos = keyLambdaFunction.function.pos;
returnType = keyLambdaFunction.function.getBType().getReturnType();
}
if (!types.isOrderedType(returnType, false)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType);
}
}
/**
 * Resolves the parameter a named argument refers to: first among the declared non-rest
 * parameters, then among included-record parameters, and finally — if an included-record
 * parameter allows additional fields — as a fresh virtual symbol for a rest field.
 * Returns {@code null} when the name matches nothing.
 */
private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr,
                                                        List<BVarSymbol> nonRestParams,
                                                        List<BVarSymbol> incRecordParams,
                                                        BVarSymbol incRecordParamAllowAdditionalFields) {
    String name = argName.value;
    for (BVarSymbol candidate : nonRestParams) {
        if (candidate.getName().value.equals(name)) {
            return candidate;
        }
    }
    for (BVarSymbol candidate : incRecordParams) {
        if (candidate.getName().value.equals(name)) {
            return candidate;
        }
    }
    if (incRecordParamAllowAdditionalFields != null) {
        BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type;
        // The argument must type-check against the open record's rest field type.
        checkExpr(expr, env, incRecordType.restFieldType);
        if (!incRecordType.fields.containsKey(name)) {
            return new BVarSymbol(0, names.fromIdNode(argName), null, symTable.noType, null, argName.pos, VIRTUAL);
        }
    }
    return null;
}
/**
 * Wraps the given return type in a future type for an async invocation.
 * The future is flagged as a worker start when the invokable is a worker lambda.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    return new BFutureType(TypeTags.FUTURE, retType, null,
            invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX));
}
/**
 * Convenience overload: type-checks an argument for type-parameter binding,
 * using the argument's own position for diagnostics.
 */
private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    checkTypeParamExpr(arg.pos, arg, env, expectedType, inferTypeForNumericLiteral);
}
/**
 * Type-checks an argument in a context that may bind type parameters.
 * When no type params are in play, this is a plain type check; otherwise the
 * argument's (possibly inferred) type is recorded against the expected type
 * so type-parameter bindings are captured.
 */
private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    if (typeParamAnalyzer.notRequireTypeParams(env)) {
        // Nothing to bind in this scope: a direct check against the expected type suffices.
        checkExpr(arg, env, expectedType);
        return;
    }
    final BType argType;
    if (requireTypeInference(arg, inferTypeForNumericLiteral)) {
        // Infer against the bound form of the expected type, then use the inferred type.
        BType boundType = typeParamAnalyzer.getMatchingBoundType(expectedType, env);
        argType = checkExpr(arg, env, boundType);
    } else {
        checkExpr(arg, env, expectedType);
        argType = arg.getBType();
    }
    typeParamAnalyzer.checkForTypeParamsInArg(pos, argType, this.env, expectedType);
}
/**
 * Decides whether an expression's type must be inferred (rather than checked directly)
 * for type-parameter binding. Group expressions are unwrapped; constructors and arrow
 * functions always need inference, numeric literals only when requested.
 */
private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) {
    NodeKind kind = expr.getKind();
    // Unwrap parenthesized expressions down to the inner expression.
    while (kind == NodeKind.GROUP_EXPR) {
        expr = ((BLangGroupExpr) expr).expression;
        kind = expr.getKind();
    }
    if (kind == NodeKind.NUMERIC_LITERAL) {
        return inferTypeForNumericLiteral;
    }
    return kind == NodeKind.ARROW_EXPR
            || kind == NodeKind.LIST_CONSTRUCTOR_EXPR
            || kind == NodeKind.RECORD_LITERAL_EXPR;
}
/**
 * Type-checks a single field of a mapping constructor (key-value, var-name, or spread-op)
 * against the expected mapping type (a record or a map). Returns the type the field's value
 * expression was checked against, or {@code semanticError} on failure. Spread-op fields
 * return early after validating each spread member.
 */
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) {
BType fieldType = symTable.semanticError;
boolean keyValueField = field.isKeyValueField();
boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
// Set when the field is declared `readonly` in the constructor.
boolean readOnlyConstructorField = false;
String fieldName = null;
Location pos = null;
BLangExpression valueExpr = null;
if (keyValueField) {
valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
} else if (!spreadOpField) {
// A var-name field (`{x}`) serves as both key and value expression.
valueExpr = (BLangRecordVarNameField) field;
}
switch (mappingType.tag) {
case TypeTags.RECORD:
if (keyValueField) {
// Resolve the key against the record's fields to get the expected value type.
BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
BLangRecordKey key = keyValField.key;
fieldType = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType);
readOnlyConstructorField = keyValField.readonly;
pos = key.expr.pos;
fieldName = getKeyValueFieldName(keyValField);
} else if (spreadOpField) {
// Spread operator: every member of the spread expression must fit the target record.
BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
checkExpr(spreadExpr, this.env);
BType spreadExprType = spreadExpr.getBType();
if (spreadExprType.tag == TypeTags.MAP) {
// A map spread must be assignable to the union of all possible field types.
return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
getAllFieldType((BRecordType) mappingType),
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
if (spreadExprType.tag != TypeTags.RECORD) {
dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
spreadExprType);
return symTable.semanticError;
}
// Record spread: check each field of the spread record against the matching target field.
boolean errored = false;
for (BField bField : ((BRecordType) spreadExprType).fields.values()) {
BType specFieldType = bField.type;
BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, this.env, bField.name,
(BRecordType) mappingType);
if (expectedFieldType != symTable.semanticError &&
!types.isAssignable(specFieldType, expectedFieldType)) {
dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
expectedFieldType, bField.name, specFieldType);
if (!errored) {
errored = true;
}
}
}
return errored ? symTable.semanticError : symTable.noType;
} else {
BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
fieldType = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType);
readOnlyConstructorField = varNameField.readonly;
pos = varNameField.pos;
fieldName = getVarNameFieldName(varNameField);
}
break;
case TypeTags.MAP:
if (spreadOpField) {
// Map target: the spread's member types must fit the map constraint.
BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
BType spreadOpType = checkExpr(spreadExp, this.env);
BType spreadOpMemberType;
switch (spreadOpType.tag) {
case TypeTags.RECORD:
// Broaden the record's field types (plus rest type if open) into one representative type.
List<BType> types = new ArrayList<>();
BRecordType recordType = (BRecordType) spreadOpType;
for (BField recField : recordType.fields.values()) {
types.add(recField.type);
}
if (!recordType.sealed) {
types.add(recordType.restFieldType);
}
spreadOpMemberType = getRepresentativeBroadType(types);
break;
case TypeTags.MAP:
spreadOpMemberType = ((BMapType) spreadOpType).constraint;
break;
default:
dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
spreadOpType);
return symTable.semanticError;
}
return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
// Non-spread field on a map: any valid string key is allowed; value type is the constraint.
boolean validMapKey;
if (keyValueField) {
BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
BLangRecordKey key = keyValField.key;
validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey);
readOnlyConstructorField = keyValField.readonly;
pos = key.pos;
fieldName = getKeyValueFieldName(keyValField);
} else {
BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false);
readOnlyConstructorField = varNameField.readonly;
pos = varNameField.pos;
fieldName = getVarNameFieldName(varNameField);
}
fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
break;
}
// A `readonly` field must have an immutable type: intersect selectively-immutable types
// with readonly, reject types that cannot be made immutable.
if (readOnlyConstructorField) {
if (types.isSelectivelyImmutableType(fieldType)) {
fieldType =
ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
(SelectivelyImmutableReferenceType) fieldType,
env, symTable, anonymousModelHelper, names,
new HashSet<>());
} else if (!types.isInherentlyImmutableType(fieldType)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
fieldType = symTable.semanticError;
}
}
if (spreadOpField) {
valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
}
// Under non-error-logging (speculative) checking, work on a clone so the original
// node is not mutated; otherwise record the resolved type on the field node.
BLangExpression exprToCheck = valueExpr;
if (this.nonErrorLoggingCheck) {
exprToCheck = nodeCloner.cloneNode(valueExpr);
} else {
((BLangNode) field).setBType(fieldType);
}
return checkExpr(exprToCheck, this.env, fieldType);
}
/**
 * Resolves the expected value type for a record-literal key. A computed key may address
 * any field, so its value type is the union of every field type (plus the rest type for
 * open records); a literal or identifier key resolves to that specific field's type.
 */
private BType checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) {
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        if (keyExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        LinkedHashSet<BType> possibleFieldTypes = new LinkedHashSet<>();
        for (BField recField : recordType.fields.values()) {
            possibleFieldTypes.add(recField.type);
        }
        if (recordType.restFieldType.tag != TypeTags.NONE) {
            possibleFieldTypes.add(recordType.restFieldType);
        }
        return BUnionType.create(null, possibleFieldTypes);
    }
    Name fieldName;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        fieldName = names.fromIdNode(((BLangSimpleVarRef) keyExpr).variableName);
    } else if (keyKind == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
        return symTable.semanticError;
    }
    return checkRecordLiteralKeyByName(keyExpr.pos, this.env, fieldName, recordType);
}
/**
 * Looks up a record-literal key by name. A declared field yields its type; an unknown
 * key falls back to the rest field type for open records and is an error for sealed ones.
 */
private BType checkRecordLiteralKeyByName(Location location, SymbolEnv env, Name key,
                                          BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(location, env, key, recordType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        return fieldSymbol.type;
    }
    if (!recordType.sealed) {
        return recordType.restFieldType;
    }
    dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
               recordType.tsymbol.type.getKind().typeName(), recordType);
    return symTable.semanticError;
}
/**
 * Builds the union of every possible value type of the record: all declared field
 * types plus the rest field type when one is present.
 */
private BType getAllFieldType(BRecordType recordType) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    recordType.fields.values().forEach(recField -> memberTypes.add(recField.type));
    BType rest = recordType.restFieldType;
    if (rest != null && rest != symTable.noType) {
        memberTypes.add(rest);
    }
    return BUnionType.create(null, memberTypes);
}
/**
 * Validates a key of a map/json mapping constructor: a computed key must type-check
 * as a string; otherwise only an identifier or a string literal is accepted.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) {
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        return keyExpr.getBType() != symTable.semanticError;
    }
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF
            || (keyKind == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING)) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Ensures the given access-result type includes nil, adding it via a union
 * only when the type is not already nillable.
 */
private BType addNilForNillableAccessType(BType actualType) {
    return actualType.isNullable()
            ? actualType
            : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves {@code fieldName} as a required (non-optional) field of the record.
 * On success the symbol is bound to the access expression and the field type is
 * returned; otherwise {@code semanticError}.
 */
private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (resolved != symTable.notFoundSymbol && !Symbols.isOptional(resolved)) {
        varReferExpr.symbol = resolved;
        return resolved.type;
    }
    return symTable.semanticError;
}
/**
 * Resolves {@code fieldName} as an optional field of the record. On success the
 * symbol is bound to the access expression and the field type is returned;
 * otherwise {@code semanticError}.
 */
private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (resolved != symTable.notFoundSymbol && Symbols.isOptional(resolved)) {
        varReferExpr.symbol = resolved;
        return resolved.type;
    }
    return symTable.semanticError;
}
/**
 * Resolves {@code fieldName} as a rest-field access: valid only when the name is
 * NOT a declared field and the record is open. Returns the rest field type, or
 * {@code semanticError} otherwise.
 */
private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (resolved == symTable.notFoundSymbol && !recordType.sealed) {
        return recordType.restFieldType;
    }
    return symTable.semanticError;
}
/**
 * Resolves a field access on an object type: first as a declared field, then as an
 * attached function. Binds the resolved symbol to the access expression and returns
 * its type, or {@code semanticError} (with a diagnostic) when neither exists.
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
Name fieldName, BObjectType objectType) {
BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
this.env, fieldName, objectType.tsymbol);
if (fieldSymbol != symTable.notFoundSymbol) {
bLangFieldBasedAccess.symbol = fieldSymbol;
return fieldSymbol.type;
}
// Not a field: try resolving an attached method of the object instead.
Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
fieldName.value));
fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol);
if (fieldSymbol == symTable.notFoundSymbol) {
dlog.error(bLangFieldBasedAccess.field.pos,
DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
return symTable.semanticError;
}
// An `isolated` method reached through a non-isolated object must not be treated as
// isolated; clear the flag on a duplicate so the original symbol stays untouched.
if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) &&
!Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) {
fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol);
fieldSymbol.flags &= ~Flags.ISOLATED;
fieldSymbol.type.flags &= ~Flags.ISOLATED;
}
bLangFieldBasedAccess.symbol = fieldSymbol;
return fieldSymbol.type;
}
/**
 * Returns the member type at {@code indexValue} of a tuple: a fixed member when the
 * index is within bounds, the rest type when the index runs past the fixed members of
 * a tuple that has one, and {@code semanticError} otherwise (including negative indices).
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType tuple = (BTupleType) tupleType;
    int fixedMemberCount = tuple.tupleTypes.size();
    if (indexValue >= 0 && indexValue < fixedMemberCount) {
        return tuple.tupleTypes.get(indexValue);
    }
    if (indexValue >= fixedMemberCount && tuple.restType != null) {
        return tuple.restType;
    }
    return symTable.semanticError;
}
/**
 * Type-checks an XML element's start/end tag names and reports a mismatch when both
 * are statically-known QNames that differ. When either tag is a dynamic expression
 * (or the element is self-closing) no static comparison is possible.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: there is no end tag to match.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    boolean startIsQName = startTagName.getKind() == NodeKind.XML_QNAME;
    boolean endIsQName = endTagName.getKind() == NodeKind.XML_QNAME;
    if (startIsQName && endIsQName && startTagName.equals(endTagName)) {
        return;
    }
    if (!startIsQName && !endIsQName) {
        // Both tags are expressions; equality can only be established at runtime.
        return;
    }
    dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH);
}
/**
 * Type-checks the interpolated expressions of a string template. Each interpolation
 * must be a non-nil simple basic type or string; anything else is reported.
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, env);
        BType exprType = expr.getBType();
        if (exprType == symTable.semanticError || types.isNonNilSimpleBasicTypeOrString(exprType)) {
            continue;
        }
        dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                   BUnionType.create(null, symTable.intType, symTable.floatType,
                                     symTable.decimalType, symTable.stringType,
                                     symTable.booleanType), exprType);
    }
}
/**
 * Concatenates consecutive text-producing child expressions of an XML element into
 * single XML text literal nodes, returning the reduced list of children. XML-valued
 * children are kept as-is and terminate the current run of text expressions.
 *
 * @param exprs         child expressions of the XML element
 * @param xmlElementEnv environment in which the children are type-checked
 * @return reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
List<BLangExpression> newChildren = new ArrayList<>();
// Accumulates a run of consecutive text-producing expressions.
List<BLangExpression> tempConcatExpressions = new ArrayList<>();
for (BLangExpression expr : exprs) {
BType exprType;
if (expr.getKind() == NodeKind.QUERY_EXPR) {
// Query expressions are checked against the surrounding expected type.
exprType = checkExpr(expr, xmlElementEnv, expType);
} else {
exprType = checkExpr(expr, xmlElementEnv);
}
if (TypeTags.isXMLTypeTag(exprType.tag)) {
// An xml child flushes the pending text run before being added directly.
if (!tempConcatExpressions.isEmpty()) {
newChildren.add(getXMLTextLiteral(tempConcatExpressions));
tempConcatExpressions = new ArrayList<>();
}
newChildren.add(expr);
continue;
}
BType type = expr.getBType();
// NOTE(review): relies on basic-type tags sorting below TypeTags.JSON, so tags >= JSON
// are non-basic types that cannot appear as interpolated text — confirm tag ordering.
if (type.tag >= TypeTags.JSON) {
if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
BUnionType.create(null, symTable.intType, symTable.floatType,
symTable.decimalType, symTable.stringType,
symTable.booleanType, symTable.xmlType), type);
}
continue;
}
tempConcatExpressions.add(expr);
}
// Flush any trailing run of text expressions.
if (!tempConcatExpressions.isEmpty()) {
newChildren.add(getXMLTextLiteral(tempConcatExpressions));
}
return newChildren;
}
/**
 * Wraps a run of text-producing expressions into a single XML text literal node,
 * positioned at the first fragment and typed as xml.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = exprs.get(0).pos;
    textLiteral.textFragments = exprs;
    textLiteral.setBType(symTable.xmlType);
    return textLiteral;
}
/**
 * Computes the final result type of an access expression, widening the actual type
 * with nil (for nil-returning accesses) and error (for error-safe navigation over a
 * possibly-erroneous parent). A single-member union collapses to its member.
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    accessExpr.originalType = actualType;
    BUnionType resultType = BUnionType.create(null, actualType);
    if (returnsNull(accessExpr)) {
        resultType.add(symTable.nilType);
    }
    BType parentType = accessExpr.expr.getBType();
    boolean parentMayBeError = parentType.tag == TypeTags.SEMANTIC_ERROR
            || (parentType.tag == TypeTags.UNION
                && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType));
    if (accessExpr.errorSafeNavigation && parentMayBeError) {
        resultType.add(symTable.errorType);
    }
    Set<BType> members = resultType.getMemberTypes();
    return members.size() == 1 ? members.iterator().next() : resultType;
}
/**
 * Decides whether this access expression may evaluate to nil: a nillable non-json
 * parent always may; member access on a map may, unless the map constraint is
 * any/json (whose nullability is already accounted for above).
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.getBType();
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }
    if (parentType.tag != TypeTags.MAP
            || accessExpr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BType constraint = ((BMapType) parentType).constraint;
    return constraint != null && constraint.tag != TypeTags.ANY && constraint.tag != TypeTags.JSON;
}
/**
 * Resolves a field access on an object or a union of objects. For a union the field
 * must exist on every member; the result is the (collapsed) union of the per-member
 * field types, or {@code semanticError} if any member fails.
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a required-field access on a record or a union of records. For a union
 * every member must provide the field as required; the result is the (collapsed)
 * union of per-member field types, or {@code semanticError} if any member fails.
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a field access used on the left-hand side of an assignment: required fields
 * are preferred, but optional fields are also assignable. Union handling mirrors
 * {@code checkRecordFieldAccessExpr}.
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                            Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BType requiredFieldType =
                checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        if (requiredFieldType != symTable.semanticError) {
            return requiredFieldType;
        }
        // Not required: an optional field is still a valid assignment target.
        return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldType == symTable.semanticError) {
            return symTable.semanticError;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves an optional field access ({@code x?.y}) on a record or union of records.
 * A required field keeps its type; an optional field's type is widened with nil.
 * For a union, members lacking the field are skipped, and if any member lacked it
 * the combined result is widened with nil; {@code semanticError} when no member has it.
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
if (varRefType.tag == TypeTags.RECORD) {
// A required field is always present, so its type is returned unchanged.
BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
if (fieldType != symTable.semanticError) {
return fieldType;
}
fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
if (fieldType == symTable.semanticError) {
return fieldType;
}
// Optional field: the access may yield nil when the field is absent.
return BUnionType.create(null, fieldType, symTable.nilType);
}
Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
BType fieldType;
// Tracks whether some union member does not provide the field at all.
boolean nonMatchedRecordExists = false;
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : memberTypes) {
BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);
if (individualFieldType == symTable.semanticError) {
nonMatchedRecordExists = true;
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.isEmpty()) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
fieldType = fieldTypeMembers.iterator().next();
} else {
fieldType = BUnionType.create(null, fieldTypeMembers);
}
// If any member lacked the field, the overall access may produce nil.
return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
/**
 * Resolves a plain field access ({@code x.y}) depending on the static type of the
 * accessed expression: objects, records, lax types (json/xml-like), chained lax
 * accesses, and xml sequences. Records the original (unwidened) type on the access
 * node and reports diagnostics for unsupported accesses.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
BType actualType = symTable.semanticError;
if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
fieldAccessExpr.originalType = actualType;
} else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
// Value position: only required fields support `.` access.
actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
if (actualType != symTable.semanticError) {
fieldAccessExpr.originalType = actualType;
return actualType;
}
if (!fieldAccessExpr.isLValue) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD,
varRefType, fieldName);
return actualType;
}
// Assignment target: optional fields are also valid.
actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
fieldAccessExpr.originalType = actualType;
if (actualType == symTable.semanticError) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
}
} else if (types.isLax(varRefType)) {
// Lax (json-like) access: read-only, and the result may be an error at runtime.
if (fieldAccessExpr.isLValue) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT,
varRefType);
return symTable.semanticError;
}
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
fieldAccessExpr.originalType = laxFieldAccessType;
} else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
// Chained access where the parent access was lax: propagate laxness via originalType.
BType laxFieldAccessType =
getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
fieldAccessExpr.errorSafeNavigation = true;
fieldAccessExpr.originalType = laxFieldAccessType;
} else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
// XML sequences cannot be updated through field access; reads yield xml.
if (fieldAccessExpr.isLValue) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
}
actualType = symTable.xmlType;
fieldAccessExpr.originalType = actualType;
} else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
varRefType);
}
return actualType;
}
/**
 * Resolves the namespace prefix of an ns-prefixed field access to an XMLNS symbol:
 * directly when the prefix names a namespace declaration, via a module constant when
 * it names a package, and with a diagnostic when it resolves to nothing.
 */
private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) {
    String nsPrefix = fieldAccessExpr.nsPrefix.value;
    BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix));
    if (nsSymbol == symTable.notFoundSymbol) {
        dlog.error(fieldAccessExpr.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE,
                   fieldAccessExpr.nsPrefix);
        return;
    }
    if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
        // The prefix names a module: look the namespace up among its constants.
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
                fieldAccessExpr.field.value, nsPrefix, (BPackageSymbol) nsSymbol, fieldAccessExpr.pos);
    } else {
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) nsSymbol;
    }
}
/**
 * True when the access expression has a recorded original type and that type is lax.
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    return originalType != null && types.isLax(originalType);
}
/**
 * Computes the member type produced by a lax field access on the given type:
 * json stays json, xml/xml:Element yields string, a map yields its constraint,
 * and a union yields the (collapsed) union of its members' lax access types.
 * Any other type is {@code semanticError}.
 */
private BType getLaxFieldAccessType(BType exprType) {
    switch (exprType.tag) {
        case TypeTags.JSON:
            return symTable.jsonType;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            return symTable.stringType;
        case TypeTags.MAP:
            return ((BMapType) exprType).constraint;
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) exprType;
            if (types.isSameType(symTable.jsonType, unionType)) {
                return symTable.jsonType;
            }
            LinkedHashSet<BType> laxMemberTypes = new LinkedHashSet<>();
            for (BType memberType : unionType.getMemberTypes()) {
                laxMemberTypes.add(getLaxFieldAccessType(memberType));
            }
            return laxMemberTypes.size() == 1
                    ? laxMemberTypes.iterator().next()
                    : BUnionType.create(null, laxMemberTypes);
        default:
            return symTable.semanticError;
    }
}
/**
 * Resolves an optional field access ({@code x?.y}). A nillable union parent is first
 * stripped of nil (remembering that it was nillable); the access is then resolved for
 * records, lax types, or chained lax accesses, and nil is added back to the result
 * when the parent could be nil. Reports diagnostics for unsupported accesses.
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
BType actualType = symTable.semanticError;
// Whether the final result must include nil.
boolean nillableExprType = false;
BType effectiveType = varRefType;
// Strip nil from a nillable union so the access is checked against the non-nil part.
if (varRefType.tag == TypeTags.UNION) {
Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
if (memTypes.contains(symTable.nilType)) {
LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
for (BType bType : memTypes) {
if (bType != symTable.nilType) {
nilRemovedSet.add(bType);
} else {
nillableExprType = true;
}
}
effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
BUnionType.create(null, nilRemovedSet);
}
}
if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
if (actualType == symTable.semanticError) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
varRefType, fieldName);
}
fieldAccessExpr.nilSafeNavigation = nillableExprType;
// On intermediate (non-leaf) nil-safe accesses, record the type without nil.
fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType :
types.getTypeWithoutNil(actualType);
} else if (types.isLax(effectiveType)) {
// Lax access: the result may include error depending on the accessed type.
BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
actualType = accessCouldResultInError(effectiveType) ?
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
fieldAccessExpr.originalType = laxFieldAccessType;
fieldAccessExpr.nilSafeNavigation = true;
nillableExprType = true;
} else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
// Chained access whose parent access was lax: propagate laxness via originalType.
BType laxFieldAccessType =
getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
actualType = accessCouldResultInError(effectiveType) ?
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
fieldAccessExpr.errorSafeNavigation = true;
fieldAccessExpr.originalType = laxFieldAccessType;
fieldAccessExpr.nilSafeNavigation = true;
nillableExprType = true;
} else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType);
}
// Add nil back when the parent could be nil and the result does not include it yet.
if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
actualType = BUnionType.create(null, actualType, symTable.nilType);
}
return actualType;
}
/**
 * Whether a lax access on the given type can produce an error value at runtime:
 * json and xml can; a union can if any member can; maps and everything else cannot.
 */
private boolean accessCouldResultInError(BType type) {
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
            return true;
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (accessCouldResultInError(memberType)) {
                    return true;
                }
            }
            return false;
        default:
            // Includes maps, which are explicitly safe.
            return false;
    }
}
    /**
     * Type-checks an index-based access expression ({@code expr[index]}) and returns the type of
     * the access result, or {@code symTable.semanticError} after logging a diagnostic.
     * Supported container kinds (in dispatch order below): mappings, lists, string, xml, table.
     * If the container type is {@code T?} (a union containing nil), nil is stripped before
     * dispatch and re-added to the result type at the end.
     *
     * @param indexBasedAccessExpr the access expression to check
     * @return the access result type, or {@code symTable.semanticError}
     */
    private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
        BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType());
        boolean nillableExprType = false;
        if (varRefType.tag == TypeTags.UNION) {
            Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
            if (memTypes.contains(symTable.nilType)) {
                // Strip nil from the union so the remaining type can be dispatched on; remember
                // that nil was present so it can be re-added to the result.
                LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
                for (BType bType : memTypes) {
                    if (bType != symTable.nilType) {
                        nilRemovedSet.add(bType);
                    } else {
                        nillableExprType = true;
                    }
                }
                if (nillableExprType) {
                    varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                            BUnionType.create(null, nilRemovedSet);
                    // Only mapping containers support member access through a nilable type.
                    if (!types.isSubTypeOfMapping(varRefType)) {
                        dlog.error(indexBasedAccessExpr.pos,
                                   DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                                   indexBasedAccessExpr.expr.getBType());
                        return symTable.semanticError;
                    }
                    // Assignment through a nilable container is not allowed.
                    if (indexBasedAccessExpr.isLValue) {
                        dlog.error(indexBasedAccessExpr.pos,
                                   DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                                   indexBasedAccessExpr.expr.getBType());
                        return symTable.semanticError;
                    }
                }
            }
        }
        BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
        BType actualType = symTable.semanticError;
        if (types.isSubTypeOfMapping(varRefType)) {
            // Mapping access: index must be a string.
            checkExpr(indexExpr, this.env, symTable.stringType);
            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);
            if (actualType == symTable.semanticError) {
                // A constant string key gives a more precise "undefined field" error.
                if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) {
                    String fieldName = getConstFieldName(indexExpr);
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD,
                               fieldName, indexBasedAccessExpr.expr.getBType());
                    return actualType;
                }
                dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType());
                return actualType;
            }
            indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
            indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                    types.getTypeWithoutNil(actualType);
        } else if (types.isSubTypeOfList(varRefType)) {
            // List (array/tuple) access: index must be an int.
            checkExpr(indexExpr, this.env, symTable.intType);
            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
            indexBasedAccessExpr.originalType = actualType;
            if (actualType == symTable.semanticError) {
                // A constant int index out of a fixed-size list's range gets a dedicated error.
                if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) {
                    dlog.error(indexBasedAccessExpr.indexExpr.pos,
                               DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr));
                    return actualType;
                }
                dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType());
                return actualType;
            }
        } else if (types.isAssignable(varRefType, symTable.stringType)) {
            // String access: read-only; result is a string (a substring of length 1 at runtime).
            if (indexBasedAccessExpr.isLValue) {
                dlog.error(indexBasedAccessExpr.pos,
                           DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                           indexBasedAccessExpr.expr.getBType());
                return symTable.semanticError;
            }
            checkExpr(indexExpr, this.env, symTable.intType);
            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }
            indexBasedAccessExpr.originalType = symTable.stringType;
            actualType = symTable.stringType;
        } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
            // XML access: read-only; updating an xml sequence via member access is illegal.
            if (indexBasedAccessExpr.isLValue) {
                indexExpr.setBType(symTable.semanticError);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
                return actualType;
            }
            BType type = checkExpr(indexExpr, this.env, symTable.intType);
            if (type == symTable.semanticError) {
                return type;
            }
            indexBasedAccessExpr.originalType = varRefType;
            actualType = varRefType;
        } else if (varRefType.tag == TypeTags.TABLE) {
            // Table access: keyed lookup; the index must match the table's key constraint.
            if (indexBasedAccessExpr.isLValue) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS,
                           varRefType);
                return symTable.semanticError;
            }
            BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType();
            BType keyTypeConstraint = tableType.keyTypeConstraint;
            if (tableType.keyTypeConstraint == null) {
                // Derive the key constraint from the key field names; keyless tables cannot be
                // accessed by member access.
                keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()).
                        fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint);
                if (keyTypeConstraint == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                               DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                               indexBasedAccessExpr.expr);
                    return symTable.semanticError;
                }
            }
            if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
                // Single-key lookup.
                checkExpr(indexExpr, this.env, keyTypeConstraint);
                if (indexExpr.getBType() == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                               keyTypeConstraint);
                    return symTable.semanticError;
                }
            } else {
                // Multi-key lookup: each key expression is checked against the corresponding
                // member of the (tuple) key constraint.
                List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr)
                        indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
                List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
                if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                               keyTypeConstraint);
                    return symTable.semanticError;
                }
                for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                    BLangExpression keyExpr = multiKeyExpressionList.get(i);
                    checkExpr(keyExpr, this.env, keyConstraintTypes.get(i));
                    if (keyExpr.getBType() == symTable.semanticError) {
                        dlog.error(indexBasedAccessExpr.pos,
                                   DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                                   keyTypeConstraint);
                        return symTable.semanticError;
                    }
                }
            }
            if (expType.tag != TypeTags.NONE) {
                BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType);
                if (resultType == symTable.semanticError) {
                    return symTable.semanticError;
                }
            }
            BType constraint = tableType.constraint;
            // A keyed lookup may miss, so the result is nilable.
            actualType = addNilForNillableAccessType(constraint);
            indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                    types.getTypeWithoutNil(actualType);
        } else if (varRefType == symTable.semanticError) {
            // Container already failed checking; propagate without a duplicate diagnostic.
            indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
            return symTable.semanticError;
        } else {
            indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                       indexBasedAccessExpr.expr.getBType());
            return symTable.semanticError;
        }
        // Re-add nil that was stripped from a nilable container type.
        if (nillableExprType && !actualType.isNullable()) {
            actualType = BUnionType.create(null, actualType, symTable.nilType);
        }
        return actualType;
    }
private Long getConstIndex(BLangExpression indexExpr) {
return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value :
(Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
private String getConstFieldName(BLangExpression indexExpr) {
return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value :
(String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
    /**
     * Determines the result type of an index access on an array, based on the static type of the
     * index expression (int, a finite type of int values, or a union containing finite types).
     *
     * @param indexBasedAccess the access expression
     * @param indexExprType    the static type of the index expression
     * @param arrayType        the array being accessed
     * @return the array's element type, or {@code symTable.semanticError} if no valid index exists
     */
    private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                             BArrayType arrayType) {
        BType actualType = symTable.semanticError;
        switch (indexExprType.tag) {
            case TypeTags.INT:
                BLangExpression indexExpr = indexBasedAccess.indexExpr;
                // A non-constant index, or an open (unsized) array, cannot be range-checked here.
                if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) {
                    actualType = arrayType.eType;
                    break;
                }
                // Constant index on a fixed-size array: reject indexes at or beyond the size.
                // NOTE(review): a negative constant index is not rejected by this comparison —
                // presumably caught elsewhere; confirm.
                actualType = getConstIndex(indexExpr) >= arrayType.size ? symTable.semanticError : arrayType.eType;
                break;
            case TypeTags.FINITE:
                // Finite index type: the access is valid if at least one member of the value
                // space is a legal index for this array.
                BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
                boolean validIndexExists = false;
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    if (indexValue >= 0 &&
                            (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) {
                        validIndexExists = true;
                        break;
                    }
                }
                if (!validIndexExists) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
                break;
            case TypeTags.UNION:
                // Union index type: collapse all finite members into a single finite type and
                // re-check recursively.
                List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                        .filter(memType -> memType.tag == TypeTags.FINITE)
                        .map(matchedType -> (BFiniteType) matchedType)
                        .collect(Collectors.toList());
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
                if (elementType == symTable.semanticError) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
        }
        return actualType;
    }
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
if (type.tag == TypeTags.ARRAY) {
return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type);
}
if (type.tag == TypeTags.TUPLE) {
return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType());
}
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : ((BUnionType) type).getMemberTypes()) {
BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType);
if (individualFieldType == symTable.semanticError) {
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 0) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
return fieldTypeMembers.iterator().next();
}
return BUnionType.create(null, fieldTypeMembers);
}
    /**
     * Determines the result type of an index access on a tuple, based on the static type of the
     * index expression (int, a finite type of int values, or a union containing finite types).
     *
     * @param accessExpr  the access expression
     * @param tuple       the tuple being accessed
     * @param currentType the static type of the index expression
     * @return the member access result type, or {@code symTable.semanticError}
     */
    private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.INT:
                if (isConst(indexExpr)) {
                    // A constant index selects a specific tuple member type.
                    actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
                } else {
                    // A variable index may select any member: result is the union of all
                    // member types of the tuple.
                    BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType();
                    LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                    actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
                            tupleTypes);
                }
                break;
            case TypeTags.FINITE:
                // Finite index: union of the member types at each valid index value; invalid
                // values are skipped, and it is an error only if none are valid.
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    BType fieldType = checkTupleFieldType(tuple, indexValue);
                    if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                        possibleTypes.add(fieldType);
                    }
                }
                if (possibleTypes.size() == 0) {
                    return symTable.semanticError;
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Union index: non-finite members are checked recursively one by one; finite
                // members are collapsed into a single finite type and checked once.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                // Any semantic error from a member invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
        }
        return actualType;
    }
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
tupleType.tupleTypes
.forEach(memberType -> {
if (memberType.tag == TypeTags.UNION) {
collectMemberTypes((BUnionType) memberType, memberTypes);
} else {
memberTypes.add(memberType);
}
});
return memberTypes;
}
private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
if (type.tag == TypeTags.MAP) {
BType constraint = ((BMapType) type).constraint;
return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint);
}
if (type.tag == TypeTags.RECORD) {
return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType());
}
BType fieldType;
boolean nonMatchedRecordExists = false;
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : ((BUnionType) type).getMemberTypes()) {
BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType);
if (individualFieldType == symTable.semanticError) {
nonMatchedRecordExists = true;
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 0) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
fieldType = fieldTypeMembers.iterator().next();
} else {
fieldType = BUnionType.create(null, fieldTypeMembers);
}
return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
    /**
     * Determines the result type of an index access on a record, based on the static type of the
     * index expression (string, a finite type of string values, or a union containing them).
     * Required-field access yields the field type as-is; optional/rest-field access is made
     * nilable (except for lvalues and {@code never}-typed fields).
     *
     * @param accessExpr  the access expression
     * @param record      the record being accessed
     * @param currentType the static type of the index expression
     * @return the member access result type, or {@code symTable.semanticError}
     */
    private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.STRING:
                if (isConst(indexExpr)) {
                    // Constant key: resolve in order — required field, optional field, rest field.
                    String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr));
                    actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType != symTable.semanticError) {
                        return actualType;
                    }
                    actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType == symTable.semanticError) {
                        actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                        if (actualType == symTable.semanticError) {
                            return actualType;
                        }
                        // never-typed rest field: the field can never hold a value, return as-is.
                        if (actualType == symTable.neverType) {
                            return actualType;
                        }
                        return addNilForNillableAccessType(actualType);
                    }
                    if (accessExpr.isLValue) {
                        return actualType;
                    }
                    return addNilForNillableAccessType(actualType);
                }
                // Non-constant string key: the result may be any field type (plus the rest
                // field type), and nil since the key may be absent.
                LinkedHashSet<BType> fieldTypes = record.fields.values().stream()
                        .map(field -> field.type)
                        .collect(Collectors.toCollection(LinkedHashSet::new));
                if (record.restFieldType.tag != TypeTags.NONE) {
                    fieldTypes.add(record.restFieldType);
                }
                if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                    fieldTypes.add(symTable.nilType);
                }
                actualType = BUnionType.create(null, fieldTypes);
                break;
            case TypeTags.FINITE:
                // Finite key type: union of the resolved types for each possible key value;
                // unresolvable keys are skipped, error only if none resolve.
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    String fieldName = (String) ((BLangLiteral) finiteMember).value;
                    BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (fieldType == symTable.semanticError) {
                        fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                        if (fieldType == symTable.semanticError) {
                            fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                        }
                        if (fieldType != symTable.semanticError) {
                            fieldType = addNilForNillableAccessType(fieldType);
                        }
                    }
                    if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                        continue;
                    }
                    possibleTypes.add(fieldType);
                }
                if (possibleTypes.isEmpty()) {
                    return symTable.semanticError;
                }
                if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                    possibleTypes.add(symTable.nilType);
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Union key type: non-finite members checked recursively one by one; finite
                // members collapsed into a single finite type and checked once.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                // Any semantic error from a member invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
        }
        return actualType;
    }
private List<BType> getTypesList(BType type) {
if (type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) type;
return new ArrayList<>(unionType.getMemberTypes());
} else {
return Lists.of(type);
}
}
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType());
LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
BType patternExprType = pattern.expr.getBType();
matchExprTypes.addAll(getTypesList(patternExprType));
if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
return new LinkedHashSet<BType>() {
{
add(symTable.semanticError);
}
};
}
assignable = this.types.isAssignable(type, pattern.variable.getBType());
if (assignable) {
break;
}
}
if (!assignable) {
matchExprTypes.add(type);
}
}
return matchExprTypes;
}
    /**
     * Recursively checks whether a value of the given type could contain a table value, walking
     * through unions, map/record fields, array elements, and tuple members.
     *
     * @param type             the type to inspect
     * @param encounteredTypes types already visited on this path; used as a cycle guard for
     *                         recursive types (a revisited type contributes {@code false})
     * @return true if the type could hold a table value
     */
    private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
        if (encounteredTypes.contains(type)) {
            // Already being examined higher up the recursion — break the cycle.
            return false;
        }
        encounteredTypes.add(type);
        switch (type.tag) {
            case TypeTags.UNION:
                for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
                    if (couldHoldTableValues(bType1, encounteredTypes)) {
                        return true;
                    }
                }
                return false;
            case TypeTags.MAP:
                return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
            case TypeTags.RECORD:
                BRecordType recordType = (BRecordType) type;
                for (BField field : recordType.fields.values()) {
                    if (couldHoldTableValues(field.type, encounteredTypes)) {
                        return true;
                    }
                }
                // An open record may also hold tables via its rest field.
                return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
            case TypeTags.ARRAY:
                return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
            case TypeTags.TUPLE:
                for (BType bType : ((BTupleType) type).getTupleTypes()) {
                    if (couldHoldTableValues(bType, encounteredTypes)) {
                        return true;
                    }
                }
                return false;
        }
        // Simple (non-container, non-table) types cannot hold table values.
        // NOTE(review): TypeTags.TABLE itself is not handled here — presumably callers check the
        // top-level type separately; confirm.
        return false;
    }
private boolean isConst(BLangExpression expression) {
if (ConstantAnalyzer.isValidConstantExpressionNode(expression)) {
return true;
}
if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
return false;
}
return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
}
private Name getCurrentCompUnit(BLangNode node) {
return names.fromString(node.pos.lineRange().filePath());
}
    /**
     * Reduces a list of inferred types to a single representative broad type: any type that is
     * assignable to another in the list is dropped (the broader one is kept), and the remaining
     * types are combined into a union if more than one survives. A semantic error anywhere in
     * the list short-circuits to that error.
     *
     * NOTE: {@code inferredTypeList} is mutated in place (narrower types are removed).
     *
     * @param inferredTypeList candidate types; mutated by this method
     * @return the single broadest type, a union of the incomparable survivors, or a semantic error
     */
    private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
        for (int i = 0; i < inferredTypeList.size(); i++) {
            BType type = inferredTypeList.get(i);
            if (type.tag == TypeTags.SEMANTIC_ERROR) {
                return type;
            }
            for (int j = i + 1; j < inferredTypeList.size(); j++) {
                BType otherType = inferredTypeList.get(j);
                if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                    return otherType;
                }
                // otherType is narrower than type: drop it and re-examine index j.
                if (types.isAssignable(otherType, type)) {
                    inferredTypeList.remove(j);
                    j -= 1;
                    continue;
                }
                // type is narrower than otherType: drop it and restart from the same i.
                if (types.isAssignable(type, otherType)) {
                    inferredTypeList.remove(i);
                    i -= 1;
                    break;
                }
            }
        }
        if (inferredTypeList.size() == 1) {
            return inferredTypeList.get(0);
        }
        return BUnionType.create(null, inferredTypeList.toArray(new BType[0]));
    }
    /**
     * Infers and defines an anonymous record type for a record literal (used when the expected
     * type does not pin down a specific record type). Walks the literal's fields — key-value
     * fields, spread fields, and var-name fields — collecting per-key type info and rest-field
     * candidate types, then builds and registers the record type definition.
     *
     * @param recordLiteral the record literal to infer a type for
     * @param expType       the expected type used when checking field value expressions
     * @return the newly defined record type, or {@code symTable.semanticError}
     */
    private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);
        // keyed field name -> accumulated type info; encounter order preserved.
        Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>();
        // candidate types for the record's rest field (from computed keys and spread maps).
        List<BType> restFieldTypes = new ArrayList<>();
        for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
            if (field.isKeyValueField()) {
                BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValue.key;
                BLangExpression expression = keyValue.valueExpr;
                BLangExpression keyExpr = key.expr;
                if (key.computedKey) {
                    // A computed key ([expr]: value) cannot name a field statically — its value
                    // type contributes to the rest field instead.
                    checkExpr(keyExpr, env, symTable.stringType);
                    BType exprType = checkExpr(expression, env, expType);
                    if (isUniqueType(restFieldTypes, exprType)) {
                        restFieldTypes.add(exprType);
                    }
                } else {
                    addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr),
                                           keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) :
                                                   checkExpr(expression, env, expType),
                                           true, keyValue.readonly);
                }
            } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType);
                int typeTag = type.tag;
                if (typeTag == TypeTags.MAP) {
                    // Spreading a map contributes its constraint to the rest field.
                    BType constraintType = ((BMapType) type).constraint;
                    if (isUniqueType(restFieldTypes, constraintType)) {
                        restFieldTypes.add(constraintType);
                    }
                }
                if (type.tag != TypeTags.RECORD) {
                    continue;
                }
                // Spreading a record contributes each of its fields by name; optional fields
                // stay optional in the inferred type.
                BRecordType recordType = (BRecordType) type;
                for (BField recField : recordType.fields.values()) {
                    addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type,
                                           !Symbols.isOptional(recField.symbol), false);
                }
                if (!recordType.sealed) {
                    BType restFieldType = recordType.restFieldType;
                    if (isUniqueType(restFieldTypes, restFieldType)) {
                        restFieldTypes.add(restFieldType);
                    }
                }
            } else {
                // Var-name shorthand field ({x} meaning {x: x}).
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField),
                                       varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) :
                                               checkExpr(varNameField, env, expType),
                                       true, varNameField.readonly);
            }
        }
        // Materialize the collected per-key info into record fields and symbols.
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        boolean allReadOnlyNonRestFields = true;
        for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) {
            FieldInfo fieldInfo = entry.getValue();
            List<BType> types = fieldInfo.types;
            if (types.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            String key = entry.getKey();
            Name fieldName = names.fromString(key);
            BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0]));
            Set<Flag> flags = new HashSet<>();
            if (fieldInfo.required) {
                flags.add(Flag.REQUIRED);
            } else {
                flags.add(Flag.OPTIONAL);
            }
            if (fieldInfo.readonly) {
                flags.add(Flag.READONLY);
            } else if (allReadOnlyNonRestFields) {
                allReadOnlyNonRestFields = false;
            }
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol,
                                                    symTable.builtinPos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordType = new BRecordType(recordSymbol);
        recordType.fields = fields;
        if (restFieldTypes.contains(symTable.semanticError)) {
            return symTable.semanticError;
        }
        if (restFieldTypes.isEmpty()) {
            // No rest-field contributions: the inferred record is sealed.
            recordType.sealed = true;
            recordType.restFieldType = symTable.noType;
        } else if (restFieldTypes.size() == 1) {
            recordType.restFieldType = restFieldTypes.get(0);
        } else {
            recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0]));
        }
        recordSymbol.type = recordType;
        recordType.tsymbol = recordSymbol;
        // A readonly expected type, or a sealed record whose fields are all readonly, makes the
        // whole record readonly.
        if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) {
            recordType.flags |= Flags.READONLY;
            recordSymbol.flags |= Flags.READONLY;
        }
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                                                                                       recordLiteral.pos);
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                                                                                           names, symTable);
        TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);
        return recordType;
    }
    /**
     * Creates a fresh anonymous record type symbol, complete with a no-op init function symbol
     * and its own scope, and registers the init function in that scope.
     *
     * @param pkgID    the package the anonymous record belongs to
     * @param location source location to attach to the symbol
     * @param origin   symbol origin (typically VIRTUAL for inferred types)
     * @return the newly created record type symbol
     */
    private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location,
                                                     SymbolOrigin origin) {
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(Flags.ANONYMOUS,
                                           names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)),
                                           pkgID, null, env.scope.owner, location, origin);
        // Synthesize a public `() returns ()` init function for the record.
        BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
        BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
                Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false,
                symTable.builtinPos, VIRTUAL);
        initFuncSymbol.retType = symTable.nilType;
        recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                             bInvokableType, location);
        recordSymbol.scope = new Scope(recordSymbol);
        // Register the init function under "<recordName>.<initName>" in the record's scope.
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        return recordSymbol;
    }
private String getKeyName(BLangExpression key) {
return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value;
}
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
BType exprType, boolean required, boolean readonly) {
if (!nonRestFieldTypes.containsKey(keyString)) {
nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required,
readonly));
return;
}
FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
List<BType> typeList = fieldInfo.types;
if (isUniqueType(typeList, exprType)) {
typeList.add(exprType);
}
if (required && !fieldInfo.required) {
fieldInfo.required = true;
}
}
private boolean isUniqueType(List<BType> typeList, BType type) {
boolean isRecord = type.tag == TypeTags.RECORD;
for (BType bType : typeList) {
if (isRecord) {
if (type == bType) {
return false;
}
} else if (types.isSameType(type, bType)) {
return false;
}
}
return true;
}
    /**
     * Checks an xml sub-type literal against the expected type, considering both the mutable
     * sub-type and its effective immutable (readonly) counterpart. For a union expected type,
     * exactly one compatible member must exist; zero or multiple matches is an error.
     *
     * @param location          position for diagnostics
     * @param mutableXmlSubType the mutable xml sub-type of the literal
     * @param expType           the expected type
     * @return the compatible type, or {@code symTable.semanticError} after logging
     */
    private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType,
                                                      BType expType) {
        if (expType == symTable.semanticError) {
            return expType;
        }
        boolean unionExpType = expType.tag == TypeTags.UNION;
        // Fast paths for a non-union expected type against the mutable sub-type.
        if (expType == mutableXmlSubType) {
            return expType;
        }
        if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
            return mutableXmlSubType;
        }
        // Then try the immutable counterpart of the sub-type.
        BXMLSubType immutableXmlSubType = (BXMLSubType)
                ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable,
                                                              anonymousModelHelper, names);
        if (expType == immutableXmlSubType) {
            return expType;
        }
        if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) {
            return immutableXmlSubType;
        }
        if (!unionExpType) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }
        // Union expected type: collect every member compatible with either variant.
        List<BType> compatibleTypes = new ArrayList<>();
        for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
            if (compatibleTypes.contains(memberType)) {
                continue;
            }
            if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) {
                compatibleTypes.add(memberType);
                continue;
            }
            if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) {
                compatibleTypes.add(mutableXmlSubType);
                continue;
            }
            if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) {
                compatibleTypes.add(immutableXmlSubType);
            }
        }
        if (compatibleTypes.isEmpty()) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }
        if (compatibleTypes.size() == 1) {
            return compatibleTypes.get(0);
        }
        // More than one compatible member: the literal's type is ambiguous.
        dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType);
        return symTable.semanticError;
    }
private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) {
for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) {
BType childType = modifiedChild.getBType();
if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) {
continue;
}
modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types,
(SelectivelyImmutableReferenceType) childType,
env, symTable, anonymousModelHelper, names));
if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild);
}
}
}
private void logUndefinedSymbolError(Location pos, String name) {
if (!missingNodesHelper.isMissingNode(name)) {
dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name);
}
}
private void markTypeAsIsolated(BType actualType) {
actualType.flags |= Flags.ISOLATED;
actualType.tsymbol.flags |= Flags.ISOLATED;
}
private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) {
return cIExpr.getType() != null && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS);
}
private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) {
List<BLangClassDefinition> classDefinitions = env.enclPkg.classDefinitions;
BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType();
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env,
names.fromIdNode(userDefinedType.pkgAlias),
names.fromIdNode(userDefinedType.typeName));
for (BLangClassDefinition classDefinition : classDefinitions) {
if (classDefinition.symbol == symbol) {
return classDefinition;
}
}
return null;
}
    /**
     * Handles an object-constructor expression used where a readonly object is expected: if
     * every field can be immutable, the class and its fields are marked readonly; otherwise the
     * offending fields are (optionally) reported and the object is left mutable.
     *
     * @param cIExpr                 the constructor expression (for diagnostics position)
     * @param actualObjectType       the constructed object's type
     * @param classDefForConstructor the backing anonymous class definition
     * @param env                    the environment for analysis
     * @param logErrors              if true, report every never-readonly field; if false, bail
     *                               out silently on the first one
     */
    private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType,
                                                   BLangClassDefinition classDefForConstructor, SymbolEnv env,
                                                   boolean logErrors) {
        boolean hasNeverReadOnlyField = false;
        for (BField field : actualObjectType.fields.values()) {
            BType fieldType = field.type;
            if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) {
                // Field can never be readonly: analyze the constructor as-is (mutable).
                analyzeObjectConstructor(classDefForConstructor, env);
                hasNeverReadOnlyField = true;
                if (!logErrors) {
                    return;
                }
                dlog.error(field.pos,
                           DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE,
                           fieldType);
            }
        }
        if (hasNeverReadOnlyField) {
            return;
        }
        // All fields can be immutable: mark the class and the type readonly, then make the
        // field types immutable before analyzing the constructor.
        classDefForConstructor.flagSet.add(Flag.READONLY);
        actualObjectType.flags |= Flags.READONLY;
        actualObjectType.tsymbol.flags |= Flags.READONLY;
        ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types,
                                                  anonymousModelHelper, symTable, names, cIExpr.pos);
        analyzeObjectConstructor(classDefForConstructor, env);
    }
private void markConstructedObjectIsolatedness(BObjectType actualObjectType) {
if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) {
markTypeAsIsolated(actualObjectType);
return;
}
for (BField field : actualObjectType.fields.values()) {
if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) ||
!types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) {
return;
}
}
markTypeAsIsolated(actualObjectType);
}
/**
 * Flags an access expression as a leaf node when it is not nested inside another field- or
 * index-based access expression. Group expressions (parenthesization) are transparent and
 * skipped when inspecting the parent chain.
 *
 * @param accessExpression the field/index access expression to classify
 */
private void markLeafNode(BLangAccessExpression accessExpression) {
    BLangNode enclosing = accessExpression.parent;
    if (enclosing == null) {
        accessExpression.leafNode = true;
        return;
    }
    NodeKind enclosingKind = enclosing.getKind();
    // Walk up through any number of wrapping group expressions.
    while (enclosingKind == NodeKind.GROUP_EXPR) {
        enclosing = enclosing.parent;
        if (enclosing == null) {
            accessExpression.leafNode = true;
            return;
        }
        enclosingKind = enclosing.getKind();
    }
    if (enclosingKind != NodeKind.FIELD_BASED_ACCESS_EXPR && enclosingKind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        accessExpression.leafNode = true;
    }
}
/**
 * Aggregated information about a record field inferred across multiple mapping constructors:
 * the possible types seen for the field, whether it is required in all of them, and whether it
 * is read-only.
 */
private static class FieldInfo {
    // Candidate types observed for this field.
    List<BType> types;
    // True when the field is present (required) in every contributing mapping.
    boolean required;
    // True when the field is read-only.
    boolean readonly;

    private FieldInfo(List<BType> types, boolean required, boolean readonly) {
        this.types = types;
        this.required = required;
        this.readonly = readonly;
    }
}
} | class TypeChecker extends BLangNodeVisitor {
// Key under which this TypeChecker instance is cached in the CompilerContext.
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY = new CompilerContext.Key<>();
// lang.array functions that change the length of a list (populated in the static initializer).
private static Set<String> listLengthModifierFunctions = new HashSet<>();
// Per-lang-lib sets of functions that mutate their receiver (populated in the static initializer).
private static Map<String, HashSet<String>> modifierFunctions = new HashMap<>();

private static final String TABLE_TNAME = "table";
private static final String LIST_LANG_LIB = "lang.array";
private static final String MAP_LANG_LIB = "lang.map";
private static final String TABLE_LANG_LIB = "lang.table";
private static final String VALUE_LANG_LIB = "lang.value";
private static final String XML_LANG_LIB = "lang.xml";

private static final String FUNCTION_NAME_PUSH = "push";
private static final String FUNCTION_NAME_POP = "pop";
private static final String FUNCTION_NAME_SHIFT = "shift";
private static final String FUNCTION_NAME_UNSHIFT = "unshift";
private static final String FUNCTION_NAME_ENSURE_TYPE = "ensureType";

// Collaborating singletons resolved from the CompilerContext in the constructor.
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private NodeCloner nodeCloner;
private Types types;
private BLangDiagnosticLog dlog;
// Current environment/state for the expression being checked (mutated per checkExpr call).
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;
private BLangAnonymousModelHelper anonymousModelHelper;
private SemanticAnalyzer semanticAnalyzer;
private Unifier unifier;
// When true, diagnostics are being suppressed for speculative (clone-based) type checks.
private boolean nonErrorLoggingCheck = false;
private int letCount = 0;
// Environment stacks used while checking query expressions.
private Stack<SymbolEnv> queryEnvs, prevEnvs;
private Stack<BLangNode> queryFinalClauses;
private boolean checkWithinQueryExpr = false;
private BLangMissingNodesHelper missingNodesHelper;
private boolean breakToParallelQueryEnv = false;

/**
 * Expected types or inherited types.
 */
private BType expType;
// Result of the most recent visit(...) call; read back by checkExpr.
private BType resultType;
private DiagnosticCode diagCode;
static {
listLengthModifierFunctions.add(FUNCTION_NAME_PUSH);
listLengthModifierFunctions.add(FUNCTION_NAME_POP);
listLengthModifierFunctions.add(FUNCTION_NAME_SHIFT);
listLengthModifierFunctions.add(FUNCTION_NAME_UNSHIFT);
modifierFunctions.put(LIST_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeAll");
add("setLength");
add("reverse");
add("sort");
add("pop");
add("push");
add("shift");
add("unshift");
}});
modifierFunctions.put(MAP_LANG_LIB, new HashSet<String>() {{
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(TABLE_LANG_LIB, new HashSet<String>() {{
add("put");
add("add");
add("remove");
add("removeIfHasKey");
add("removeAll");
}});
modifierFunctions.put(VALUE_LANG_LIB, new HashSet<String>() {{
add("mergeJson");
}});
modifierFunctions.put(XML_LANG_LIB, new HashSet<String>() {{
add("setName");
add("setChildren");
add("strip");
}});
}
/**
 * Returns the {@code TypeChecker} cached in the given compiler context, creating and
 * registering one on first use (the constructor stores the instance in the context).
 *
 * @param context the compiler context acting as the instance cache
 * @return the per-context TypeChecker singleton
 */
public static TypeChecker getInstance(CompilerContext context) {
    TypeChecker cached = context.get(TYPE_CHECKER_KEY);
    return cached != null ? cached : new TypeChecker(context);
}
/**
 * Creates a TypeChecker bound to the given compiler context, registers it under
 * {@link #TYPE_CHECKER_KEY}, and resolves all collaborating per-context singletons.
 *
 * @param context the compiler context supplying the shared compiler services
 */
public TypeChecker(CompilerContext context) {
    // Register before resolving collaborators so re-entrant getInstance calls see this instance.
    context.put(TYPE_CHECKER_KEY, this);
    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.nodeCloner = NodeCloner.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeNarrower = TypeNarrower.getInstance(context);
    this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
    this.queryFinalClauses = new Stack<>();
    this.queryEnvs = new Stack<>();
    this.prevEnvs = new Stack<>();
    this.unifier = new Unifier();
}
/**
 * Type-checks an expression with no particular expected type ({@code noType}).
 *
 * @param expr the expression to check
 * @param env  the environment to check in
 * @return the checked (actual) type of the expression
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
    return checkExpr(expr, env, symTable.noType);
}
/**
 * Type-checks an expression against an expected type, reporting mismatches with the default
 * {@code INCOMPATIBLE_TYPES} diagnostic.
 *
 * @param expr    the expression to check
 * @param env     the environment to check in
 * @param expType the expected type
 * @return the checked (actual) type of the expression
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
    return checkExpr(expr, env, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
/**
 * Core entry point for type-checking a single expression. Saves the checker's per-expression
 * state (env, expected type, diagnostic code), dispatches to the appropriate {@code visit}
 * method, records the result on the expression, and restores the saved state.
 *
 * @param expr     the expression to check (returned type is cached on the node; a second call
 *                 returns the cached type)
 * @param env      the environment to check in
 * @param expType  the expected type (intersections are replaced by their effective type)
 * @param diagCode the diagnostic code to use on type mismatch
 * @return the checked (actual) type of the expression
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
    if (expr.typeChecked) {
        return expr.getBType();
    }

    // Check against the effective type of an intersection, not the intersection itself.
    if (expType.tag == TypeTags.INTERSECTION) {
        expType = ((BIntersectionType) expType).effectiveType;
    }

    // Save checker state so nested/recursive checkExpr calls do not clobber it.
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    this.expType = expType;
    this.isTypeChecked = true;
    expr.expectedType = expType;

    expr.accept(this);

    if (resultType.tag == TypeTags.INTERSECTION) {
        resultType = ((BIntersectionType) resultType).effectiveType;
    }

    expr.setTypeCheckedType(resultType);
    expr.typeChecked = isTypeChecked;

    // Restore the saved state before returning.
    this.env = prevEnv;
    this.expType = preExpType;
    this.diagCode = preDiagCode;

    validateAndSetExprExpectedType(expr);

    return resultType;
}
/**
 * Updates {@code expr.expectedType} to the checked result type, except when the check failed
 * (semantic error) or when a record literal was checked against a map type — in that case the
 * original map expected type is kept.
 *
 * @param expr the just-checked expression
 */
private void validateAndSetExprExpectedType(BLangExpression expr) {
    if (resultType.tag == TypeTags.SEMANTIC_ERROR) {
        return;
    }

    // A mapping constructor checked against a map type keeps the map as its expected type even
    // though its actual type is a record.
    boolean recordCheckedAgainstMap = expr.getKind() == NodeKind.RECORD_LITERAL_EXPR
            && expr.expectedType != null
            && expr.expectedType.tag == TypeTags.MAP
            && expr.getBType().tag == TypeTags.RECORD;
    if (!recordCheckedAgainstMap) {
        expr.expectedType = resultType;
    }
}
/**
 * Type-checks a literal expression: determines its type from the literal value and the
 * expected type, then validates it against the expected type unless the literal was already
 * resolved in a finite-type context (or errored).
 *
 * @param literalExpr the literal to check; result is published via {@code resultType}
 */
public void visit(BLangLiteral literalExpr) {
    BType literalType = setLiteralValueAndGetType(literalExpr, expType);
    if (literalType == symTable.semanticError || literalExpr.isFiniteContext) {
        return;
    }
    resultType = types.checkType(literalExpr, literalType, expType);
}
/**
 * Type-checks an XML element access (e.g. {@code x.<elem>}): validates the namespace prefixes
 * in the filters, checks the target expression against {@code xml}, and produces an XML
 * element sequence type.
 *
 * @param xmlElementAccess the element-access expression; result published via {@code resultType}
 */
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // check for undeclared namespaces.
    checkXMLNamespacePrefixes(xmlElementAccess.filters);
    checkExpr(xmlElementAccess.expr, env, symTable.xmlType);
    resultType = types.checkType(xmlElementAccess, symTable.xmlElementSeqType, expType);
}
/**
 * Type-checks an XML navigation access ({@code x/*}, {@code x/<elem>}, ...): validates filter
 * namespace prefixes, checks an optional child index against {@code int}, rejects union-typed
 * navigation targets, and produces {@code xml} for children access or an XML element sequence
 * otherwise.
 *
 * @param xmlNavigation the navigation-access expression; result published via {@code resultType}
 */
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    checkXMLNamespacePrefixes(xmlNavigation.filters);
    if (xmlNavigation.childIndex != null) {
        checkExpr(xmlNavigation.childIndex, env, symTable.intType);
    }
    BType exprType = checkExpr(xmlNavigation.expr, env, symTable.xmlType);

    // Navigation is not defined over union-typed targets.
    if (exprType.tag == TypeTags.UNION) {
        dlog.error(xmlNavigation.pos, DiagnosticErrorCode.TYPE_DOES_NOT_SUPPORT_XML_NAVIGATION_ACCESS,
                xmlNavigation.expr.getBType());
    }

    boolean childrenAccess = xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN;
    BType actualType = childrenAccess ? symTable.xmlType : symTable.xmlElementSeqType;
    types.checkType(xmlNavigation, actualType, expType);
    resultType = actualType;
}
/**
 * Resolves the namespace prefix of each XML element filter and logs an error for any prefix
 * that is not declared in the current environment. Filters with an empty namespace are skipped.
 *
 * @param filters the element filters whose prefixes should be resolved
 */
private void checkXMLNamespacePrefixes(List<BLangXMLElementFilter> filters) {
    for (BLangXMLElementFilter filter : filters) {
        if (filter.namespace.isEmpty()) {
            continue;
        }
        Name nsName = names.fromString(filter.namespace);
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, nsName);
        filter.namespaceSymbol = nsSymbol;
        if (nsSymbol == symTable.notFoundSymbol) {
            dlog.error(filter.nsPos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE, nsName);
        }
    }
}
/**
 * Determines the type of a literal under the given expected type, coercing/normalizing the
 * stored literal value where necessary (e.g. an int literal expected as float is converted to a
 * double value). Handles int/byte, float, decimal, char-string, finite, and union expected
 * types; recursion is used to resolve a literal against a member of a union or a finite type.
 *
 * @param literalExpr the literal whose value may be rewritten in place
 * @param expType     the expected (contextual) type
 * @return the resolved literal type, or {@code semanticError} on incompatibility
 */
private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
    BType literalType = symTable.getTypeFromTag(literalExpr.getBType().tag);
    Object literalValue = literalExpr.value;

    if (literalType.tag == TypeTags.INT || literalType.tag == TypeTags.BYTE) {
        // int/byte literal: may be reinterpreted as float, decimal, an int subtype, a
        // finite-type member, or a union member.
        if (expType.tag == TypeTags.FLOAT) {
            literalType = symTable.floatType;
            literalExpr.value = ((Long) literalValue).doubleValue();
        } else if (expType.tag == TypeTags.DECIMAL &&
                !NumericLiteralSupport.hasHexIndicator(literalExpr.originalValue)) {
            literalType = symTable.decimalType;
            literalExpr.value = String.valueOf(literalValue);
        } else if (TypeTags.isIntegerTypeTag(expType.tag) || expType.tag == TypeTags.BYTE) {
            literalType = getIntLiteralType(literalExpr.pos, expType, literalType, literalValue);
            if (literalType == symTable.semanticError) {
                return symTable.semanticError;
            }
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            // Try each numeric basic type in order until the literal matches a value-space member.
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED32_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed32IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED16_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed16IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.SIGNED8_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.signed8IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED32_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned32IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED16_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned16IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.UNSIGNED8_INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.unsigned8IntType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            // Preference order: int (or int-compatible json/anydata/any), an int subtype,
            // finite members of int, byte, finite members of byte, float, finite members of
            // float, decimal, finite members of decimal.
            Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
            BType intSubType = null;
            boolean intOrIntCompatibleTypeFound = false;
            for (BType memType : memberTypes) {
                if ((memType.tag != TypeTags.INT && TypeTags.isIntegerTypeTag(memType.tag)) ||
                        memType.tag == TypeTags.BYTE) {
                    intSubType = memType;
                } else if (memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON ||
                        memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY) {
                    intOrIntCompatibleTypeFound = true;
                }
            }
            if (intOrIntCompatibleTypeFound) {
                return setLiteralValueAndGetType(literalExpr, symTable.intType);
            }

            if (intSubType != null) {
                return setLiteralValueAndGetType(literalExpr, intSubType);
            }

            BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    // i.e., a match was found for a finite type
                    return setType;
                }
            }

            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) {
                return setLiteralValueAndGetType(literalExpr, symTable.byteType);
            }

            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    // i.e., a match was found for a finite type
                    return setType;
                }
            }

            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) {
                return setLiteralValueAndGetType(literalExpr, symTable.floatType);
            }

            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    // i.e., a match was found for a finite type
                    return setType;
                }
            }

            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
                return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            }

            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    // i.e., a match was found for a finite type
                    return setType;
                }
            }
        }
    } else if (literalType.tag == TypeTags.FLOAT) {
        String literal = String.valueOf(literalValue);
        String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal);
        boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal);

        if (expType.tag == TypeTags.DECIMAL) {
            // A float-discriminated (e.g. `1.0f`) or hex literal cannot be a decimal.
            if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) {
                dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                        symTable.floatType);
                resultType = symTable.semanticError;
                return resultType;
            }
            literalType = symTable.decimalType;
            literalExpr.value = numericLiteral;
        } else if (expType.tag == TypeTags.FLOAT) {
            literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral));
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (!isDiscriminatedFloat
                    && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) expType;
            BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
            if (unionMember != symTable.noType) {
                return unionMember;
            }
        }
    } else if (literalType.tag == TypeTags.DECIMAL) {
        return decimalLiteral(literalValue, literalExpr, expType);
    } else if (literalType.tag == TypeTags.STRING && types.isCharLiteralValue((String) literalValue)) {
        // Single-character string literal: may narrow to string:Char.
        if (expType.tag == TypeTags.CHAR_STRING) {
            return symTable.charStringType;
        }
        if (expType.tag == TypeTags.UNION) {
            Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
            for (BType memType : memberTypes) {
                if (TypeTags.isStringTypeTag(memType.tag)) {
                    return setLiteralValueAndGetType(literalExpr, memType);
                } else if (memType.tag == TypeTags.JSON || memType.tag == TypeTags.ANYDATA ||
                        memType.tag == TypeTags.ANY) {
                    return setLiteralValueAndGetType(literalExpr, symTable.charStringType);
                } else if (memType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(memType,
                        literalExpr)) {
                    setLiteralValueForFiniteType(literalExpr, symTable.charStringType);
                    return literalType;
                }
            }
        }
        boolean foundMember = types.isAssignableToFiniteType(expType, literalExpr);
        if (foundMember) {
            setLiteralValueForFiniteType(literalExpr, literalType);
            return literalType;
        }
    } else {
        // Other literal kinds: only finite/union expected types need special handling.
        if (this.expType.tag == TypeTags.FINITE) {
            boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr);
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        } else if (this.expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) this.expType;
            boolean foundMember = unionType.getMemberTypes()
                    .stream()
                    .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        }
    }

    if (literalExpr.getBType().tag == TypeTags.BYTE_ARRAY) {
        // check whether this is a byte array
        literalType = new BArrayType(symTable.byteType);
    }

    return literalType;
}
/**
 * Attempts to resolve a numeric literal against a union expected type: first against a member
 * matching {@code desiredType} (or json/anydata/any), then against finite members with
 * float values, then decimal members, then finite members with decimal values.
 *
 * @param literalExpr the literal being resolved (value may be rewritten by recursion)
 * @param expType     the union expected type
 * @param desiredType the basic numeric type preferred for this literal
 * @return the resolved type, or {@code noType} when no union member matched
 */
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
    Set<BType> memberTypes = expType.getMemberTypes();
    if (memberTypes.stream()
            .anyMatch(memType -> memType.tag == desiredType.tag
                    || memType.tag == TypeTags.JSON
                    || memType.tag == TypeTags.ANYDATA
                    || memType.tag == TypeTags.ANY)) {
        return setLiteralValueAndGetType(literalExpr, desiredType);
    }

    // NOTE(review): this lookup is hard-coded to floatType even though desiredType is a
    // parameter (the method is also called with decimalType) — looks intentional for the
    // float-then-decimal preference order, but worth confirming.
    BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            // i.e., a match was found for a finite type
            return setType;
        }
    }

    if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
        return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
    }

    finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            // i.e., a match was found for a finite type
            return setType;
        }
    }

    return symTable.noType;
}
/**
 * Checks whether the literal matches at least one value-space member of the finite type whose
 * basic type has the given tag.
 *
 * @param literalExpr         the literal being tested
 * @param finiteType          the finite type whose value space is scanned
 * @param targetMemberTypeTag the type tag the value-space member must have
 * @return true when a matching, assignable value-space member exists
 */
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression memberValue : finiteType.getValueSpace()) {
        if (memberValue.getBType().tag != targetMemberTypeTag) {
            continue;
        }
        if (types.checkLiteralAssignabilityBasedOnType((BLangLiteral) memberValue, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Resolves a decimal literal against the expected type: rejects decimal-discriminated literals
 * (e.g. {@code 1.0d}) expected as float, tries finite and union expected types, and otherwise
 * normalizes the stored value by stripping the discriminator and returns {@code decimal}.
 *
 * @param literalValue the raw literal value
 * @param literalExpr  the literal node (value may be rewritten)
 * @param expType      the expected type
 * @return the resolved type, or {@code semanticError} on incompatibility
 */
private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) {
    String literal = String.valueOf(literalValue);
    if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) {
        dlog.error(literalExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                symTable.decimalType);
        resultType = symTable.semanticError;
        return resultType;
    }
    if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
        BFiniteType finiteType = (BFiniteType) expType;
        if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
            BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            setLiteralValueForFiniteType(literalExpr, valueType);
            return valueType;
        }
    } else if (expType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) expType;
        BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
        if (unionMember != symTable.noType) {
            return unionMember;
        }
    }
    literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal);
    resultType = symTable.decimalType;
    return symTable.decimalType;
}
/**
 * Records that the literal was resolved in a finite-type context: installs an implicit cast to
 * the overall expected type, publishes {@code type} as the result, and sets the
 * {@code isFiniteContext} marker consulted by callers.
 *
 * @param literalExpr the resolved literal
 * @param type        the basic type the literal was resolved to
 */
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
    types.setImplicitCastExpr(literalExpr, type, this.expType);
    this.resultType = type;
    literalExpr.isFiniteContext = true;
}
/**
 * Builds a synthetic finite type containing every value-space member (of the finite members of
 * the union) whose basic type matches {@code matchType}.
 *
 * @param unionType the union whose finite members are scanned
 * @param matchType the basic type the collected values must have
 * @return a new finite type over the matched values, or {@code semanticError} when the union
 *         has no finite members or no values matched
 */
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
    List<BFiniteType> finiteTypeMembers = unionType.getMemberTypes().stream()
            .filter(memType -> memType.tag == TypeTags.FINITE)
            .map(memFiniteType -> (BFiniteType) memFiniteType)
            .collect(Collectors.toList());

    if (finiteTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }

    int tag = matchType.tag;
    Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();

    // Add matches directly to the LinkedHashSet. The previous implementation funneled each
    // member's matches through an intermediate unordered HashSet, which scrambled the
    // deterministic insertion order the LinkedHashSet is meant to preserve.
    for (BFiniteType finiteType : finiteTypeMembers) {
        for (BLangExpression expression : finiteType.getValueSpace()) {
            if (expression.getBType().tag == tag) {
                matchedValueSpace.add(expression);
            }
        }
    }

    if (matchedValueSpace.isEmpty()) {
        return symTable.semanticError;
    }

    return new BFiniteType(null, matchedValueSpace);
}
/**
 * Resolves an integer literal against an integer subtype expected type, range-checking the
 * value. On a range violation (or unknown tag) logs {@code INCOMPATIBLE_TYPES} and returns
 * {@code semanticError}.
 *
 * @param location     position for error reporting
 * @param expType      the expected integer (sub)type
 * @param literalType  the literal's default type, used in the error message
 * @param literalValue the literal's value (a Long)
 * @return the matching integer subtype, or {@code semanticError}
 */
private BType getIntLiteralType(Location location, BType expType, BType literalType,
                                Object literalValue) {
    switch (expType.tag) {
        case TypeTags.INT:
            return symTable.intType;
        case TypeTags.BYTE:
            if (types.isByteLiteralValue((Long) literalValue)) {
                return symTable.byteType;
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (types.isSigned32LiteralValue((Long) literalValue)) {
                return symTable.signed32IntType;
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (types.isSigned16LiteralValue((Long) literalValue)) {
                return symTable.signed16IntType;
            }
            break;
        case TypeTags.SIGNED8_INT:
            if (types.isSigned8LiteralValue((Long) literalValue)) {
                return symTable.signed8IntType;
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (types.isUnsigned32LiteralValue((Long) literalValue)) {
                return symTable.unsigned32IntType;
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (types.isUnsigned16LiteralValue((Long) literalValue)) {
                return symTable.unsigned16IntType;
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (types.isUnsigned8LiteralValue((Long) literalValue)) {
                return symTable.unsigned8IntType;
            }
            break;
        default:
    }
    // Value out of range for the expected subtype (or unhandled tag).
    dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, literalType);
    resultType = symTable.semanticError;
    return resultType;
}
/**
 * Type-checks a list constructor expression. With no contextual type (or a bare
 * {@code readonly} context) the tuple type is inferred from the members; otherwise the
 * constructor is checked for compatibility with the expected type.
 *
 * @param listConstructor the list constructor; result published via {@code resultType}
 */
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    int expTag = expType.tag;
    if (expTag != TypeTags.NONE && expTag != TypeTags.READONLY) {
        resultType = checkListConstructorCompatibility(expType, listConstructor);
        return;
    }

    BType inferredType = getInferredTupleType(listConstructor, expType);
    if (inferredType == symTable.semanticError) {
        resultType = symTable.semanticError;
    } else {
        resultType = types.checkType(listConstructor, inferredType, expType);
    }
}
/**
 * Type-checks a table constructor expression. Three paths:
 * <ul>
 *   <li>no contextual type (none/any/anydata): infer the row type from the member record
 *       literals and validate the key specifier;</li>
 *   <li>expected table type (possibly via an intersection): check each row against the
 *       table's constraint and build the resulting table type;</li>
 *   <li>expected union type: speculatively check a clone against each member with diagnostics
 *       muted, requiring exactly one match.</li>
 * </ul>
 *
 * @param tableConstructorExpr the table constructor; result published via {@code resultType}
 */
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    if (expType.tag == TypeTags.NONE || expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA) {
        List<BType> memTypes = checkExprList(new ArrayList<>(tableConstructorExpr.recordLiteralList), env);
        for (BType memType : memTypes) {
            if (memType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
        }

        // The member type cannot be inferred from an empty constructor.
        if (tableConstructorExpr.recordLiteralList.size() == 0) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE);
            resultType = symTable.semanticError;
            return;
        }

        BType inherentMemberType = inferTableMemberType(memTypes, tableConstructorExpr);
        BTableType tableType = new BTableType(TypeTags.TABLE, inherentMemberType, null);
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            recordLiteral.setBType(inherentMemberType);
        }

        if (!validateTableConstructorExpr(tableConstructorExpr, tableType)) {
            resultType = symTable.semanticError;
            return;
        }

        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }
        resultType = tableType;
        return;
    }

    BType applicableExpType = expType.tag == TypeTags.INTERSECTION ?
            ((BIntersectionType) expType).effectiveType : expType;

    if (applicableExpType.tag == TypeTags.TABLE) {
        List<BType> memTypes = new ArrayList<>();
        for (BLangRecordLiteral recordLiteral : tableConstructorExpr.recordLiteralList) {
            // Clone the row when diagnostics are suppressed so speculative checks do not
            // mutate the original node.
            BLangRecordLiteral clonedExpr = recordLiteral;
            if (this.nonErrorLoggingCheck) {
                clonedExpr.cloneAttempt++;
                clonedExpr = nodeCloner.cloneNode(recordLiteral);
            }
            BType recordType = checkExpr(clonedExpr, env, ((BTableType) applicableExpType).constraint);
            if (recordType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
            memTypes.add(recordType);
        }

        if (((BTableType) applicableExpType).constraint.tag == TypeTags.MAP &&
                ((BTableType) applicableExpType).isTypeInlineDefined) {
            validateMapConstraintTable(tableConstructorExpr, applicableExpType);
            return;
        }

        if (!(validateTableType((BTableType) applicableExpType,
                tableConstructorExpr.recordLiteralList) &&
                validateTableConstructorExpr(tableConstructorExpr, (BTableType) applicableExpType))) {
            resultType = symTable.semanticError;
            return;
        }

        BTableType tableType = new BTableType(TypeTags.TABLE, inferTableMemberType(memTypes, applicableExpType),
                null);

        if (Symbols.isFlagOn(applicableExpType.flags, Flags.READONLY)) {
            tableType.flags |= Flags.READONLY;
        }

        if (checkKeySpecifier(tableConstructorExpr, tableType)) {
            return;
        }

        // Inherit the expected table's key field names when the constructor did not declare any.
        BTableType expectedTableType = (BTableType) applicableExpType;
        if (expectedTableType.fieldNameList != null && tableType.fieldNameList == null) {
            tableType.fieldNameList = expectedTableType.fieldNameList;
        }

        resultType = tableType;
    } else if (applicableExpType.tag == TypeTags.UNION) {
        // Speculative checking: mute diagnostics, try each union member on a clone, and keep
        // the distinct member types that checked cleanly.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();

        List<BType> matchingTypes = new ArrayList<>();
        BUnionType expectedType = (BUnionType) applicableExpType;
        for (BType memType : expectedType.getMemberTypes()) {
            dlog.resetErrorCount();

            BLangTableConstructorExpr clonedTableExpr = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                clonedTableExpr = nodeCloner.cloneNode(tableConstructorExpr);
            }

            BType resultType = checkExpr(clonedTableExpr, env, memType);
            if (resultType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(matchingTypes, resultType)) {
                matchingTypes.add(resultType);
            }
        }

        // Restore diagnostic state before reporting the outcome.
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);

        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }

        if (matchingTypes.isEmpty()) {
            BLangTableConstructorExpr exprToLog = tableConstructorExpr;
            if (this.nonErrorLoggingCheck) {
                tableConstructorExpr.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(tableConstructorExpr);
            }

            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getInferredTableType(exprToLog));

        } else if (matchingTypes.size() != 1) {
            dlog.error(tableConstructorExpr.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
        } else {
            resultType = checkExpr(tableConstructorExpr, env, matchingTypes.get(0));
            return;
        }
        resultType = symTable.semanticError;
    } else {
        resultType = symTable.semanticError;
    }
}
/**
 * Infers a table type for a constructor expression from its row (record literal) types,
 * used when reporting an incompatibility against a union expected type.
 *
 * @param exprToLog the (cloned) table constructor to infer from
 * @return the inferred table type, or {@code semanticError} when any row failed to check
 */
private BType getInferredTableType(BLangTableConstructorExpr exprToLog) {
    List<BType> rowTypes = checkExprList(new ArrayList<>(exprToLog.recordLiteralList), env);
    for (BType rowType : rowTypes) {
        if (rowType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }
    return new BTableType(TypeTags.TABLE, inferTableMemberType(rowTypes, exprToLog), null);
}
/**
 * Validates the constructor's key specifier (if any) and installs the key field names on the
 * table type.
 *
 * @param tableConstructorExpr the constructor whose key specifier is validated
 * @param tableType            the table type receiving the key field names
 * @return true when validation failed and {@code resultType} was set to {@code semanticError};
 *         false otherwise (including when there is no key specifier)
 */
private boolean checkKeySpecifier(BLangTableConstructorExpr tableConstructorExpr, BTableType tableType) {
    if (tableConstructorExpr.tableKeySpecifier == null) {
        return false;
    }
    // Compute the key field names once; the previous version called getTableKeyNameList twice.
    List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);
    if (!validateTableKeyValue(fieldNameList, tableConstructorExpr.recordLiteralList)) {
        resultType = symTable.semanticError;
        return true;
    }
    tableType.fieldNameList = fieldNameList;
    return false;
}
/**
 * Infers a single member (row) type from the row types checked against an expected table type.
 * Builds a union incrementally, adding a row type only when it is not already assignable to
 * the union built so far; collapses to the single type when possible.
 *
 * @param memTypes the checked row types (may be empty)
 * @param expType  the expected table type; its constraint is used when there are no rows
 * @return the inferred member type
 */
private BType inferTableMemberType(List<BType> memTypes, BType expType) {

    if (memTypes.isEmpty()) {
        return ((BTableType) expType).constraint;
    }

    LinkedHashSet<BType> result = new LinkedHashSet<>();

    result.add(memTypes.get(0));

    BUnionType unionType = BUnionType.create(null, result);
    for (int i = 1; i < memTypes.size(); i++) {
        BType source = memTypes.get(i);
        if (!types.isAssignable(source, unionType)) {
            result.add(source);
            unionType = BUnionType.create(null, result);
        }
    }

    if (unionType.getMemberTypes().size() == 1) {
        return memTypes.get(0);
    }

    return unionType;
}
/**
 * Infers the inherent row (record) type for a table constructor with no contextual table type.
 * Unions the fields of all row record types; fields present in every row are required, fields
 * named in the key specifier are additionally readonly, and all others become optional.
 * Reports an error when two rows contribute distinct fields with the same name.
 *
 * @param memTypes             the record types of the constructor's rows (all BRecordType)
 * @param tableConstructorExpr the constructor (for the key specifier and error position)
 * @return the synthesized record type, or {@code semanticError} on an ambiguous field
 */
private BType inferTableMemberType(List<BType> memTypes, BLangTableConstructorExpr tableConstructorExpr) {
    BLangTableKeySpecifier keySpecifier = tableConstructorExpr.tableKeySpecifier;
    List<String> keySpecifierFieldNames = new ArrayList<>();
    Set<BField> allFieldSet = new LinkedHashSet<>();
    for (BType memType : memTypes) {
        allFieldSet.addAll(((BRecordType) memType).fields.values());
    }

    // Fields common to every row are required in the inferred type.
    Set<BField> commonFieldSet = new LinkedHashSet<>(allFieldSet);
    for (BType memType : memTypes) {
        commonFieldSet.retainAll(((BRecordType) memType).fields.values());
    }

    List<String> requiredFieldNames = new ArrayList<>();
    if (keySpecifier != null) {
        for (IdentifierNode identifierNode : keySpecifier.fieldNameIdentifierList) {
            requiredFieldNames.add(((BLangIdentifier) identifierNode).value);
            keySpecifierFieldNames.add(((BLangIdentifier) identifierNode).value);
        }
    }

    List<String> fieldNames = new ArrayList<>();
    for (BField field : allFieldSet) {
        String fieldName = field.name.value;

        // Two distinct BField entries with the same name (i.e. differing definitions across
        // rows) make the member type ambiguous.
        if (fieldNames.contains(fieldName)) {
            dlog.error(tableConstructorExpr.pos,
                    DiagnosticErrorCode.CANNOT_INFER_MEMBER_TYPE_FOR_TABLE_DUE_AMBIGUITY,
                    fieldName);
            return symTable.semanticError;
        }
        fieldNames.add(fieldName);

        boolean isOptional = true;
        for (BField commonField : commonFieldSet) {
            if (commonField.name.value.equals(fieldName)) {
                isOptional = false;
                requiredFieldNames.add(commonField.name.value);
            }
        }

        if (isOptional) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.OPTIONAL));
        } else if (requiredFieldNames.contains(fieldName) && keySpecifierFieldNames.contains(fieldName)) {
            // Key fields are both required and readonly.
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED)) + Flags.asMask(EnumSet.of(Flag.READONLY));
        } else if (requiredFieldNames.contains(fieldName)) {
            field.symbol.flags = Flags.asMask(EnumSet.of(Flag.REQUIRED));
        }
    }

    return createTableConstraintRecordType(allFieldSet, tableConstructorExpr.pos);
}
/**
 * Materializes a sealed record type (symbol, type node, and init function) from the given
 * field set, for use as an inferred table row type, and registers the type definition in the
 * current environment.
 *
 * @param allFieldSet the fields of the record, in insertion order
 * @param pos         position to attach to the generated symbol/type node
 * @return the newly created sealed record type
 */
private BRecordType createTableConstraintRecordType(Set<BField> allFieldSet, Location pos) {
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, pos, VIRTUAL);

    for (BField field : allFieldSet) {
        recordSymbol.scope.define(field.name, field.symbol);
    }

    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.fields = allFieldSet.stream().collect(getFieldCollector());

    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;

    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);

    // No rest field: the inferred row type is closed.
    recordType.sealed = true;
    recordType.restFieldType = symTable.noType;
    return recordType;
}
/**
 * Returns a collector that maps fields by name into an insertion-ordered map, failing fast on
 * duplicate field names.
 *
 * @return a name-keyed {@link LinkedHashMap} collector over {@link BField}s
 */
private Collector<BField, ?, LinkedHashMap<String, BField>> getFieldCollector() {
    return Collectors.toMap(
            field -> field.name.value,
            Function.identity(),
            (first, second) -> {
                throw new IllegalStateException(String.format("Duplicate key %s", first));
            },
            LinkedHashMap::new);
}
/**
 * Validates a table type used to check a constructor: an inline-defined table's constraint
 * must be a map/record subtype, and when the type names key fields, the key specifier and the
 * row literals' key values are validated.
 *
 * @param tableType      the (expected) table type
 * @param recordLiterals the constructor's row literals, used for key-value validation
 * @return true when the table type is valid; false after setting {@code resultType} to error
 */
private boolean validateTableType(BTableType tableType, List<BLangRecordLiteral> recordLiterals) {
    BType constraint = tableType.constraint;
    if (tableType.isTypeInlineDefined && !types.isAssignable(constraint, symTable.mapAllType)) {
        dlog.error(tableType.constraintPos, DiagnosticErrorCode.TABLE_CONSTRAINT_INVALID_SUBTYPE, constraint);
        resultType = symTable.semanticError;
        return false;
    }

    List<String> fieldNameList = tableType.fieldNameList;
    if (fieldNameList != null) {
        // Key specifier fields are only re-validated for inline-defined table types; the
        // constraint is unwrapped when it is an intersection.
        boolean isKeySpecifierValidated = !tableType.isTypeInlineDefined || validateKeySpecifier(fieldNameList,
                constraint.tag != TypeTags.INTERSECTION ? constraint :
                        ((BIntersectionType) constraint).effectiveType,
                tableType.keyPos);
        return (isKeySpecifierValidated && validateTableKeyValue(fieldNameList, recordLiterals));
    }
    return true;
}
/**
 * Verifies that every key-specifier field is given a constant expression in each member
 * record literal of the table constructor.
 *
 * @return true when all key values are constant; false after logging the first violation
 */
private boolean validateTableKeyValue(List<String> keySpecifierFieldNames,
                                      List<BLangRecordLiteral> recordLiterals) {
    for (String fieldName : keySpecifierFieldNames) {
        for (BLangRecordLiteral recordLiteral : recordLiterals) {
            BLangRecordKeyValueField keyValueField = getRecordKeyValueField(recordLiteral, fieldName);
            boolean constantValue = keyValueField != null && isConstExpression(keyValueField.getValue());
            if (!constantValue) {
                dlog.error(recordLiteral.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_FIELD_VALUE_MUST_BE_CONSTANT_EXPR, fieldName);
                resultType = symTable.semanticError;
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether an expression's kind makes it a constant expression: literals and
 * constructor-like expressions qualify directly, a variable reference qualifies only
 * when it refers to a declared constant, and a grouped expression defers to its inner
 * expression.
 */
private boolean isConstExpression(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    switch (kind) {
        case SIMPLE_VARIABLE_REF:
            // Only references to declared constants qualify.
            return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
        case GROUP_EXPR:
            // Parenthesized expression: look through to the wrapped expression.
            return isConstExpression(((BLangGroupExpr) expression).expression);
        case LITERAL:
        case NUMERIC_LITERAL:
        case STRING_TEMPLATE_LITERAL:
        case XML_ELEMENT_LITERAL:
        case XML_TEXT_LITERAL:
        case LIST_CONSTRUCTOR_EXPR:
        case TABLE_CONSTRUCTOR_EXPR:
        case RECORD_LITERAL_EXPR:
        case TYPE_CONVERSION_EXPR:
        case UNARY_EXPR:
        case BINARY_EXPR:
        case TYPE_TEST_EXPR:
        case TERNARY_EXPR:
            return true;
        default:
            return false;
    }
}
/**
 * Returns the key-value field of the given record literal whose key text equals
 * {@code fieldName}, or null when no such field exists.
 *
 * @param recordLiteral the mapping constructor to search
 * @param fieldName     the key name to match against {@code key.toString()}
 * @return the matching key-value field, or null
 */
private BLangRecordKeyValueField getRecordKeyValueField(BLangRecordLiteral recordLiteral,
                                                        String fieldName) {
    for (RecordLiteralNode.RecordField recordField : recordLiteral.fields) {
        // Guard the cast: a record literal may also contain spread-operator or
        // variable-name fields, and blindly casting those would throw a
        // ClassCastException instead of reporting a proper diagnostic upstream.
        if (!(recordField instanceof BLangRecordKeyValueField)) {
            continue;
        }
        BLangRecordKeyValueField recordKeyValueField = (BLangRecordKeyValueField) recordField;
        if (fieldName.equals(recordKeyValueField.key.toString())) {
            return recordKeyValueField;
        }
    }
    return null;
}
/**
 * Validates that every key-specifier field exists in the table constraint type and is
 * readonly, required, and of an anydata subtype. Logs an error and sets resultType to
 * semanticError on the first violation.
 *
 * @param fieldNameList key field names from the key specifier
 * @param constraint    the (effective) table constraint type
 * @param pos           location used for error reporting
 * @return true when all key fields are valid
 */
public boolean validateKeySpecifier(List<String> fieldNameList, BType constraint,
                                    Location pos) {
    for (String fieldName : fieldNameList) {
        BField field = types.getTableConstraintField(constraint, fieldName);

        // The key field must exist in the constraint.
        if (field == null) {
            dlog.error(pos,
                    DiagnosticErrorCode.INVALID_FIELD_NAMES_IN_KEY_SPECIFIER, fieldName, constraint);
            resultType = symTable.semanticError;
            return false;
        }

        // Key fields must be readonly so keys cannot change after insertion.
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.READONLY)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_READONLY, fieldName);
            resultType = symTable.semanticError;
            return false;
        }

        // Optional fields cannot serve as keys.
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_REQUIRED, fieldName);
            resultType = symTable.semanticError;
            return false;
        }

        // Keys must be anydata so they support deep equality.
        if (!types.isAssignable(field.type, symTable.anydataType)) {
            dlog.error(pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_FIELD_MUST_BE_ANYDATA, fieldName, constraint);
            resultType = symTable.semanticError;
            return false;
        }
    }
    return true;
}
/**
 * Validates a table constructor expression against the expected table type:
 * (1) any key specifier in the constructor must either be valid against the constraint
 *     (when the type declares no key fields) or exactly repeat the type's key fields, and
 * (2) any key type constraint (`key<...>`) must match the specifier in field count, with
 *     each specified field assignable to the corresponding constrained member type.
 *
 * @return true when the constructor is compatible; false after logging an error
 */
private boolean validateTableConstructorExpr(BLangTableConstructorExpr tableConstructorExpr,
                                             BTableType tableType) {
    BType constraintType = tableType.constraint;

    if (tableConstructorExpr.tableKeySpecifier != null) {
        List<String> fieldNameList = getTableKeyNameList(tableConstructorExpr.tableKeySpecifier);

        // No key fields declared on the type: the specifier itself must be valid against
        // the (effective) constraint type.
        if (tableType.fieldNameList == null &&
                !validateKeySpecifier(fieldNameList,
                        constraintType.tag != TypeTags.INTERSECTION ? constraintType :
                                ((BIntersectionType) constraintType).effectiveType,
                        tableConstructorExpr.tableKeySpecifier.pos)) {
            return false;
        }

        // Key fields declared on the type: the specifier must repeat them exactly.
        if (tableType.fieldNameList != null && !tableType.fieldNameList.equals(fieldNameList)) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos, DiagnosticErrorCode.TABLE_KEY_SPECIFIER_MISMATCH,
                    tableType.fieldNameList.toString(), fieldNameList.toString());
            resultType = symTable.semanticError;
            return false;
        }
    }

    BType keyTypeConstraint = tableType.keyTypeConstraint;
    if (keyTypeConstraint != null) {
        List<BType> memberTypes = new ArrayList<>();

        // A tuple key constraint contributes one member type per key field.
        if (keyTypeConstraint.tag == TypeTags.TUPLE) {
            for (Type type : ((TupleType) keyTypeConstraint).getTupleTypes()) {
                memberTypes.add((BType) type);
            }
        } else {
            memberTypes.add(keyTypeConstraint);
        }

        // `key<never>` permits a constructor without any key specifier.
        if (tableConstructorExpr.tableKeySpecifier == null && keyTypeConstraint.tag == TypeTags.NEVER) {
            return true;
        }

        // Specifier must exist and supply exactly one field per constrained member type.
        if (tableConstructorExpr.tableKeySpecifier == null ||
                tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size() != memberTypes.size()) {
            dlog.error(tableConstructorExpr.pos,
                    DiagnosticErrorCode.KEY_SPECIFIER_SIZE_MISMATCH_WITH_KEY_CONSTRAINT,
                    memberTypes.size(),
                    tableConstructorExpr.tableKeySpecifier == null ?
                            0 : tableConstructorExpr.tableKeySpecifier.fieldNameIdentifierList.size());
            resultType = symTable.semanticError;
            return false;
        }

        List<IdentifierNode> fieldNameIdentifierList = tableConstructorExpr.tableKeySpecifier.
                fieldNameIdentifierList;

        // Each specified field must exist in the constraint and be assignable to the
        // member type at the same position.
        int index = 0;
        for (IdentifierNode identifier : fieldNameIdentifierList) {
            BField field = types.getTableConstraintField(constraintType, ((BLangIdentifier) identifier).value);
            if (field == null || !types.isAssignable(field.type, memberTypes.get(index))) {
                dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                        DiagnosticErrorCode.KEY_SPECIFIER_MISMATCH_WITH_KEY_CONSTRAINT,
                        fieldNameIdentifierList.toString(), memberTypes.toString());
                resultType = symTable.semanticError;
                return false;
            }
            index++;
        }
    }
    return true;
}
/**
 * Validates a table constructor against a map-constrained table type. Such a table may
 * not declare a key — neither in the type (key fields or key type constraint) nor in the
 * constructor's key specifier. Sets resultType to the expected type on success, or to
 * semanticError after logging a diagnostic.
 */
public void validateMapConstraintTable(BLangTableConstructorExpr tableConstructorExpr, BType expType) {
    BTableType tableType = (BTableType) expType;
    if (tableType.fieldNameList != null || tableType.keyTypeConstraint != null) {
        dlog.error(tableType.keyPos,
                DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        resultType = symTable.semanticError;
        return;
    }

    if (tableConstructorExpr != null) {
        if (tableConstructorExpr.tableKeySpecifier != null) {
            dlog.error(tableConstructorExpr.tableKeySpecifier.pos,
                    DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
            resultType = symTable.semanticError;
            return;
        }
        if (!validateTableType(tableType, tableConstructorExpr.recordLiteralList)) {
            resultType = symTable.semanticError;
            return;
        }
    }
    resultType = expType;
}
/**
 * Extracts the plain string names from the identifiers of a table key specifier,
 * preserving their declaration order.
 */
private List<String> getTableKeyNameList(BLangTableKeySpecifier tableKeySpecifier) {
    List<String> keyNames = new ArrayList<>(tableKeySpecifier.fieldNameIdentifierList.size());
    tableKeySpecifier.fieldNameIdentifierList
            .forEach(identifier -> keyNames.add(((BLangIdentifier) identifier).value));
    return keyNames;
}
/**
 * Builds the key type for a table from its key field names: a single key field keys
 * directly by that field's type, while multiple fields form a tuple key. Returns
 * semanticError when there are no field names or a name is absent from the constraint.
 */
private BType createTableKeyConstraint(List<String> fieldNames, BType constraintType) {
    if (fieldNames == null) {
        return symTable.semanticError;
    }

    List<BType> keyMemberTypes = new ArrayList<>();
    for (String name : fieldNames) {
        BField constraintField = types.getTableConstraintField(constraintType, name);
        if (constraintField == null) {
            // The key field does not exist in the constraint type.
            return symTable.semanticError;
        }
        keyMemberTypes.add(constraintField.type);
    }

    return keyMemberTypes.size() == 1 ? keyMemberTypes.get(0) : new BTupleType(keyMemberTypes);
}
/**
 * Checks whether a list constructor expression is compatible with the given expected type
 * and returns the applicable (non-union, non-intersection) type, or semanticError.
 * <p>
 * For unions, each member is probed silently (diagnostics muted, error count restored)
 * and exactly one member must be compatible: zero matches reports an incompatibility,
 * more than one reports ambiguity.
 */
private BType checkListConstructorCompatibility(BType bType, BLangListConstructorExpr listConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Silence diagnostics while probing union members; restore state afterwards.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        int errorCount = this.dlog.errorCount();
        this.nonErrorLoggingCheck = true;
        this.dlog.mute();

        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }

            BType listCompatibleMemType = getListConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }

            dlog.resetErrorCount();
            BType memCompatibiltyType = checkListConstructorCompatibility(listCompatibleMemType, listConstructor);
            // Count only members that checked cleanly and are not duplicates.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }

        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        this.dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }

        if (compatibleTypes.isEmpty()) {
            // Re-check a clone (in a silent pass) purely to surface member diagnostics.
            BLangListConstructorExpr exprToLog = listConstructor;
            if (this.nonErrorLoggingCheck) {
                listConstructor.cloneAttempt++;
                exprToLog = nodeCloner.cloneNode(listConstructor);
            }

            BType inferredTupleType = getInferredTupleType(exprToLog, symTable.noType);

            if (!erroredExpType && inferredTupleType != symTable.semanticError) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, inferredTupleType);
            }
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            dlog.error(listConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES,
                    expType);
            return symTable.semanticError;
        }

        // Re-run non-silently against the single compatible member.
        return checkListConstructorCompatibility(compatibleTypes.get(0), listConstructor);
    }

    if (tag == TypeTags.INTERSECTION) {
        return checkListConstructorCompatibility(((BIntersectionType) bType).effectiveType, listConstructor);
    }

    BType possibleType = getListConstructorCompatibleNonUnionType(bType);

    switch (possibleType.tag) {
        case TypeTags.ARRAY:
            return checkArrayType(listConstructor, (BArrayType) possibleType);
        case TypeTags.TUPLE:
            return checkTupleType(listConstructor, (BTupleType) possibleType);
        case TypeTags.READONLY:
            return checkReadOnlyListType(listConstructor);
        case TypeTags.TYPEDESC:
            // For a typedesc expected type the members are themselves type references;
            // collect their resolved types to build the typedesc's constraint.
            List<BType> results = new ArrayList<>();
            listConstructor.isTypedescExpr = true;
            for (int i = 0; i < listConstructor.exprs.size(); i++) {
                results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
            }
            List<BType> actualTypes = new ArrayList<>();
            for (int i = 0; i < listConstructor.exprs.size(); i++) {
                final BLangExpression expr = listConstructor.exprs.get(i);
                if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                    actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
                } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                    actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
                } else {
                    actualTypes.add(results.get(i));
                }
            }
            if (actualTypes.size() == 1) {
                listConstructor.typedescType = actualTypes.get(0);
            } else {
                listConstructor.typedescType = new BTupleType(actualTypes);
            }
            return new BTypedescType(listConstructor.typedescType, null);
    }

    // Incompatible expected type: clone when silent, then log against the inferred tuple.
    BLangListConstructorExpr exprToLog = listConstructor;
    if (this.nonErrorLoggingCheck) {
        listConstructor.cloneAttempt++;
        exprToLog = nodeCloner.cloneNode(listConstructor);
    }

    if (bType == symTable.semanticError) {
        // Still type check the members so their own errors are reported.
        getInferredTupleType(exprToLog, symTable.semanticError);
    } else {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, bType,
                getInferredTupleType(exprToLog, symTable.noType));
    }
    return symTable.semanticError;
}
/**
 * Maps an expected (non-union) type to the concrete list type a list constructor should
 * be checked against: array/tuple/readonly/typedesc pass through, json/anydata/any
 * become the corresponding array type (its immutable counterpart when the expected type
 * is readonly), and an intersection yields its effective type. Anything else is
 * semanticError.
 */
private BType getListConstructorCompatibleNonUnionType(BType type) {
    switch (type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.READONLY:
        case TypeTags.TYPEDESC:
            return type;
        case TypeTags.JSON:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayJsonType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayJsonType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANYDATA:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayAnydataType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayAnydataType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANY:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.arrayType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.arrayType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
    }
    return symTable.semanticError;
}
/**
 * Checks a list constructor against an array type. An inferred-size array is closed to
 * the constructor length; a fixed-size array must not be exceeded, and may only be
 * under-filled when the element type has a filler value. Every member expression is
 * checked against the element type so all diagnostics are reported.
 */
private BType checkArrayType(BLangListConstructorExpr listConstructor, BArrayType arrayType) {
    BType elementType = arrayType.eType;
    int memberCount = listConstructor.exprs.size();

    if (arrayType.state == BArrayState.INFERRED) {
        // `[*]` array: fix the size from the constructor length.
        arrayType.size = memberCount;
        arrayType.state = BArrayState.CLOSED;
    } else if (arrayType.state != BArrayState.OPEN && arrayType.size != memberCount) {
        if (arrayType.size < memberCount) {
            dlog.error(listConstructor.pos,
                    DiagnosticErrorCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size,
                    memberCount);
            return symTable.semanticError;
        }
        // Fewer members than the fixed size is OK only with a filler value.
        if (!types.hasFillerValue(elementType)) {
            dlog.error(listConstructor.pos,
                    DiagnosticErrorCode.INVALID_LIST_CONSTRUCTOR_ELEMENT_TYPE, expType);
            return symTable.semanticError;
        }
    }

    boolean hasIncompatibleMember = false;
    for (BLangExpression memberExpr : listConstructor.exprs) {
        // Keep checking after a failure so every member's errors get logged.
        hasIncompatibleMember |= exprIncompatible(elementType, memberExpr);
    }
    return hasIncompatibleMember ? symTable.semanticError : arrayType;
}
/**
 * Checks a list constructor against a tuple type. Missing trailing members are allowed
 * only when each omitted member type has a filler value; extra members are allowed only
 * when the tuple has a rest type. Members in the fixed prefix are checked against their
 * positional member type, the remainder against the rest type.
 */
private BType checkTupleType(BLangListConstructorExpr listConstructor, BTupleType tupleType) {
    List<BLangExpression> exprs = listConstructor.exprs;
    List<BType> memberTypes = tupleType.tupleTypes;
    BType restType = tupleType.restType;
    int exprCount = exprs.size();
    int memberCount = memberTypes.size();

    if (exprCount < memberCount) {
        for (int i = exprCount; i < memberCount; i++) {
            if (!types.hasFillerValue(memberTypes.get(i))) {
                dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
                        "tuple and expression size does not match");
                return symTable.semanticError;
            }
        }
    } else if (exprCount > memberCount && restType == null) {
        dlog.error(listConstructor.pos, DiagnosticErrorCode.SYNTAX_ERROR,
                "tuple and expression size does not match");
        return symTable.semanticError;
    }

    boolean hasIncompatibleMember = false;
    int fixedPrefixCount = Math.min(exprCount, memberCount);

    for (int i = 0; i < fixedPrefixCount; i++) {
        hasIncompatibleMember |= exprIncompatible(memberTypes.get(i), exprs.get(i));
    }
    for (int i = fixedPrefixCount; i < exprCount; i++) {
        // Members beyond the fixed prefix are checked against the rest type.
        hasIncompatibleMember |= exprIncompatible(restType, exprs.get(i));
    }
    return hasIncompatibleMember ? symTable.semanticError : tupleType;
}
/**
 * Checks a list constructor against the `readonly` type. In a silent (non-error-logging)
 * pass, each member is simply verified to be readonly-compatible; otherwise a readonly
 * tuple type is inferred from the members and checked as a whole.
 */
private BType checkReadOnlyListType(BLangListConstructorExpr listConstructor) {
    if (this.nonErrorLoggingCheck) {
        for (BLangExpression memberExpr : listConstructor.exprs) {
            if (exprIncompatible(symTable.readonlyType, memberExpr)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }

    BType inferredType = getInferredTupleType(listConstructor, symTable.readonlyType);
    return inferredType == symTable.semanticError
            ? symTable.semanticError
            : types.checkType(listConstructor, inferredType, symTable.readonlyType);
}
/**
 * Returns true when the expression does not type check against the expected type. An
 * already-checked expression is judged by its recorded type; in a silent pass a clone is
 * checked instead so the original AST node is left untouched.
 */
private boolean exprIncompatible(BType eType, BLangExpression expr) {
    if (expr.typeChecked) {
        return expr.getBType() == symTable.semanticError;
    }

    BLangExpression exprToCheck;
    if (this.nonErrorLoggingCheck) {
        expr.cloneAttempt++;
        exprToCheck = nodeCloner.cloneNode(expr);
    } else {
        exprToCheck = expr;
    }

    return checkExpr(exprToCheck, this.env, eType) == symTable.semanticError;
}
/**
 * Type checks each expression with no contextually expected type and returns the
 * resulting types in order.
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env) {
    return checkExprList(exprs, env, symTable.noType);
}
/**
 * Type checks each expression against {@code expType} in the given environment and
 * returns the resulting types in order. The checker's current env and expType are saved
 * before the loop and restored afterwards, so callers' state is preserved.
 */
private List<BType> checkExprList(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> types = new ArrayList<>();
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    this.env = env;
    this.expType = expType;
    for (BLangExpression e : exprs) {
        checkExpr(e, this.env, expType);
        // resultType is set by checkExpr for the expression just checked.
        types.add(resultType);
    }
    this.env = prevEnv;
    this.expType = preExpType;
    return types;
}
/**
 * Infers a tuple type from a list constructor by checking each member against the given
 * expected type. Returns semanticError when any member errors; the tuple is flagged
 * readonly when the expected type is `readonly`.
 */
private BType getInferredTupleType(BLangListConstructorExpr listConstructor, BType expType) {
    List<BType> memberTypes = checkExprList(listConstructor.exprs, env, expType);

    for (BType memberType : memberTypes) {
        if (memberType == symTable.semanticError) {
            return symTable.semanticError;
        }
    }

    BTupleType tupleType = new BTupleType(memberTypes);
    if (expType.tag == TypeTags.READONLY) {
        tupleType.flags |= Flags.READONLY;
    }
    return tupleType;
}
/**
 * Type checks a mapping constructor expression. With no expected type (or a `readonly`
 * expected type) a record type is first inferred from the literal; an object expected
 * type is rejected outright. The final result narrows readonly-marked fields via
 * getEffectiveMappingType.
 */
public void visit(BLangRecordLiteral recordLiteral) {
    int expTypeTag = expType.tag;

    if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.READONLY) {
        expType = defineInferredRecordType(recordLiteral, expType);
    } else if (expTypeTag == TypeTags.OBJECT) {
        dlog.error(recordLiteral.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL, expType);
        resultType = symTable.semanticError;
        return;
    }

    resultType = getEffectiveMappingType(recordLiteral,
            checkMappingConstructorCompatibility(expType, recordLiteral));
}
/**
 * When a mapping constructor marks fields `readonly`, the applicable mapping type alone
 * does not capture their immutability. This builds and returns a new record type in
 * which those fields are required and readonly, copying the remaining fields from the
 * applicable type (or using the map constraint as the rest type). Returns the applicable
 * type unchanged when there is nothing to narrow.
 */
private BType getEffectiveMappingType(BLangRecordLiteral recordLiteral, BType applicableMappingType) {
    if (applicableMappingType == symTable.semanticError ||
            (applicableMappingType.tag == TypeTags.RECORD && Symbols.isFlagOn(applicableMappingType.flags,
                    Flags.READONLY))) {
        // Nothing to do: errored type, or the whole record is already readonly.
        return applicableMappingType;
    }

    Map<String, RecordLiteralNode.RecordField> readOnlyFields = new LinkedHashMap<>();
    LinkedHashMap<String, BField> applicableTypeFields =
            applicableMappingType.tag == TypeTags.RECORD ? ((BRecordType) applicableMappingType).fields :
                    new LinkedHashMap<>();

    // Collect fields marked `readonly` in the constructor that are not already readonly
    // in the applicable type. Spread fields cannot carry the marker and are skipped.
    for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
        if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
            continue;
        }

        String name;
        if (field.isKeyValueField()) {
            BLangRecordKeyValueField keyValueField = (BLangRecordKeyValueField) field;

            if (!keyValueField.readonly) {
                continue;
            }

            BLangExpression keyExpr = keyValueField.key.expr;
            if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                name = ((BLangSimpleVarRef) keyExpr).variableName.value;
            } else {
                // NOTE(review): non-var-ref keys are assumed to be string literals here;
                // a computed key would not be a BLangLiteral — confirm callers exclude it.
                name = (String) ((BLangLiteral) keyExpr).value;
            }
        } else {
            BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;

            if (!varNameField.readonly) {
                continue;
            }

            name = varNameField.variableName.value;
        }

        if (applicableTypeFields.containsKey(name) &&
                Symbols.isFlagOn(applicableTypeFields.get(name).symbol.flags, Flags.READONLY)) {
            continue;
        }

        readOnlyFields.put(name, field);
    }

    if (readOnlyFields.isEmpty()) {
        return applicableMappingType;
    }

    PackageID pkgID = env.enclPkg.symbol.pkgID;
    BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);

    LinkedHashMap<String, BField> newFields = new LinkedHashMap<>();

    // Define a required+readonly field for every collected field, typed by its value expression.
    for (Map.Entry<String, RecordLiteralNode.RecordField> readOnlyEntry : readOnlyFields.entrySet()) {
        RecordLiteralNode.RecordField field = readOnlyEntry.getValue();

        String key = readOnlyEntry.getKey();
        Name fieldName = names.fromString(key);

        BType readOnlyFieldType;
        if (field.isKeyValueField()) {
            readOnlyFieldType = ((BLangRecordKeyValueField) field).valueExpr.getBType();
        } else {
            // Spread fields were skipped above, so this has to be a var-name field.
            readOnlyFieldType = ((BLangRecordVarNameField) field).getBType();
        }

        BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
            add(Flag.REQUIRED);
            add(Flag.READONLY);
        }}), fieldName, pkgID, readOnlyFieldType, recordSymbol,
                ((BLangNode) field).pos, VIRTUAL);
        newFields.put(key, new BField(fieldName, null, fieldSymbol));
        recordSymbol.scope.define(fieldName, fieldSymbol);
    }

    BRecordType recordType = new BRecordType(recordSymbol, recordSymbol.flags);
    if (applicableMappingType.tag == TypeTags.MAP) {
        // Map-based: stay open, with the map's constraint as the rest type.
        recordType.sealed = false;
        recordType.restFieldType = ((BMapType) applicableMappingType).constraint;
    } else {
        // Record-based: copy over the remaining (non-narrowed) fields.
        BRecordType applicableRecordType = (BRecordType) applicableMappingType;
        boolean allReadOnlyFields = true;

        for (Map.Entry<String, BField> origEntry : applicableRecordType.fields.entrySet()) {
            String fieldName = origEntry.getKey();
            BField field = origEntry.getValue();

            if (readOnlyFields.containsKey(fieldName)) {
                // Already defined as a readonly field above.
                continue;
            }

            BVarSymbol origFieldSymbol = field.symbol;
            long origFieldFlags = origFieldSymbol.flags;

            if (allReadOnlyFields && !Symbols.isFlagOn(origFieldFlags, Flags.READONLY)) {
                allReadOnlyFields = false;
            }

            BVarSymbol fieldSymbol = new BVarSymbol(origFieldFlags, field.name, pkgID,
                    origFieldSymbol.type, recordSymbol, field.pos, VIRTUAL);
            newFields.put(fieldName, new BField(field.name, null, fieldSymbol));
            recordSymbol.scope.define(field.name, fieldSymbol);
        }

        recordType.sealed = applicableRecordType.sealed;
        recordType.restFieldType = applicableRecordType.restFieldType;

        // A sealed record whose fields are all readonly is itself readonly.
        if (recordType.sealed && allReadOnlyFields) {
            recordType.flags |= Flags.READONLY;
            recordType.tsymbol.flags |= Flags.READONLY;
        }
    }

    recordType.fields = newFields;
    recordSymbol.type = recordType;
    recordType.tsymbol = recordSymbol;

    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
            recordLiteral.pos);
    recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
            names, symTable);
    TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);

    if (applicableMappingType.tag == TypeTags.MAP) {
        recordLiteral.expectedType = applicableMappingType;
    }

    return recordType;
}
/**
 * Checks whether a mapping constructor expression is compatible with the given expected
 * type and returns the applicable (non-union, non-intersection) type, or semanticError.
 * For unions, member types are probed silently and exactly one must be compatible; on
 * failure the fields are still checked so member-level diagnostics get logged.
 */
private BType checkMappingConstructorCompatibility(BType bType, BLangRecordLiteral mappingConstructor) {
    int tag = bType.tag;
    if (tag == TypeTags.UNION) {
        // Silence diagnostics while probing union members; restore state afterwards.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int errorCount = this.dlog.errorCount();
        this.dlog.mute();

        List<BType> compatibleTypes = new ArrayList<>();
        boolean erroredExpType = false;
        for (BType memberType : ((BUnionType) bType).getMemberTypes()) {
            if (memberType == symTable.semanticError) {
                if (!erroredExpType) {
                    erroredExpType = true;
                }
                continue;
            }

            BType listCompatibleMemType = getMappingConstructorCompatibleNonUnionType(memberType);
            if (listCompatibleMemType == symTable.semanticError) {
                continue;
            }

            dlog.resetErrorCount();
            BType memCompatibiltyType = checkMappingConstructorCompatibility(listCompatibleMemType,
                    mappingConstructor);
            // Count only members that checked cleanly and are not duplicates.
            if (memCompatibiltyType != symTable.semanticError && dlog.errorCount() == 0 &&
                    isUniqueType(compatibleTypes, memCompatibiltyType)) {
                compatibleTypes.add(memCompatibiltyType);
            }
        }

        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        dlog.setErrorCount(errorCount);
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }

        if (compatibleTypes.isEmpty()) {
            if (!erroredExpType) {
                reportIncompatibleMappingConstructorError(mappingConstructor, bType);
            }
            // Still check the fields so member-level errors are logged.
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        } else if (compatibleTypes.size() != 1) {
            dlog.error(mappingConstructor.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, bType);
            validateSpecifiedFields(mappingConstructor, symTable.semanticError);
            return symTable.semanticError;
        }

        // Re-run non-silently against the single compatible member.
        return checkMappingConstructorCompatibility(compatibleTypes.get(0), mappingConstructor);
    }

    if (tag == TypeTags.INTERSECTION) {
        return checkMappingConstructorCompatibility(((BIntersectionType) bType).effectiveType, mappingConstructor);
    }

    BType possibleType = getMappingConstructorCompatibleNonUnionType(bType);

    switch (possibleType.tag) {
        case TypeTags.MAP:
            return validateSpecifiedFields(mappingConstructor, possibleType) ? possibleType :
                    symTable.semanticError;
        case TypeTags.RECORD:
            // A record additionally requires every required (non-never) field to be present.
            boolean isSpecifiedFieldsValid = validateSpecifiedFields(mappingConstructor, possibleType);

            boolean hasAllRequiredFields = validateRequiredFields((BRecordType) possibleType,
                    mappingConstructor.fields,
                    mappingConstructor.pos);

            return isSpecifiedFieldsValid && hasAllRequiredFields ? possibleType : symTable.semanticError;
        case TypeTags.READONLY:
            return checkReadOnlyMappingType(mappingConstructor);
    }

    reportIncompatibleMappingConstructorError(mappingConstructor, bType);
    validateSpecifiedFields(mappingConstructor, symTable.semanticError);
    return symTable.semanticError;
}
/**
 * Checks a mapping constructor against the `readonly` type. In a silent pass each field
 * value (key-value value, spread expression, or var-name field itself) is verified to be
 * readonly-compatible; otherwise a record type is inferred from the literal and checked
 * for compatibility as a whole.
 */
private BType checkReadOnlyMappingType(BLangRecordLiteral mappingConstructor) {
    if (this.nonErrorLoggingCheck) {
        for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
            BLangExpression valueExpr;
            if (field.isKeyValueField()) {
                valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
            } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
            } else {
                // A var-name field is itself the value expression.
                valueExpr = (BLangRecordVarNameField) field;
            }

            if (exprIncompatible(symTable.readonlyType, valueExpr)) {
                return symTable.semanticError;
            }
        }
        return symTable.readonlyType;
    }

    BType inferredType = defineInferredRecordType(mappingConstructor, symTable.readonlyType);
    return inferredType == symTable.semanticError
            ? symTable.semanticError
            : checkMappingConstructorCompatibility(inferredType, mappingConstructor);
}
/**
 * Maps an expected (non-union) type to the concrete mapping type a mapping constructor
 * should be checked against: map/record/readonly pass through, json/anydata/any become
 * the corresponding map type (its immutable counterpart when the expected type is
 * readonly), and an intersection yields its effective type. Anything else is
 * semanticError.
 */
private BType getMappingConstructorCompatibleNonUnionType(BType type) {
    switch (type.tag) {
        case TypeTags.MAP:
        case TypeTags.RECORD:
        case TypeTags.READONLY:
            return type;
        case TypeTags.JSON:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapJsonType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapJsonType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.ANYDATA:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapAnydataType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapAnydataType,
                            env, symTable, anonymousModelHelper, names);
        case TypeTags.ANY:
            return !Symbols.isFlagOn(type.flags, Flags.READONLY) ? symTable.mapType :
                    ImmutableTypeCloner.getEffectiveImmutableType(null, types, symTable.mapType, env,
                            symTable, anonymousModelHelper, names);
        case TypeTags.INTERSECTION:
            return ((BIntersectionType) type).effectiveType;
    }
    return symTable.semanticError;
}
/**
 * Returns true when the type can directly accept a mapping constructor (record or map).
 */
private boolean isMappingConstructorCompatibleType(BType type) {
    int tag = type.tag;
    return tag == TypeTags.RECORD || tag == TypeTags.MAP;
}
/**
 * Reports the most specific incompatibility error for a mapping constructor whose
 * expected type has no compatible member. For a `record|nil` union, field-level errors
 * are reported against the record instead of a generic message; a union containing at
 * least one mapping-compatible member gets an "incompatible mapping constructor" error;
 * otherwise a "no compatible type found" error is logged.
 */
private void reportIncompatibleMappingConstructorError(BLangRecordLiteral mappingConstructorExpr, BType expType) {
    if (expType == symTable.semanticError) {
        return;
    }

    if (expType.tag != TypeTags.UNION) {
        dlog.error(mappingConstructorExpr.pos,
                DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, expType);
        return;
    }

    BUnionType unionType = (BUnionType) expType;
    BType[] memberTypes = unionType.getMemberTypes().toArray(new BType[0]);

    // Special-case `record|nil` (i.e. `R?`): surface the record's own field errors.
    if (memberTypes.length == 2) {
        BRecordType recType = null;

        if (memberTypes[0].tag == TypeTags.RECORD && memberTypes[1].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[0];
        } else if (memberTypes[1].tag == TypeTags.RECORD && memberTypes[0].tag == TypeTags.NIL) {
            recType = (BRecordType) memberTypes[1];
        }

        if (recType != null) {
            validateSpecifiedFields(mappingConstructorExpr, recType);
            validateRequiredFields(recType, mappingConstructorExpr.fields, mappingConstructorExpr.pos);
            return;
        }
    }

    // If at least one member could take a mapping constructor, blame the constructor.
    for (BType bType : memberTypes) {
        if (isMappingConstructorCompatibleType(bType)) {
            dlog.error(mappingConstructorExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_MAPPING_CONSTRUCTOR,
                    unionType);
            return;
        }
    }

    dlog.error(mappingConstructorExpr.pos,
            DiagnosticErrorCode.MAPPING_CONSTRUCTOR_COMPATIBLE_TYPE_NOT_FOUND, unionType);
}
/**
 * Checks every field of the mapping constructor against the given type. All fields are
 * checked even after a failure so every diagnostic gets reported.
 *
 * @return true when no field errored
 */
private boolean validateSpecifiedFields(BLangRecordLiteral mappingConstructor, BType possibleType) {
    boolean allFieldsValid = true;
    for (RecordLiteralNode.RecordField field : mappingConstructor.fields) {
        if (checkMappingField(field, possibleType) == symTable.semanticError) {
            allFieldsValid = false;
        }
    }
    return allFieldsValid;
}
/**
 * Verifies that every required field of the record type is specified in the mapping
 * constructor, except fields whose type is (or effectively contains) `never`. Logs one
 * error per missing field and keeps scanning so all omissions are reported.
 *
 * @return true when no required field is missing
 */
private boolean validateRequiredFields(BRecordType type, List<RecordLiteralNode.RecordField> specifiedFields,
                                       Location pos) {
    HashSet<String> specifiedNames = getFieldNames(specifiedFields);
    boolean hasAllRequiredFields = true;

    for (BField field : type.fields.values()) {
        if (specifiedNames.contains(field.name.value)) {
            continue;
        }
        boolean required = Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED);
        if (required && !types.isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
            dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
            hasAllRequiredFields = false;
        }
    }
    return hasAllRequiredFields;
}
/**
 * Collects the statically known field names supplied by a mapping constructor's fields:
 * non-computed key-value keys, var-name fields, and the required field names contributed
 * by record-typed spread fields.
 */
private HashSet<String> getFieldNames(List<RecordLiteralNode.RecordField> specifiedFields) {
    HashSet<String> fieldNames = new HashSet<>();

    for (RecordLiteralNode.RecordField specifiedField : specifiedFields) {
        if (specifiedField.isKeyValueField()) {
            String name = getKeyValueFieldName((BLangRecordKeyValueField) specifiedField);
            if (name == null) {
                continue; // computed key, not statically known
            }

            fieldNames.add(name);
        } else if (specifiedField.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            fieldNames.add(getVarNameFieldName((BLangRecordVarNameField) specifiedField));
        } else {
            // Remaining kind is the spread-operator field.
            fieldNames.addAll(getSpreadOpFieldRequiredFieldNames(
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) specifiedField));
        }
    }

    return fieldNames;
}
/**
 * Returns the statically known name of a key-value field's key, or null for computed
 * keys and key expressions that are neither a variable reference nor a string literal.
 */
private String getKeyValueFieldName(BLangRecordKeyValueField field) {
    BLangRecordKey key = field.key;
    if (key.computedKey) {
        // `[expr]: value` — the name is not known at compile time.
        return null;
    }

    BLangExpression keyExpr = key.expr;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpr).variableName.value;
    }
    if (keyKind == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) keyExpr).value;
    }
    return null;
}
/**
 * Returns the field name of a var-name field (`{x}`), which is the variable's own name.
 */
private String getVarNameFieldName(BLangRecordVarNameField field) {
    return field.variableName.value;
}
/**
 * Returns the names of the non-optional fields contributed by a spread-operator field.
 * Only record-typed spread expressions contribute statically known names; anything else
 * yields an empty list. Note: this type checks the spread expression as a side effect.
 */
private List<String> getSpreadOpFieldRequiredFieldNames(BLangRecordLiteral.BLangRecordSpreadOperatorField field) {
    BType spreadType = checkExpr(field.expr, env);

    if (spreadType.tag != TypeTags.RECORD) {
        return Collections.emptyList();
    }

    return ((BRecordType) spreadType).getFields().values().stream()
            .filter(bField -> !Symbols.isOptional(bField.symbol))
            .map(bField -> bField.name.value)
            .collect(Collectors.toList());
}
/**
 * Type checks a worker flush expression. An explicitly named worker must exist; if it
 * does, its symbol is attached to the expression. The expression's actual type is
 * `error?` (error|nil), checked against the expected type.
 */
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    if (workerFlushExpr.workerIdentifier != null) {
        String workerName = workerFlushExpr.workerIdentifier.getValue();
        if (!this.workerExists(this.env, workerName)) {
            this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
        } else {
            // Resolve and cache the worker's symbol for later phases.
            BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromString(workerName));
            if (symbol != symTable.notFoundSymbol) {
                workerFlushExpr.workerSymbol = symbol;
            }
        }
    }
    // flush evaluates to error|() — an error from the send, or nil on success.
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, actualType, expType);
}
/**
 * Type checks a synchronous worker send (`->> w`). The target worker symbol is resolved
 * (semanticError when missing), the sent expression must belong to Cloneable, and the
 * named worker must exist. The expression evaluates to nil when there is no contextually
 * expected type, otherwise to the expected type.
 */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(syncSendExpr.workerIdentifier));

    if (symTable.notFoundSymbol.equals(symbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerType = symbol.type;
        syncSendExpr.workerSymbol = symbol;
    }

    // NOTE(review): env appears to be cached on the node for later phases — confirm.
    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);

    // Only values belonging to Cloneable may be sent between workers.
    if (!types.isAssignable(syncSendExpr.expr.getBType(), symTable.cloneableType)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND,
                syncSendExpr.expr.getBType());
    }

    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(this.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
    }

    syncSendExpr.expectedType = expType;

    // A sync send with no expected type evaluates to nil.
    resultType = expType == symTable.noType ? symTable.nilType : expType;
}
/**
 * Type checks a worker receive expression. The source worker symbol is resolved
 * (semanticError when missing), and a contextually expected type is mandatory — a bare
 * receive is invalid. The expression takes the expected type as its type.
 */
@Override
public void visit(BLangWorkerReceive workerReceiveExpr) {
    workerReceiveExpr.env = this.env;

    BSymbol workerSymbol =
            symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(workerReceiveExpr.workerIdentifier));
    if (symTable.notFoundSymbol.equals(workerSymbol)) {
        workerReceiveExpr.workerType = symTable.semanticError;
    } else {
        workerReceiveExpr.workerType = workerSymbol.type;
        workerReceiveExpr.workerSymbol = workerSymbol;
    }

    // A receive needs a contextually expected type to know what it produces.
    if (this.expType == symTable.noType) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticErrorCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.setBType(this.expType);
    resultType = this.expType;
}
/**
 * Returns true when a worker with the given name is visible in the environment: either
 * the default worker, or a symbol whose type is a worker-derived future.
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (workerName.equals(DEFAULT_WORKER_NAME)) {
        return true;
    }
    BSymbol symbol = this.symResolver.lookupSymbolInMainSpace(env, new Name(workerName));
    if (symbol == this.symTable.notFoundSymbol) {
        return false;
    }
    // Workers are modeled as futures flagged as worker-derived.
    return symbol.type.tag == TypeTags.FUTURE && ((BFutureType) symbol.type).workerDerivative;
}
/**
 * Type checks a constant reference: resolves the constant's symbol from its (possibly
 * module-qualified) name and inserts an implicit cast to the expected type if needed.
 */
@Override
public void visit(BLangConstRef constRef) {
    constRef.symbol = symResolver.lookupMainSpaceSymbolInPackage(constRef.pos, env,
            names.fromIdNode(constRef.pkgAlias), names.fromIdNode(constRef.variableName));

    types.setImplicitCastExpr(constRef, constRef.getBType(), expType);
    resultType = constRef.getBType();
}
public void visit(BLangSimpleVarRef varRefExpr) {
    // Type-checks a simple name reference. The name may resolve to `_`, an XML
    // namespace prefix, a variable, a type definition, or a constant; `resultType`
    // is set to the checked type (or semanticError on failure).
    BType actualType = symTable.semanticError;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    if (varName == Names.IGNORE) {
        // The wildcard `_` may only be written to, never read.
        if (varRefExpr.isLValue) {
            varRefExpr.setBType(this.symTable.anyType);
        } else {
            varRefExpr.setBType(this.symTable.semanticError);
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDERSCORE_NOT_ALLOWED);
        }
        // Synthesize a symbol so downstream phases have something to refer to.
        varRefExpr.symbol = new BVarSymbol(0, true, varName, env.enclPkg.symbol.pkgID, varRefExpr.getBType(),
                env.scope.owner, varRefExpr.pos, VIRTUAL);
        resultType = varRefExpr.getBType();
        return;
    }
    Name compUnitName = getCurrentCompUnit(varRefExpr);
    varRefExpr.pkgSymbol =
            symResolver.resolvePrefixSymbol(env, names.fromIdNode(varRefExpr.pkgAlias), compUnitName);
    if (varRefExpr.pkgSymbol == symTable.notFoundSymbol) {
        varRefExpr.symbol = symTable.notFoundSymbol;
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, varRefExpr.pkgAlias);
    }
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // A reference to an XML namespace prefix has string type.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName);
        if (symbol == symTable.notFoundSymbol && env.enclType != null) {
            // Inside a type body, fall back to an attached function of that type.
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclType.getBType().tsymbol.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclType.getBType().tsymbol);
        }
        if (((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE)) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSelfReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            // Register closure usage so the desugar phase can capture the variable.
            markAndRegisterClosureVariable(symbol, varRefExpr.pos, env);
        } else if ((symbol.tag & SymTag.TYPE_DEF) == SymTag.TYPE_DEF) {
            // A type name used in value position has typedesc type.
            actualType = symbol.type.tag == TypeTags.TYPEDESC ? symbol.type : new BTypedescType(symbol.type, null);
            varRefExpr.symbol = symbol;
        } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) symbol;
            varRefExpr.symbol = constSymbol;
            BType symbolType = symbol.type;
            // When a finite type is expected (directly or via an assignable union
            // member), keep the constant's own type; otherwise widen to its literal type.
            if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE ||
                    (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream()
                            .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                    types.isAssignable(symbolType, memType)))) {
                actualType = symbolType;
            } else {
                actualType = constSymbol.literalType;
            }
            // Constants are never valid assignment targets.
            if (varRefExpr.isLValue || varRefExpr.isCompoundAssignmentLValue) {
                actualType = symTable.semanticError;
                dlog.error(varRefExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_CONSTANT_VALUE);
            }
        } else {
            varRefExpr.symbol = symbol;
            logUndefinedSymbolError(varRefExpr.pos, varName.value);
        }
    }
    // A closed array type whose size is still to be inferred cannot take it from
    // a plain variable reference.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticErrorCode.CLOSED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
@Override
public void visit(BLangRecordVarRef varRefExpr) {
    // Type-checks a record binding pattern used as a reference (destructuring
    // assignment), building an anonymous record type from the referenced variables.
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    String recordName = this.anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.symbol.pkgID);
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, names.fromString(recordName),
            env.enclPkg.symbol.pkgID, null, env.scope.owner,
            varRefExpr.pos, SOURCE);
    symbolEnter.defineSymbol(varRefExpr.pos, recordSymbol, env);
    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        BLangVariableReference bLangVarReference = (BLangVariableReference) recordRefField.variableReference;
        bLangVarReference.isLValue = true;
        checkExpr(recordRefField.variableReference, env);
        if (bLangVarReference.symbol == null || bLangVarReference.symbol == symTable.notFoundSymbol ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) bLangVarReference.symbol;
        // Each resolved reference becomes a field of the synthesized record type.
        BField field = new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                new BVarSymbol(0, names.fromIdNode(recordRefField.variableName),
                        env.enclPkg.symbol.pkgID, bVarSymbol.type, recordSymbol,
                        varRefExpr.pos, SOURCE));
        fields.put(field.name.value, field);
    }
    BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
    if (restParam != null) {
        checkExpr(restParam, env);
        // NOTE(review): this assignment discards any `true` accumulated in the loop
        // above; presumably it should be OR-ed in — confirm the intent.
        unresolvedReference = !isValidVariableReference(restParam);
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, recordSymbol.name,
            env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner, varRefExpr.pos,
            SOURCE);
    // The rest binding decides whether the record is closed and what its rest
    // field type is.
    if (restParam == null) {
        bRecordType.sealed = true;
        bRecordType.restFieldType = symTable.noType;
    } else if (restParam.getBType() == symTable.semanticError) {
        bRecordType.restFieldType = symTable.mapType;
    } else {
        // The rest param may be a record, a map, or some other type; take the
        // appropriate element/constraint type as the rest field type.
        BType restFieldType;
        if (restParam.getBType().tag == TypeTags.RECORD) {
            restFieldType = ((BRecordType) restParam.getBType()).restFieldType;
        } else if (restParam.getBType().tag == TypeTags.MAP) {
            restFieldType = ((BMapType) restParam.getBType()).constraint;
        } else {
            restFieldType = restParam.getBType();
        }
        bRecordType.restFieldType = restFieldType;
    }
    resultType = bRecordType;
}
@Override
public void visit(BLangErrorVarRef varRefExpr) {
    // Type-checks an error binding pattern reference
    // `error(msg, cause, ...details, ...rest)`.
    if (varRefExpr.typeNode != null) {
        // An explicit error type reference drives the check instead of inference.
        BType bType = symResolver.resolveTypeNode(varRefExpr.typeNode, env);
        varRefExpr.setBType(bType);
        checkIndirectErrorVarRef(varRefExpr);
        resultType = bType;
        return;
    }
    if (varRefExpr.message != null) {
        // The message binding must be able to hold a string.
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
        if (!types.isAssignable(symTable.stringType, varRefExpr.message.getBType())) {
            dlog.error(varRefExpr.message.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType,
                    varRefExpr.message.getBType());
        }
    }
    if (varRefExpr.cause != null) {
        // The cause binding must be able to hold `error?`.
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
        if (!types.isAssignable(symTable.errorOrNilType, varRefExpr.cause.getBType())) {
            dlog.error(varRefExpr.cause.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.errorOrNilType,
                    varRefExpr.cause.getBType());
        }
    }
    boolean unresolvedReference = false;
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
        refItem.isLValue = true;
        checkExpr(refItem, env);
        if (!isValidVariableReference(refItem)) {
            unresolvedReference = true;
            continue;
        }
        // Detail bindings must be plain variable references, not field/member access.
        if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                || refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            dlog.error(refItem.pos, DiagnosticErrorCode.INVALID_VARIABLE_REFERENCE_IN_BINDING_PATTERN,
                    refItem);
            unresolvedReference = true;
            continue;
        }
        if (refItem.symbol == null) {
            unresolvedReference = true;
        }
    }
    if (varRefExpr.restVar != null) {
        varRefExpr.restVar.isLValue = true;
        if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            checkExpr(varRefExpr.restVar, env);
            unresolvedReference = unresolvedReference
                    || varRefExpr.restVar.symbol == null
                    || !isValidVariableReference(varRefExpr.restVar);
        }
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    // Work out the rest field type implied by the rest binding (if any).
    BType errorRefRestFieldType;
    if (varRefExpr.restVar == null) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
        errorRefRestFieldType = symTable.anydataOrReadonly;
    } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        errorRefRestFieldType = varRefExpr.restVar.getBType();
    } else if (varRefExpr.restVar.getBType().tag == TypeTags.MAP) {
        errorRefRestFieldType = ((BMapType) varRefExpr.restVar.getBType()).constraint;
    } else {
        dlog.error(varRefExpr.restVar.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                varRefExpr.restVar.getBType(), symTable.detailType);
        resultType = symTable.semanticError;
        return;
    }
    BType errorDetailType = errorRefRestFieldType == symTable.anydataOrReadonly
            ? symTable.errorType.detailType
            : new BMapType(TypeTags.MAP, errorRefRestFieldType, null, Flags.PUBLIC);
    resultType = new BErrorType(symTable.errorType.tsymbol, errorDetailType);
}
// Checks an error binding pattern that carries an explicit error type reference:
// each detail entry is checked against its own expression's type, and the
// message/cause bindings are checked in lvalue mode.
private void checkIndirectErrorVarRef(BLangErrorVarRef varRefExpr) {
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        checkExpr(detailItem.expr, env);
        checkExpr(detailItem, env, detailItem.expr.getBType());
    }
    if (varRefExpr.restVar != null) {
        checkExpr(varRefExpr.restVar, env);
    }
    if (varRefExpr.message != null) {
        varRefExpr.message.isLValue = true;
        checkExpr(varRefExpr.message, env);
    }
    if (varRefExpr.cause != null) {
        varRefExpr.cause.isLValue = true;
        checkExpr(varRefExpr.cause, env);
    }
}
@Override
public void visit(BLangTupleVarRef varRefExpr) {
    // Check every member reference in lvalue mode and collect their types into a
    // tuple type; the rest binding (if any) supplies the tuple's rest type.
    List<BType> memberTypes = new ArrayList<>();
    for (BLangExpression memberRef : varRefExpr.expressions) {
        ((BLangVariableReference) memberRef).isLValue = true;
        memberTypes.add(checkExpr(memberRef, env, symTable.noType));
    }
    BTupleType actualType = new BTupleType(memberTypes);
    BLangExpression restExpr = (BLangExpression) varRefExpr.restParam;
    if (restExpr != null) {
        ((BLangVariableReference) restExpr).isLValue = true;
        BType restRefType = checkExpr(restExpr, env, symTable.noType);
        if (restRefType.tag != TypeTags.ARRAY && restRefType.tag != TypeTags.TUPLE) {
            // The rest descriptor must refer to a list value.
            dlog.error(varRefExpr.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, restRefType);
            resultType = symTable.semanticError;
            return;
        }
        actualType.restType = restRefType.tag == TypeTags.ARRAY
                ? ((BArrayType) restRefType).eType
                : restRefType;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Recursively checks whether a (possibly multidimensional) array type contains at
 * least one dimension whose size is still to be inferred (open sealed).
 *
 * @param arrayType array type to inspect
 * @return true if at least one dimension is open sealed
 */
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    if (arrayType.state == BArrayState.INFERRED) {
        return true;
    }
    // Recurse into nested array dimensions only.
    return arrayType.eType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) arrayType.eType);
}
/**
 * Recursively walks outward from {@code env} to find the symbol environment that
 * encloses the given invokable node. Closure-variable lookup must start there,
 * since that environment lies outside the scope of the lambda itself.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    if (env.enclEnv.node != null) {
        NodeKind enclosingKind = env.enclEnv.node.getKind();
        // Arrow expressions and on-fail clauses form their own closure boundary.
        if (enclosingKind == NodeKind.ARROW_EXPR || enclosingKind == NodeKind.ON_FAIL) {
            return env.enclEnv;
        }
    }
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
    }
    return env;
}
// Record-type variant of findEnclosingInvokableEnv: walks outward past the given
// record type node to the environment where closure-variable lookup must start.
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangRecordTypeNode recordTypeNode) {
    if (env.enclEnv.node != null) {
        NodeKind enclosingKind = env.enclEnv.node.getKind();
        // Arrow expressions and on-fail clauses form their own closure boundary.
        if (enclosingKind == NodeKind.ARROW_EXPR || enclosingKind == NodeKind.ON_FAIL) {
            return env.enclEnv;
        }
    }
    if (env.enclType != null && env.enclType == recordTypeNode) {
        return findEnclosingInvokableEnv(env.enclEnv, recordTypeNode);
    }
    return env;
}
// A symbol counts as a function argument when some parameter shares both its name
// and its type tag.
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.getBType().tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Type-checks `expr.field` / `expr?.field`, including readonly-update checks
    // when the access appears on the left-hand side of an assignment.
    markLeafNode(fieldAccessExpr);
    BLangExpression containerExpression = fieldAccessExpr.expr;
    // Propagate lvalue-ness down to the container so nested accesses check correctly.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = fieldAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                fieldAccessExpr.isCompoundAssignmentLValue;
    }
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(checkExpr(containerExpression, env));
    // ns-prefixed field access (`x.ns:attr`) is only meaningful on xml values.
    if (fieldAccessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess
            && !isXmlAccess(fieldAccessExpr)) {
        dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.INVALID_FIELD_ACCESS_EXPRESSION);
        resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (fieldAccessExpr.optionalFieldAccess) {
        // `?.` is read-only; it cannot be an assignment target.
        if (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue) {
            dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
            resultType = symTable.semanticError;
            return;
        }
        actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
                names.fromIdNode(fieldAccessExpr.field));
    } else {
        actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
        if (actualType != symTable.semanticError &&
                (fieldAccessExpr.isLValue || fieldAccessExpr.isCompoundAssignmentLValue)) {
            if (isAllReadonlyTypes(varRefType)) {
                // Updating a readonly value is only permitted while the object is
                // being initialized inside its own init method.
                if (varRefType.tag != TypeTags.OBJECT || !isInitializationInInit(varRefType)) {
                    dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                            varRefType);
                    resultType = symTable.semanticError;
                    return;
                }
            } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD) &&
                    isInvalidReadonlyFieldUpdate(varRefType, fieldAccessExpr.field.value)) {
                dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                        fieldAccessExpr.field.value, varRefType);
                resultType = symTable.semanticError;
                return;
            }
        }
    }
    resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
// True when the type is readonly; for a union, every member must be readonly.
private boolean isAllReadonlyTypes(BType type) {
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!isAllReadonlyTypes(memberType)) {
                return false;
            }
        }
        return true;
    }
    return Symbols.isFlagOn(type.flags, Flags.READONLY);
}
// True when the checker is currently inside the init method of the given object
// type — the only place where its readonly fields may legally be assigned.
private boolean isInitializationInInit(BType type) {
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) ((BObjectType) type).tsymbol;
    BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
    if (initializerFunc == null || env.enclInvokable == null) {
        return false;
    }
    return env.enclInvokable.symbol == initializerFunc.symbol;
}
// Decides whether assigning to `fieldName` on a value of `type` would update a
// readonly field. For a union the update is invalid only when it is invalid for
// every member.
private boolean isInvalidReadonlyFieldUpdate(BType type, String fieldName) {
    if (type.tag == TypeTags.RECORD) {
        if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
            // The whole record value is immutable.
            return true;
        }
        BRecordType recordType = (BRecordType) type;
        for (BField field : recordType.fields.values()) {
            if (field.name.value.equals(fieldName)) {
                // Named field found: invalid only when that field itself is readonly.
                return Symbols.isFlagOn(field.symbol.flags, Flags.READONLY);
            }
        }
        // Unknown field: updating it is invalid only when the record is closed.
        return recordType.sealed;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isInvalidReadonlyFieldUpdate(memberType, fieldName)) {
            return false;
        }
    }
    return true;
}
// True when the field access operates on an xml value: either the container is
// already xml, or it is a chained lax field access whose union type still
// contains an xml member.
private boolean isXmlAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression expr = fieldAccessExpr.expr;
    int exprTypeTag = expr.getBType().tag;
    if (exprTypeTag == TypeTags.XML || exprTypeTag == TypeTags.XML_ELEMENT) {
        return true;
    }
    if (expr.getKind() != NodeKind.FIELD_BASED_ACCESS_EXPR || exprTypeTag != TypeTags.UNION
            || !hasLaxOriginalType((BLangFieldBasedAccess) expr)) {
        return false;
    }
    Set<BType> memberTypes = ((BUnionType) expr.getBType()).getMemberTypes();
    return memberTypes.contains(symTable.xmlType) || memberTypes.contains(symTable.xmlElementType);
}
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
    // Type-checks `expr[index]`, including multi-key table access and readonly
    // update validation for lvalue positions.
    markLeafNode(indexBasedAccessExpr);
    BLangExpression containerExpression = indexBasedAccessExpr.expr;
    // Member access on a typedesc expression is not supported.
    if (containerExpression.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                ((BLangTypedescExpr) containerExpression).typeNode);
        resultType = symTable.semanticError;
        return;
    }
    // Propagate lvalue-ness down to the container.
    if (containerExpression instanceof BLangValueExpression) {
        ((BLangValueExpression) containerExpression).isLValue = indexBasedAccessExpr.isLValue;
        ((BLangValueExpression) containerExpression).isCompoundAssignmentLValue =
                indexBasedAccessExpr.isCompoundAssignmentLValue;
    }
    // Containers already typed as string are not re-checked (avoids a redundant pass).
    boolean isStringValue = containerExpression.getBType() != null
            && containerExpression.getBType().tag == TypeTags.STRING;
    if (!isStringValue) {
        checkExpr(containerExpression, this.env, symTable.noType);
    }
    // Multi-key member access (`t[k1, k2]`) is only valid on tables.
    if (indexBasedAccessExpr.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY &&
            containerExpression.getBType().tag != TypeTags.TABLE) {
        dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.MULTI_KEY_MEMBER_ACCESS_NOT_SUPPORTED,
                containerExpression.getBType());
        resultType = symTable.semanticError;
        return;
    }
    BType actualType = checkIndexAccessExpr(indexBasedAccessExpr);
    BType exprType = containerExpression.getBType();
    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    if (actualType != symTable.semanticError &&
            (indexBasedAccessExpr.isLValue || indexBasedAccessExpr.isCompoundAssignmentLValue)) {
        if (isAllReadonlyTypes(exprType)) {
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE,
                    exprType);
            resultType = symTable.semanticError;
            return;
        } else if (types.isSubTypeOfBaseType(exprType, TypeTags.RECORD) &&
                (indexExpr.getKind() == NodeKind.LITERAL || isConst(indexExpr)) &&
                isInvalidReadonlyFieldUpdate(exprType, getConstFieldName(indexExpr))) {
            // A constant index into a record is effectively a field access, so the
            // readonly-field rules apply.
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_RECORD_FIELD,
                    getConstFieldName(indexExpr), exprType);
            resultType = symTable.semanticError;
            return;
        }
    }
    if (indexBasedAccessExpr.isLValue) {
        // lvalues keep the member's actual type; no conversion toward the expected type.
        indexBasedAccessExpr.originalType = actualType;
        indexBasedAccessExpr.setBType(actualType);
        resultType = actualType;
        return;
    }
    this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
public void visit(BLangInvocation iExpr) {
    // Type-checks an invocation. Without a receiver it is a plain function call;
    // otherwise dispatch on the receiver's type: object method, record field
    // function pointer, or lang-lib method for everything else.
    if (iExpr.expr == null) {
        checkFunctionInvocationExpr(iExpr);
        return;
    }
    if (invalidModuleAliasUsage(iExpr)) {
        return;
    }
    checkExpr(iExpr.expr, this.env, symTable.noType);
    BType varRefType = iExpr.expr.getBType();
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
            checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(iExpr, this.env);
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.SEMANTIC_ERROR:
            // Receiver already failed to check; stay silent to avoid cascading errors.
            break;
        default:
            // Any other receiver type may still provide a lang-lib method.
            checkInLangLib(iExpr, varRefType);
    }
}
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Type-checks `error(...)` / `error MyError(...)`: candidate error types are
    // gathered from the explicit type reference or the expected type, the detail
    // mapping is inferred against each candidate's detail type, and the named args
    // are then validated against the selected detail type.
    BLangUserDefinedType userProvidedTypeRef = errorConstructorExpr.errorTypeRef;
    if (userProvidedTypeRef != null) {
        symResolver.resolveTypeNode(userProvidedTypeRef, env, DiagnosticErrorCode.UNDEFINED_ERROR_TYPE_DESCRIPTOR);
    }
    validateErrorConstructorPositionalArgs(errorConstructorExpr);
    List<BType> expandedCandidates = getTypeCandidatesForErrorConstructor(errorConstructorExpr);
    List<BType> errorDetailTypes = new ArrayList<>();
    for (BType expandedCandidate : expandedCandidates) {
        BType detailType = ((BErrorType) expandedCandidate).detailType;
        errorDetailTypes.add(detailType);
    }
    BType detailCandidate;
    if (errorDetailTypes.size() == 1) {
        detailCandidate = errorDetailTypes.get(0);
    } else {
        detailCandidate = BUnionType.create(null, new LinkedHashSet<>(errorDetailTypes));
    }
    // Infer the detail mapping's type silently (no diagnostics) against the candidates.
    BLangRecordLiteral recordLiteral = createRecordLiteralForErrorConstructor(errorConstructorExpr);
    BType inferredDetailType = checkExprSilent(recordLiteral, detailCandidate, env);
    int index = errorDetailTypes.indexOf(inferredDetailType);
    BType selectedCandidate = index < 0 ? symTable.semanticError : expandedCandidates.get(index);
    if (selectedCandidate != symTable.semanticError
            && (userProvidedTypeRef == null || userProvidedTypeRef.getBType() == selectedCandidate)) {
        // Unambiguous match: re-check the details with diagnostics enabled and finish.
        checkProvidedErrorDetails(errorConstructorExpr, inferredDetailType);
        resultType = types.checkType(errorConstructorExpr.pos, selectedCandidate, expType,
                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
        return;
    }
    if (userProvidedTypeRef == null && errorDetailTypes.size() > 1) {
        dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.CANNOT_INFER_ERROR_TYPE, expType);
    }
    // Fall back to the explicit reference, the single candidate, or the generic error type.
    BErrorType errorType;
    if (userProvidedTypeRef != null && userProvidedTypeRef.getBType().tag == TypeTags.ERROR) {
        errorType = (BErrorType) userProvidedTypeRef.getBType();
    } else if (expandedCandidates.size() == 1) {
        errorType = (BErrorType) expandedCandidates.get(0);
    } else {
        errorType = symTable.errorType;
    }
    List<BLangNamedArgsExpression> namedArgs =
            checkProvidedErrorDetails(errorConstructorExpr, errorType.detailType);
    BType detailType = errorType.detailType;
    if (detailType.tag == TypeTags.MAP) {
        // map<T> detail: every named arg value must be assignable to the constraint T.
        BType errorDetailTypeConstraint = ((BMapType) detailType).constraint;
        for (BLangNamedArgsExpression namedArgExpr: namedArgs) {
            if (!types.isAssignable(namedArgExpr.expr.getBType(), errorDetailTypeConstraint)) {
                dlog.error(namedArgExpr.pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                        namedArgExpr.name, errorDetailTypeConstraint, namedArgExpr.expr.getBType());
            }
        }
    } else if (detailType.tag == TypeTags.RECORD) {
        // Record detail: track required fields, match every named arg against its
        // field, and report anything missing or incompatible.
        BRecordType targetErrorDetailRec = (BRecordType) errorType.detailType;
        LinkedList<String> missingRequiredFields = targetErrorDetailRec.fields.values().stream()
                .filter(f -> (f.symbol.flags & Flags.REQUIRED) == Flags.REQUIRED)
                .map(f -> f.name.value)
                .collect(Collectors.toCollection(LinkedList::new));
        LinkedHashMap<String, BField> targetFields = targetErrorDetailRec.fields;
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            BField field = targetFields.get(namedArg.name.value);
            Location pos = namedArg.pos;
            if (field == null) {
                if (targetErrorDetailRec.sealed) {
                    dlog.error(pos, DiagnosticErrorCode.UNKNOWN_DETAIL_ARG_TO_CLOSED_ERROR_DETAIL_REC,
                            namedArg.name, targetErrorDetailRec);
                } else if (targetFields.isEmpty()
                        && !types.isAssignable(namedArg.expr.getBType(), targetErrorDetailRec.restFieldType)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_REST_ARG_TYPE,
                            namedArg.name, targetErrorDetailRec);
                }
            } else {
                missingRequiredFields.remove(namedArg.name.value);
                if (!types.isAssignable(namedArg.expr.getBType(), field.type)) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_ERROR_DETAIL_ARG_TYPE,
                            namedArg.name, field.type, namedArg.expr.getBType());
                }
            }
        }
        for (String requiredField : missingRequiredFields) {
            dlog.error(errorConstructorExpr.pos, DiagnosticErrorCode.MISSING_ERROR_DETAIL_ARG, requiredField);
        }
    }
    if (userProvidedTypeRef != null) {
        errorConstructorExpr.setBType(userProvidedTypeRef.getBType());
    } else {
        errorConstructorExpr.setBType(errorType);
    }
    resultType = errorConstructorExpr.getBType();
}
// error(msg, cause?): the first positional arg must be a string and the optional
// second one an `error?` value.
private void validateErrorConstructorPositionalArgs(BLangErrorConstructorExpr errorConstructorExpr) {
    int argCount = errorConstructorExpr.positionalArgs.size();
    if (argCount == 0) {
        return;
    }
    checkExpr(errorConstructorExpr.positionalArgs.get(0), this.env, symTable.stringType);
    if (argCount > 1) {
        checkExpr(errorConstructorExpr.positionalArgs.get(1), this.env, symTable.errorOrNilType);
    }
}
// Runs a type check with diagnostics muted so that speculative typing leaves no
// errors behind; the previous error count and logging mode are restored afterwards.
private BType checkExprSilent(BLangRecordLiteral recordLiteral, BType expType, SymbolEnv env) {
    boolean alreadySilent = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.mute();

    BType type = checkExpr(recordLiteral, env, expType);

    this.nonErrorLoggingCheck = alreadySilent;
    dlog.setErrorCount(prevErrorCount);
    // Only unmute if this call muted; an outer caller may still be in a silent check.
    if (!alreadySilent) {
        this.dlog.unmute();
    }
    return type;
}
// Builds a synthetic record literal `{ name: expr, ... }` from the constructor's
// named args; it is used to infer the error detail type.
private BLangRecordLiteral createRecordLiteralForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (NamedArgNode namedArg : errorConstructorExpr.getNamedArgs()) {
        BLangLiteral keyLiteral = new BLangLiteral();
        keyLiteral.value = namedArg.getName().value;
        keyLiteral.setBType(symTable.stringType);

        BLangRecordKeyValueField field =
                (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
        field.key = new BLangRecordKey(keyLiteral);
        field.valueExpr = (BLangExpression) namedArg.getExpression();
        recordLiteral.fields.add(field);
    }
    return recordLiteral;
}
// Collects the error types an `error(...)` constructor could produce: the explicit
// type reference when given, otherwise error types derived from the expected type,
// falling back to the generic error type.
private List<BType> getTypeCandidatesForErrorConstructor(BLangErrorConstructorExpr errorConstructorExpr) {
    BLangUserDefinedType errorTypeRef = errorConstructorExpr.errorTypeRef;
    if (errorTypeRef != null) {
        // An explicit `error MyError(...)` reference: use it if it really is an error type.
        if (errorTypeRef.getBType().tag == TypeTags.ERROR) {
            return List.of(errorTypeRef.getBType());
        }
        if (errorTypeRef.getBType().tag != TypeTags.SEMANTIC_ERROR) {
            dlog.error(errorTypeRef.pos, DiagnosticErrorCode.INVALID_ERROR_TYPE_REFERENCE, errorTypeRef);
        }
    } else if (expType.tag == TypeTags.ERROR) {
        return List.of(expType);
    } else if (types.isAssignable(expType, symTable.errorType) || expType.tag == TypeTags.UNION) {
        return expandExpectedErrorTypes(expType);
    }
    return List.of(symTable.errorType);
}
// Expands an expected type into the concrete error-type candidates it contains.
// For a union, each error-assignable member contributes one candidate; otherwise
// the type itself does (when error-assignable). Intersection candidates are
// unwrapped to their effective type.
private List<BType> expandExpectedErrorTypes(BType candidateType) {
    List<BType> expandedCandidates = new ArrayList<>();
    if (candidateType.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) candidateType).getMemberTypes()) {
            if (types.isAssignable(memberType, symTable.errorType)) {
                addErrorTypeCandidate(expandedCandidates, memberType);
            }
        }
    } else if (types.isAssignable(candidateType, symTable.errorType)) {
        addErrorTypeCandidate(expandedCandidates, candidateType);
    }
    return expandedCandidates;
}

// Adds `type` to the candidate list, unwrapping an intersection (e.g.
// `error & readonly`) to its effective type, which carries the error shape.
private void addErrorTypeCandidate(List<BType> expandedCandidates, BType type) {
    if (type.tag == TypeTags.INTERSECTION) {
        expandedCandidates.add(((BIntersectionType) type).effectiveType);
    } else {
        expandedCandidates.add(type);
    }
}
public void visit(BLangInvocation.BLangActionInvocation aInv) {
    // Type-checks an action invocation (`obj->method(...)`). Without a receiver it
    // is a plain function call; otherwise the receiver must be an object (or a
    // record holding a function pointer).
    if (aInv.expr == null) {
        checkFunctionInvocationExpr(aInv);
        return;
    }
    if (invalidModuleAliasUsage(aInv)) {
        return;
    }
    checkExpr(aInv.expr, this.env, symTable.noType);
    BLangExpression varRef = aInv.expr;
    switch (varRef.getBType().tag) {
        case TypeTags.OBJECT:
            checkActionInvocation(aInv, (BObjectType) varRef.getBType());
            break;
        case TypeTags.RECORD:
            checkFieldFunctionPointer(aInv, this.env);
            break;
        case TypeTags.NONE:
            dlog.error(aInv.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, aInv.name);
            resultType = symTable.semanticError;
            break;
        case TypeTags.SEMANTIC_ERROR:
            // NOTE(review): falls through to default, so an already-errored receiver
            // also reports INVALID_ACTION_INVOCATION — unlike visit(BLangInvocation),
            // which stays silent on SEMANTIC_ERROR. Confirm this is intended.
        default:
            dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType());
            resultType = symTable.semanticError;
            break;
    }
}
// Method and action calls may not be qualified with a module alias
// (e.g. `mod:obj.foo()`); reports an error and returns true if one is present.
private boolean invalidModuleAliasUsage(BLangInvocation invocation) {
    if (names.fromIdNode(invocation.pkgAlias) == Names.EMPTY) {
        return false;
    }
    dlog.error(invocation.pos, DiagnosticErrorCode.PKG_ALIAS_NOT_ALLOWED_HERE);
    return true;
}
public void visit(BLangLetExpression letExpression) {
    // Creates a dedicated scope for the let expression, analyzes every let
    // declaration within it, then checks the result expression against the
    // expected type in that scope. `letCount` keeps generated symbol names unique.
    BLetSymbol letSymbol = new BLetSymbol(SymTag.LET, Flags.asMask(new HashSet<>(Lists.of())),
            new Name(String.format("$let_symbol_%d$", letCount++)),
            env.enclPkg.symbol.pkgID, letExpression.getBType(), env.scope.owner,
            letExpression.pos);
    letExpression.env = SymbolEnv.createExprEnv(letExpression, env, letSymbol);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letExpression.env);
    }
    BType exprType = checkExpr(letExpression.expr, letExpression.env, this.expType);
    types.checkType(letExpression, exprType, this.expType);
}
// Resolves the invocation as a lang-lib method on the receiver type, then rejects
// modifying calls on readonly values and length-changing calls on fixed-size lists.
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
    if (langLibMethodSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION_IN_TYPE, iExpr.name.value,
                iExpr.expr.getBType());
        resultType = symTable.semanticError;
        return;
    }
    if (!checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol)) {
        checkIllegalStorageSizeChangeMethodCall(iExpr, varRefType);
    }
}
// Reports an error (and returns true) when a known modifier lang-lib function is
// invoked on a readonly receiver. `mergeJson` on non-maps and `strip` on xml
// receivers are exempt.
private boolean checkInvalidImmutableValueUpdate(BLangInvocation iExpr, BType varRefType,
                                                 BSymbol langLibMethodSymbol) {
    if (!Symbols.isFlagOn(varRefType.flags, Flags.READONLY)) {
        return false;
    }
    String packageId = langLibMethodSymbol.pkgID.name.value;
    String funcName = langLibMethodSymbol.name.value;
    // Only registered modifier functions of lang-lib modules count as updates.
    if (!modifierFunctions.containsKey(packageId) || !modifierFunctions.get(packageId).contains(funcName)) {
        return false;
    }
    if ((funcName.equals("mergeJson") && varRefType.tag != TypeTags.MAP)
            || (funcName.equals("strip") && TypeTags.isXMLTypeTag(varRefType.tag))) {
        return false;
    }
    dlog.error(iExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_READONLY_VALUE_OF_TYPE, varRefType);
    resultType = symTable.semanticError;
    return true;
}
// A list type is fixed-length when it is a sized array, a tuple without a rest
// type, or a union made up solely of such lists.
private boolean isFixedLengthList(BType type) {
    if (type.tag == TypeTags.ARRAY) {
        return ((BArrayType) type).state != BArrayState.OPEN;
    }
    if (type.tag == TypeTags.TUPLE) {
        return ((BTupleType) type).restType == null;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType member : ((BUnionType) type).getMemberTypes()) {
            if (!isFixedLengthList(member)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
// Rejects list-length-modifying lang-lib calls on fixed-length lists, and `shift`
// calls on tuples whose shape the operation would break.
private void checkIllegalStorageSizeChangeMethodCall(BLangInvocation iExpr, BType varRefType) {
    String invocationName = iExpr.name.getValue();
    if (!listLengthModifierFunctions.contains(invocationName)) {
        return;
    }
    DiagnosticErrorCode errorCode = null;
    if (isFixedLengthList(varRefType)) {
        errorCode = DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_LIST_SIZE;
    } else if (isShiftOnIncompatibleTuples(varRefType, invocationName)) {
        errorCode = DiagnosticErrorCode.ILLEGAL_FUNCTION_CHANGE_TUPLE_SHAPE;
    }
    if (errorCode != null) {
        dlog.error(iExpr.name.pos, errorCode, invocationName, varRefType);
        resultType = symTable.semanticError;
    }
}
// True when a `shift` call targets a tuple (or a union of tuples) whose member
// types differ from its rest type, so removing the head would change the shape.
private boolean isShiftOnIncompatibleTuples(BType varRefType, String invocationName) {
    if (!FUNCTION_NAME_SHIFT.equals(invocationName)) {
        return false;
    }
    if (varRefType.tag == TypeTags.TUPLE) {
        return hasDifferentTypeThanRest((BTupleType) varRefType);
    }
    if (varRefType.tag != TypeTags.UNION) {
        return false;
    }
    // Every union member must itself be such a shape-sensitive tuple.
    for (BType member : ((BUnionType) varRefType).getMemberTypes()) {
        if (member.tag != TypeTags.TUPLE || !hasDifferentTypeThanRest((BTupleType) member)) {
            return false;
        }
    }
    return true;
}
// True when some member type of the tuple differs from its rest type; with no
// rest type there is nothing to differ from.
private boolean hasDifferentTypeThanRest(BTupleType tupleType) {
    BType restType = tupleType.restType;
    if (restType == null) {
        return false;
    }
    for (BType member : tupleType.getTupleTypes()) {
        if (!types.isSameType(restType, member)) {
            return true;
        }
    }
    return false;
}
// Resolves `rec.f(...)` where `f` is a function-typed field of the receiver's
// type. Returns true only when the call was bound to such a field; otherwise a
// lang-lib fallback is attempted and an error reported.
private boolean checkFieldFunctionPointer(BLangInvocation iExpr, SymbolEnv env) {
    BType type = checkExpr(iExpr.expr, env);
    BLangIdentifier invocationIdentifier = iExpr.name;
    if (type == symTable.semanticError) {
        return false;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(invocationIdentifier),
            type.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        // No such field: the name may still be a lang-lib method on the receiver type.
        checkIfLangLibMethodExists(iExpr, type, iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_FIELD_IN_RECORD,
                invocationIdentifier, type);
        return false;
    }
    if (fieldSymbol.kind != SymbolKind.FUNCTION) {
        // Field exists but is not function-typed; again try the lang-lib fallback.
        checkIfLangLibMethodExists(iExpr, type, iExpr.pos, DiagnosticErrorCode.INVALID_METHOD_CALL_EXPR_ON_FIELD,
                fieldSymbol.type);
        return false;
    }
    iExpr.symbol = fieldSymbol;
    iExpr.setBType(((BInvokableSymbol) fieldSymbol).retType);
    checkInvocationParamAndReturnType(iExpr);
    iExpr.functionPointerInvocation = true;
    return true;
}
/**
 * Looks for a lang-lib method matching the invocation; when none exists, logs the supplied
 * diagnostic and marks the result as a semantic error. When one exists, only validates that
 * it does not update an immutable value.
 */
private void checkIfLangLibMethodExists(BLangInvocation iExpr, BType varRefType, Location pos,
                                            DiagnosticErrorCode errCode, Object... diagMsgArgs) {
        BSymbol langLibMethodSymbol = getLangLibMethod(iExpr, varRefType);
        if (langLibMethodSymbol != symTable.notFoundSymbol) {
            // A lang-lib method applies; ensure it is not mutating an immutable value.
            checkInvalidImmutableValueUpdate(iExpr, varRefType, langLibMethodSymbol);
            return;
        }
        dlog.error(pos, errCode, diagMsgArgs);
        resultType = symTable.semanticError;
    }
@Override
public void visit(BLangObjectConstructorExpression objectCtorExpression) {
        // Propagate type-id sets from the expected type onto the anonymous class generated for an
        // `object { ... }` constructor expression, then type-check it as a regular type-init.
        if (objectCtorExpression.referenceType == null && objectCtorExpression.expectedType != null) {
            BObjectType objectType = (BObjectType) objectCtorExpression.classNode.getBType();
            if (objectCtorExpression.expectedType.tag == TypeTags.OBJECT) {
                // Expected type is a single object type: copy its type-id set directly.
                BObjectType expObjType = (BObjectType) objectCtorExpression.expectedType;
                objectType.typeIdSet = expObjType.typeIdSet;
            } else if (objectCtorExpression.expectedType.tag != TypeTags.NONE) {
                // Otherwise the expected type must resolve to exactly one definite object type.
                if (!checkAndLoadTypeIdSet(objectCtorExpression.expectedType, objectType)) {
                    dlog.error(objectCtorExpression.pos, DiagnosticErrorCode.INVALID_TYPE_OBJECT_CONSTRUCTOR,
                            objectCtorExpression.expectedType);
                    resultType = symTable.semanticError;
                    return;
                }
            }
        }
        visit(objectCtorExpression.typeInit);
    }
/**
 * Returns true when the given type is an object type, or a union whose members are all
 * (transitively) object types, sharing at most one distinct type-id set. Collected type-id
 * sets are accumulated into {@code typeIdSets} for the caller.
 */
private boolean isDefiniteObjectType(BType type, Set<BTypeIdSet> typeIdSets) {
        boolean isObjectOrUnion = type.tag == TypeTags.OBJECT || type.tag == TypeTags.UNION;
        if (!isObjectOrUnion) {
            return false;
        }
        // Definite only when all reachable members are objects and share at most one id set.
        return collectObjectTypeIds(type, typeIdSets, new HashSet<>()) && typeIdSets.size() <= 1;
    }
/**
 * Recursively collects the type-id sets of all object types reachable through {@code type}.
 * Returns false as soon as a non-object, non-union member is found. {@code visitedTypes}
 * guards against cycles in recursive union definitions.
 */
private boolean collectObjectTypeIds(BType type, Set<BTypeIdSet> typeIdSets, Set<BType> visitedTypes) {
        switch (type.tag) {
            case TypeTags.OBJECT:
                typeIdSets.add(((BObjectType) type).typeIdSet);
                return true;
            case TypeTags.UNION:
                if (!visitedTypes.add(type)) {
                    // Already visited this union: cycle guard, treat as success.
                    return true;
                }
                for (BType member : ((BUnionType) type).getMemberTypes()) {
                    if (!collectObjectTypeIds(member, typeIdSets, visitedTypes)) {
                        return false;
                    }
                }
                return true;
            default:
                return false;
        }
    }
/**
 * Loads the type-id set of {@code type} onto {@code objectType} when {@code type} is a
 * definite object type (an object, or a union of objects sharing a single type-id set).
 *
 * @return false when {@code type} is not a definite object type; true otherwise
 */
private boolean checkAndLoadTypeIdSet(BType type, BObjectType objectType) {
        Set<BTypeIdSet> typeIdSets = new HashSet<>();
        if (!isDefiniteObjectType(type, typeIdSets)) {
            return false;
        }
        // isDefiniteObjectType guarantees at most one collected set, so the original
        // iterator-hasNext dance was dead code: either the set is empty or has one element.
        if (typeIdSets.isEmpty()) {
            objectType.typeIdSet = BTypeIdSet.emptySet();
        } else {
            objectType.typeIdSet = typeIdSets.iterator().next();
        }
        return true;
    }
/**
 * Type-checks a {@code new T(...)} / {@code new (...)} type-init expression. Resolves the
 * constructed type from the user-defined type reference (or from the expected type when
 * absent), dispatches by type kind (object / stream / union), validates the init invocation's
 * arguments, and leaves the checked type in {@code resultType}.
 */
public void visit(BLangTypeInit cIExpr) {
        // `new` with no type reference cannot be inferred from `any`, and records cannot be `new`-ed.
        if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
            dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_TYPE_NEW_LITERAL, expType);
            resultType = symTable.semanticError;
            return;
        }
        BType actualType;
        if (cIExpr.userDefinedType != null) {
            actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
        } else {
            // `new (...)` without a type: infer the constructed type from the expected type.
            actualType = expType;
        }
        if (actualType == symTable.semanticError) {
            resultType = symTable.semanticError;
            return;
        }
        // `new` on an intersection constructs its effective type.
        if (actualType.tag == TypeTags.INTERSECTION) {
            actualType = ((BIntersectionType) actualType).effectiveType;
        }
        switch (actualType.tag) {
            case TypeTags.OBJECT:
                BObjectType actualObjectType = (BObjectType) actualType;
                // An object-constructor expression carries its own anonymous class definition,
                // which must be analyzed (with read-only handling) before checking the init call.
                if (isObjectConstructorExpr(cIExpr, actualObjectType)) {
                    BLangClassDefinition classDefForConstructor = getClassDefinitionForObjectConstructorExpr(cIExpr,
                            env);
                    List<BLangType> typeRefs = classDefForConstructor.typeRefs;
                    SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);
                    if (Symbols.isFlagOn(expType.flags, Flags.READONLY)) {
                        handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                                false);
                    } else if (!typeRefs.isEmpty() && Symbols.isFlagOn(typeRefs.get(0).getBType().flags,
                            Flags.READONLY)) {
                        handleObjectConstrExprForReadOnly(cIExpr, actualObjectType, classDefForConstructor, pkgEnv,
                                true);
                    } else {
                        analyzeObjectConstructor(classDefForConstructor, pkgEnv);
                    }
                    markConstructedObjectIsolatedness(actualObjectType);
                }
                // Only classes can be instantiated; abstract object types cannot.
                if ((actualType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                            actualType.tsymbol);
                    // Still check arguments so nested errors are reported.
                    cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                    resultType = symTable.semanticError;
                    return;
                }
                if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                    cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                    checkInvocationParam(cIExpr.initInvocation);
                    cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                } else {
                    // No `init` method: arguments (if any) are an error, reported by the helper.
                    if (!isValidInitInvocation(cIExpr, (BObjectType) actualType)) {
                        return;
                    }
                }
                break;
            case TypeTags.STREAM:
                // `new stream(...)` takes at most one argument: the iterator object.
                if (cIExpr.initInvocation.argExprs.size() > 1) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR, cIExpr.initInvocation);
                    resultType = symTable.semanticError;
                    return;
                }
                BStreamType actualStreamType = (BStreamType) actualType;
                // The stream's completion type must be nil or contain an error type.
                if (actualStreamType.completionType != null) {
                    BType completionType = actualStreamType.completionType;
                    if (completionType.tag != symTable.nilType.tag && !types.containsErrorType(completionType)) {
                        dlog.error(cIExpr.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED, completionType.toString());
                        resultType = symTable.semanticError;
                        return;
                    }
                }
                if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                    BLangExpression iteratorExpr = cIExpr.initInvocation.argExprs.get(0);
                    BType constructType = checkExpr(iteratorExpr, env, symTable.noType);
                    BUnionType expectedNextReturnType = createNextReturnType(cIExpr.pos, (BStreamType) actualType);
                    if (constructType.tag != TypeTags.OBJECT) {
                        dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                expectedNextReturnType, constructType);
                        resultType = symTable.semanticError;
                        return;
                    }
                    // An iterator with a close() method must satisfy the closeable-iterator
                    // abstract type; otherwise the plain iterator abstract type suffices.
                    BAttachedFunction closeFunc = types.getAttachedFuncFromObject((BObjectType) constructType,
                            BLangCompilerConstants.CLOSE_FUNC);
                    if (closeFunc != null) {
                        BType closeableIteratorType = symTable.langQueryModuleSymbol.scope
                                .lookup(Names.ABSTRACT_STREAM_CLOSEABLE_ITERATOR).symbol.type;
                        if (!types.isAssignable(constructType, closeableIteratorType)) {
                            dlog.error(iteratorExpr.pos,
                                    DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_CLOSEABLE_ITERATOR,
                                    expectedNextReturnType, constructType);
                            resultType = symTable.semanticError;
                            return;
                        }
                    } else {
                        BType iteratorType = symTable.langQueryModuleSymbol.scope
                                .lookup(Names.ABSTRACT_STREAM_ITERATOR).symbol.type;
                        if (!types.isAssignable(constructType, iteratorType)) {
                            dlog.error(iteratorExpr.pos, DiagnosticErrorCode.INVALID_STREAM_CONSTRUCTOR_ITERATOR,
                                    expectedNextReturnType, constructType);
                            resultType = symTable.semanticError;
                            return;
                        }
                    }
                    // The iterator's next() return type must match the stream's element/completion types.
                    BUnionType nextReturnType = types.getVarTypeFromIteratorFuncReturnType(constructType);
                    if (nextReturnType != null) {
                        types.checkType(iteratorExpr.pos, nextReturnType, expectedNextReturnType,
                                DiagnosticErrorCode.INCOMPATIBLE_TYPES);
                    } else {
                        dlog.error(constructType.tsymbol.getPosition(),
                                DiagnosticErrorCode.INVALID_NEXT_METHOD_RETURN_TYPE, expectedNextReturnType);
                    }
                }
                if (this.expType.tag != TypeTags.NONE && !types.isAssignable(actualType, this.expType)) {
                    dlog.error(cIExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, this.expType,
                            actualType);
                    resultType = symTable.semanticError;
                    return;
                }
                resultType = actualType;
                return;
            case TypeTags.UNION:
                // Inferred from a union LHS: exactly one member must match the init arguments.
                List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
                BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
                cIExpr.initInvocation.setBType(symTable.nilType);
                if (matchedType.tag == TypeTags.OBJECT) {
                    if (((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                        cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                        checkInvocationParam(cIExpr.initInvocation);
                        cIExpr.initInvocation.setBType(((BInvokableSymbol) cIExpr.initInvocation.symbol).retType);
                        actualType = matchedType;
                        break;
                    } else {
                        if (!isValidInitInvocation(cIExpr, (BObjectType) matchedType)) {
                            return;
                        }
                    }
                }
                types.checkType(cIExpr, matchedType, expType);
                cIExpr.setBType(matchedType);
                resultType = matchedType;
                return;
            default:
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
                resultType = symTable.semanticError;
                return;
        }
        if (cIExpr.initInvocation.getBType() == null) {
            cIExpr.initInvocation.setBType(symTable.nilType);
        }
        // The constructed type may be widened with the init method's error returns.
        BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.getBType());
        resultType = types.checkType(cIExpr, actualTypeInitType, expType);
    }
/**
 * Builds the expected return type of a stream iterator's {@code next()} method for the given
 * stream type: {@code record {| T value; |} | C | ()} where {@code T} is the stream's
 * constraint and {@code C} its completion type.
 *
 * @param pos        the position used for the synthesized symbols
 * @param streamType the stream type whose constraint/completion types are used
 * @return the synthesized union type
 */
private BUnionType createNextReturnType(Location pos, BStreamType streamType) {
        // Anonymous sealed record: record {| T value; |}
        BRecordType recordType = new BRecordType(null, Flags.ANONYMOUS);
        recordType.restFieldType = symTable.noType;
        recordType.sealed = true;
        Name fieldName = Names.VALUE;
        BField field = new BField(fieldName, pos, new BVarSymbol(Flags.PUBLIC,
                fieldName, env.enclPkg.packageID,
                streamType.constraint, env.scope.owner, pos, VIRTUAL));
        field.type = streamType.constraint;
        recordType.fields.put(field.name.value, field);
        recordType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID,
                recordType, env.scope.owner, pos, VIRTUAL);
        recordType.tsymbol.scope = new Scope(env.scope.owner);
        recordType.tsymbol.scope.define(fieldName, field.symbol);
        // Union members: the record, all completion-type members, and nil.
        LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
        retTypeMembers.add(recordType);
        retTypeMembers.addAll(types.getAllTypes(streamType.completionType));
        retTypeMembers.add(symTable.nilType);
        BUnionType unionType = BUnionType.create(null);
        unionType.addAll(retTypeMembers);
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
                env.enclPkg.symbol.pkgID, unionType, env.scope.owner, pos, VIRTUAL);
        return unionType;
    }
/**
 * Validates an init invocation against an object type that has no {@code init} method:
 * supplying any arguments is an error. Logs the error, checks the arguments for nested
 * errors, and marks the result as a semantic error when invalid.
 *
 * @return true when the invocation is valid for the object type
 */
private boolean isValidInitInvocation(BLangTypeInit cIExpr, BObjectType objType) {
        boolean hasArgs = !cIExpr.initInvocation.argExprs.isEmpty();
        boolean hasInitFunc = ((BObjectTypeSymbol) objType.tsymbol).initializerFunc != null;
        if (hasArgs && !hasInitFunc) {
            dlog.error(cIExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL,
                    cIExpr.initInvocation.name.value);
            // Still type-check the arguments so nested errors are reported.
            for (BLangExpression expr : cIExpr.initInvocation.argExprs) {
                checkExpr(expr, env, symTable.noType);
            }
            resultType = symTable.semanticError;
            return false;
        }
        return true;
    }
/**
 * Computes the type a {@code new} expression evaluates to, given the constructed object type
 * and the {@code init} method's return type: the object itself when init returns nil, or a
 * union of the object and init's non-nil members when init may return errors.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
        if (initRetType.tag == TypeTags.NIL) {
            // init returns nil: the constructor yields the object type itself.
            return objType;
        }
        if (initRetType.tag != TypeTags.UNION) {
            return symTable.semanticError;
        }
        // init returns error?|...: replace nil in the union with the object type.
        LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
        retTypeMembers.add(objType);
        retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes());
        retTypeMembers.remove(symTable.nilType);
        BUnionType unionType = BUnionType.create(null, retTypeMembers);
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
                Names.EMPTY, env.enclPkg.symbol.pkgID, unionType,
                env.scope.owner, symTable.builtinPos, VIRTUAL);
        return unionType;
    }
/**
 * Finds the object members of a union LHS type whose {@code init} method signature matches
 * the arguments of the {@code new (...)} expression. When the union contains exactly one
 * object member, that member is returned without argument matching.
 *
 * @return the matching member types (possibly empty; a singleton short-circuits matching)
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
        int objectCount = 0;
        // First pass: count object members, including objects behind intersections.
        for (BType memberType : lhsUnionType.getMemberTypes()) {
            int tag = memberType.tag;
            if (tag == TypeTags.OBJECT) {
                objectCount++;
                continue;
            }
            if (tag != TypeTags.INTERSECTION) {
                continue;
            }
            if (((BIntersectionType) memberType).effectiveType.tag == TypeTags.OBJECT) {
                objectCount++;
            }
        }
        boolean containsSingleObject = objectCount == 1;
        List<BType> matchingLhsMemberTypes = new ArrayList<>();
        // Second pass: match init signatures against the supplied arguments.
        // NOTE(review): this pass only inspects direct OBJECT members, while the count above
        // also includes intersection members — confirm intersections are intentionally
        // excluded from matching here.
        for (BType memberType : lhsUnionType.getMemberTypes()) {
            if (memberType.tag != TypeTags.OBJECT) {
                continue;
            }
            if ((memberType.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
                // Abstract object members of the union cannot be instantiated.
                dlog.error(cIExpr.pos, DiagnosticErrorCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT,
                        lhsUnionType.tsymbol);
            }
            if (containsSingleObject) {
                // Unambiguous: the sole object member is the constructed type.
                return Collections.singletonList(memberType);
            }
            BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
            if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
                matchingLhsMemberTypes.add(memberType);
            }
        }
        return matchingLhsMemberTypes;
    }
/**
 * Resolves the single constructed type from the matching union members. Zero matches or
 * more than one match is an error (cannot infer / ambiguous, respectively), in which case
 * the result is marked as a semantic error.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
        // Exactly one matching member is required to infer the constructed type.
        if (matchingLhsMembers.size() == 1) {
            return matchingLhsMembers.get(0).tsymbol.type;
        }
        DiagnosticErrorCode errorCode = matchingLhsMembers.isEmpty()
                ? DiagnosticErrorCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
                : DiagnosticErrorCode.AMBIGUOUS_TYPES;
        dlog.error(cIExpr.pos, errorCode, lhsUnion);
        resultType = symTable.semanticError;
        return symTable.semanticError;
    }
/**
 * Checks whether the given invocation arguments (positional and named) are compatible with
 * the parameter list of the given attached function ({@code init} of a union member).
 * All arguments are type-checked up front so nested errors are reported regardless of match.
 *
 * @param invocationArguments the arguments supplied in the `new (...)` expression
 * @param function            the candidate init function; may be null (then only an empty
 *                            argument list matches)
 * @return true when every argument binds to a parameter and no required parameter is left over
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
        invocationArguments.forEach(expr -> checkExpr(expr, env, symTable.noType));
        if (function == null) {
            return invocationArguments.isEmpty();
        }
        if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
            return true;
        }
        // Split arguments into named and positional.
        List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
        List<BLangExpression> positionalArgs = new ArrayList<>();
        for (BLangExpression argument : invocationArguments) {
            if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
                namedArgs.add((BLangNamedArgsExpression) argument);
            } else {
                positionalArgs.add(argument);
            }
        }
        List<BVarSymbol> requiredParams = function.symbol.params.stream()
                .filter(param -> !param.isDefaultable)
                .collect(Collectors.toList());
        // Quick reject: fewer arguments than required parameters can never match.
        if (requiredParams.size() > invocationArguments.size()) {
            return false;
        }
        List<BVarSymbol> defaultableParams = function.symbol.params.stream()
                .filter(param -> param.isDefaultable)
                .collect(Collectors.toList());
        int givenRequiredParamCount = 0;
        // Bind positional arguments left-to-right; overflow goes to the rest parameter.
        for (int i = 0; i < positionalArgs.size(); i++) {
            if (function.symbol.params.size() > i) {
                givenRequiredParamCount++;
                BVarSymbol functionParam = function.symbol.params.get(i);
                if (!types.isAssignable(positionalArgs.get(i).getBType(), functionParam.type)) {
                    return false;
                }
                requiredParams.remove(functionParam);
                defaultableParams.remove(functionParam);
                continue;
            }
            if (function.symbol.restParam != null) {
                BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
                if (!types.isAssignable(positionalArgs.get(i).getBType(), restParamType)) {
                    return false;
                }
                continue;
            }
            // More positional args than parameters and no rest parameter.
            return false;
        }
        // Bind named arguments to the remaining (not positionally bound) parameters.
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            boolean foundNamedArg = false;
            List<BVarSymbol> params = function.symbol.params;
            for (int i = givenRequiredParamCount; i < params.size(); i++) {
                BVarSymbol functionParam = params.get(i);
                if (!namedArg.name.value.equals(functionParam.name.value)) {
                    continue;
                }
                foundNamedArg = true;
                BType namedArgExprType = checkExpr(namedArg.expr, env);
                // NOTE(review): the argument order here (param type as source, arg type as
                // target) is the reverse of the positional-arg check above — confirm this
                // direction is intentional.
                if (!types.isAssignable(functionParam.type, namedArgExprType)) {
                    return false;
                }
                requiredParams.remove(functionParam);
                defaultableParams.remove(functionParam);
            }
            if (!foundNamedArg) {
                return false;
            }
        }
        // A match requires every required parameter to have been bound.
        return requiredParams.size() <= 0;
    }
/**
 * Type-checks a wait-for-all expression ({@code wait {a: f1, b: f2}}) against the expected
 * type. A record expected type checks field-by-field; a map (or no/any) expected type builds
 * a map whose constraint is the union of the futures' constraint types.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
        switch (expType.tag) {
            case TypeTags.RECORD:
                checkTypesForRecords(waitForAllExpr);
                break;
            case TypeTags.MAP:
                // Each value must be a future of the map's constraint type.
                checkTypesForMap(waitForAllExpr, ((BMapType) expType).constraint);
                resultType = getWaitForAllExprMapType(waitForAllExpr.keyValuePairs);
                break;
            case TypeTags.NONE:
            case TypeTags.ANY:
                checkTypesForMap(waitForAllExpr, expType);
                resultType = getWaitForAllExprMapType(waitForAllExpr.keyValuePairs);
                break;
            default:
                dlog.error(waitForAllExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                        getWaitForAllExprReturnType(waitForAllExpr, waitForAllExpr.pos));
                resultType = symTable.semanticError;
                break;
        }
        waitForAllExpr.setBType(resultType);
        if (resultType != null && resultType != symTable.semanticError) {
            types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.getBType(), expType);
        }
    }

    /**
     * Builds the map type produced by a wait-for-all expression: the constraint is the single
     * collected member type, or a union of all collected member types. Extracted from the
     * previously duplicated MAP and NONE/ANY branches of {@code visit(BLangWaitForAllExpr)}.
     */
    private BMapType getWaitForAllExprMapType(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs) {
        LinkedHashSet<BType> memberTypes = collectWaitExprTypes(keyValuePairs);
        BType constraint = memberTypes.size() == 1
                ? memberTypes.iterator().next()
                : BUnionType.create(null, memberTypes);
        return new BMapType(TypeTags.MAP, constraint, symTable.mapType.tsymbol);
    }
/**
 * Synthesizes the record type a wait-for-all expression would naturally produce, used only
 * for diagnostics when the expected type is incompatible. Each key becomes a field whose
 * type is the referenced future's constraint (or the symbol's own type when not a future).
 */
private BRecordType getWaitForAllExprReturnType(BLangWaitForAllExpr waitExpr,
                                                    Location pos) {
        BRecordType retType = new BRecordType(null, Flags.ANONYMOUS);
        List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals = waitExpr.keyValuePairs;
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
            BLangIdentifier fieldName;
            // For `{a: b}` the looked-up symbol is `b`; for shorthand `{a}` it is `a` itself.
            if (keyVal.valueExpr == null || keyVal.valueExpr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
                fieldName = keyVal.key;
            } else {
                fieldName = ((BLangSimpleVarRef) keyVal.valueExpr).variableName;
            }
            BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode(fieldName));
            // Unwrap futures to their constraint type.
            BType fieldType = symbol.type.tag == TypeTags.FUTURE ? ((BFutureType) symbol.type).constraint : symbol.type;
            BField field = new BField(names.fromIdNode(keyVal.key), null,
                    new BVarSymbol(0, names.fromIdNode(keyVal.key), env.enclPkg.packageID,
                            fieldType, null, keyVal.pos, VIRTUAL));
            retType.fields.put(field.name.value, field);
        }
        retType.restFieldType = symTable.noType;
        retType.sealed = true;
        retType.tsymbol = Symbols.createRecordSymbol(Flags.ANONYMOUS, Names.EMPTY, env.enclPkg.packageID, retType, null,
                pos, VIRTUAL);
        return retType;
    }
/**
 * Collects the eventual value type of every key-value entry of a wait-for-all expression,
 * unwrapping future types to their constraint. Insertion order is preserved.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
        LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
            // Shorthand entries carry the type on the key expression; others on the value.
            BType bType = keyVal.keyExpr != null ? keyVal.keyExpr.getBType() : keyVal.valueExpr.getBType();
            memberTypes.add(bType.tag == TypeTags.FUTURE ? ((BFutureType) bType).constraint : bType);
        }
        return memberTypes;
    }
/**
 * Checks every key-value entry of a wait-for-all expression against the given expected
 * member type (the map's constraint type).
 */
private void checkTypesForMap(BLangWaitForAllExpr waitForAllExpr, BType expType) {
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : waitForAllExpr.keyValuePairs) {
            checkWaitKeyValExpr(keyVal, expType);
        }
    }
/**
 * Type-checks the key-value pairs of a wait-for-all expression against an expected record
 * type: known fields against their declared type, unknown fields against the rest field type
 * (closed records reject them), and finally verifies no required field is missing.
 * Sets {@code resultType} to the expected type on success, semanticError otherwise.
 */
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
        List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
        // Hoisted: expType was re-cast to BRecordType five times below.
        BRecordType expRecordType = (BRecordType) expType;
        Map<String, BField> lhsFields = expRecordType.fields;
        // A sealed (closed) record cannot accept more fields than it declares.
        if (expRecordType.sealed && rhsFields.size() > lhsFields.size()) {
            dlog.error(waitExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
                    getWaitForAllExprReturnType(waitExpr, waitExpr.pos));
            resultType = symTable.semanticError;
            return;
        }
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
            String key = keyVal.key.value;
            if (!lhsFields.containsKey(key)) {
                if (expRecordType.sealed) {
                    // Unknown field on a closed record.
                    dlog.error(waitExpr.pos, DiagnosticErrorCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
                    resultType = symTable.semanticError;
                } else {
                    // Open record: unknown fields are checked against the rest field type.
                    checkWaitKeyValExpr(keyVal, expRecordType.restFieldType);
                }
            } else {
                checkWaitKeyValExpr(keyVal, lhsFields.get(key).type);
            }
        }
        checkMissingReqFieldsForWait(expRecordType, rhsFields, waitExpr.pos);
        if (symTable.semanticError != resultType) {
            resultType = expType;
        }
    }
/**
 * Reports a MISSING_REQUIRED_RECORD_FIELD error for every required field of the record type
 * that has no corresponding key in the wait-for-all expression's key-value pairs.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                              Location pos) {
        for (BField field : type.fields.values()) {
            // Only required fields must be supplied.
            if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
                continue;
            }
            boolean hasField = false;
            for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValPairs) {
                if (field.name.value.equals(keyVal.key.value)) {
                    hasField = true;
                    break;
                }
            }
            if (!hasField) {
                dlog.error(pos, DiagnosticErrorCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
            }
        }
    }
/**
 * Type-checks a single wait-for-all key-value entry against the expected member type:
 * the referenced expression must be a {@code future<type>}, after which its eventual type
 * (possibly widened with error) is recorded.
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
        BLangExpression expr;
        if (keyVal.keyExpr != null) {
            // Shorthand entry `{a}`: resolve the key itself as the waited-on variable.
            BSymbol symbol = symResolver.lookupSymbolInMainSpace(env, names.fromIdNode
                    (((BLangSimpleVarRef) keyVal.keyExpr).variableName));
            keyVal.keyExpr.setBType(symbol.type);
            expr = keyVal.keyExpr;
        } else {
            expr = keyVal.valueExpr;
        }
        // The waited-on expression must be a future of the expected member type.
        BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null);
        checkExpr(expr, env, futureType);
        setEventualTypeForExpression(expr, type);
    }
/**
 * Widens the constraint of a waited-on future expression to include {@code error}, since
 * waiting on a non-worker future may surface a panic as an error value. Skips worker
 * references (their futures cannot fail this way) and constraints already containing error.
 * Reports an error when the widened type no longer fits the expected type.
 */
private void setEventualTypeForExpression(BLangExpression expression,
                                              BType currentExpectedType) {
        if (expression == null) {
            return;
        }
        // Simple worker references keep their original constraint.
        if (isSimpleWorkerReference(expression)) {
            return;
        }
        // checkWaitKeyValExpr always checks against a BFutureType, so this cast holds.
        BFutureType futureType = (BFutureType) expression.expectedType;
        BType currentType = futureType.constraint;
        if (types.containsErrorType(currentType)) {
            return;
        }
        BUnionType eventualType = BUnionType.create(null, currentType, symTable.errorType);
        if (((currentExpectedType.tag != TypeTags.NONE) && (currentExpectedType.tag != TypeTags.NIL)) &&
                !types.isAssignable(eventualType, currentExpectedType)) {
            dlog.error(expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR,
                    currentExpectedType, eventualType, expression);
        }
        // Widen in place: the future's constraint now includes error.
        futureType.constraint = eventualType;
    }
/**
 * Widens {@code resultType} of a plain wait expression to include {@code error}, unless the
 * waited-on expression is a worker reference or the result already contains an error type.
 * Reports an error and marks the result as semanticError when the widened type does not fit
 * the expected future constraint.
 */
private void setEventualTypeForWaitExpression(BLangExpression expression,
                                                  Location pos) {
        // Nothing to widen on prior failure or when an error type is already present.
        if ((resultType == symTable.semanticError) ||
                (types.containsErrorType(resultType))) {
            return;
        }
        // Worker references cannot surface a panic as an error value.
        if (isSimpleWorkerReference(expression)) {
            return;
        }
        BType currentExpectedType = ((BFutureType) expType).constraint;
        BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
        // With no concrete expectation, the widened union becomes the result.
        if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
            resultType = eventualType;
            return;
        }
        if (!types.isAssignable(eventualType, currentExpectedType)) {
            dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                    eventualType, expression);
            resultType = symTable.semanticError;
            return;
        }
        if (resultType.tag == TypeTags.FUTURE) {
            // Widen the future's constraint in place.
            ((BFutureType) resultType).constraint = eventualType;
        } else {
            resultType = eventualType;
        }
    }
/**
 * Widens {@code resultType} of an alternate wait expression ({@code wait f1 | f2}) to
 * include {@code error} when any alternative references a non-worker future. Mirrors
 * {@link #setEventualTypeForWaitExpression} but is gated on the expression being a binary
 * (alternate) wait that involves a non-worker reference.
 */
private void setEventualTypeForAlternateWaitExpression(BLangExpression expression, Location pos) {
        // Nothing to widen on prior failure, a non-alternate wait, or a result that already
        // contains an error type. (The original duplicated the containsErrorType check in a
        // second, redundant guard — removed.)
        if ((resultType == symTable.semanticError) ||
                (expression.getKind() != NodeKind.BINARY_EXPR) ||
                (types.containsErrorType(resultType))) {
            return;
        }
        // Only alternatives referencing non-worker futures can surface a panic as an error.
        if (!isReferencingNonWorker((BLangBinaryExpr) expression)) {
            return;
        }
        BType currentExpectedType = ((BFutureType) expType).constraint;
        BUnionType eventualType = BUnionType.create(null, resultType, symTable.errorType);
        // With no concrete expectation, the widened union becomes the result.
        if ((currentExpectedType.tag == TypeTags.NONE) || (currentExpectedType.tag == TypeTags.NIL)) {
            resultType = eventualType;
            return;
        }
        if (!types.isAssignable(eventualType, currentExpectedType)) {
            dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_WAIT_FUTURE_EXPR, currentExpectedType,
                    eventualType, expression);
            resultType = symTable.semanticError;
            return;
        }
        if (resultType.tag == TypeTags.FUTURE) {
            // Widen the future's constraint in place.
            ((BFutureType) resultType).constraint = eventualType;
        } else {
            resultType = eventualType;
        }
    }
/**
 * Returns true when the expression is a simple variable reference whose symbol resolved and
 * whose name denotes a worker in the current environment.
 */
private boolean isSimpleWorkerReference(BLangExpression expression) {
        if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
            return false;
        }
        BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) expression;
        // An unresolved reference cannot be a worker.
        if (simpleVarRef.symbol == null) {
            return false;
        }
        // Idiom fix: `if (x) return true; return false;` collapsed to `return x;`.
        return workerExists(env, simpleVarRef.variableName.value);
    }
/**
 * Returns true when either side of the alternate-wait binary expression (transitively)
 * references something other than a worker.
 */
private boolean isReferencingNonWorker(BLangBinaryExpr binaryExpr) {
        // Short-circuits on the left-hand side, like the original two-step check.
        return isReferencingNonWorker(binaryExpr.lhsExpr) || isReferencingNonWorker(binaryExpr.rhsExpr);
    }
/**
 * Returns true when the expression references something other than a worker: binary
 * expressions recurse into both sides; a simple variable reference to a known worker
 * returns false; everything else counts as a non-worker reference.
 */
private boolean isReferencingNonWorker(BLangExpression expression) {
        NodeKind kind = expression.getKind();
        if (kind == NodeKind.BINARY_EXPR) {
            // Nested alternate: recurse into both sides.
            return isReferencingNonWorker((BLangBinaryExpr) expression);
        }
        if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
            BSymbol varRefSymbol = ((BLangSimpleVarRef) expression).symbol;
            if (workerExists(env, varRefSymbol.getName().value)) {
                return false;
            }
        }
        return true;
    }
/**
 * Type-checks a ternary (conditional) expression. The condition narrows types in the then/
 * else branches. With no expected type, the result is whichever branch type the other is
 * assignable to; incompatible branch types are an error.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
        BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
        // The condition's truth/falsity narrows variable types within each branch.
        SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
        BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);
        SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env);
        BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);
        if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
                elseType == symTable.semanticError) {
            resultType = symTable.semanticError;
        } else if (expType == symTable.noType) {
            // No expected type: pick the wider of the two branch types.
            if (types.isAssignable(elseType, thenType)) {
                resultType = thenType;
            } else if (types.isAssignable(thenType, elseType)) {
                resultType = elseType;
            } else {
                dlog.error(ternaryExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, thenType, elseType);
                resultType = symTable.semanticError;
            }
        } else {
            resultType = expType;
        }
    }
/**
 * Type-checks a {@code wait} expression: the operand is checked against {@code future<T>}
 * where T is the expected type, futures in union results are unwrapped to their constraints,
 * and the result may be widened with {@code error} for non-worker futures.
 */
public void visit(BLangWaitExpr waitExpr) {
        // Check the operand against future<expType>.
        expType = new BFutureType(TypeTags.FUTURE, expType, null);
        checkExpr(waitExpr.getExpression(), env, expType);
        if (resultType.tag == TypeTags.UNION) {
            // Unwrap future members to their constraints; collapse a singleton union.
            LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
            if (memberTypes.size() == 1) {
                resultType = memberTypes.toArray(new BType[0])[0];
            } else {
                resultType = BUnionType.create(null, memberTypes);
            }
        } else if (resultType != symTable.semanticError) {
            // Non-union result: unwrap the single future's constraint.
            resultType = ((BFutureType) resultType).constraint;
        }
        BLangExpression waitFutureExpression = waitExpr.getExpression();
        // Alternate waits (f1 | f2) are parsed as binary expressions.
        if (waitFutureExpression.getKind() == NodeKind.BINARY_EXPR) {
            setEventualTypeForAlternateWaitExpression(waitFutureExpression, waitExpr.pos);
        } else {
            setEventualTypeForWaitExpression(waitFutureExpression, waitExpr.pos);
        }
        waitExpr.setBType(resultType);
        if (resultType != null && resultType != symTable.semanticError) {
            types.setImplicitCastExpr(waitExpr, waitExpr.getBType(), ((BFutureType) expType).constraint);
        }
    }
/**
 * Adds the union's member types to {@code memberTypes}, unwrapping future members to their
 * constraint type, and returns the accumulator.
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
        for (BType memberType : unionType.getMemberTypes()) {
            memberTypes.add(memberType.tag == TypeTags.FUTURE
                    ? ((BFutureType) memberType).constraint
                    : memberType);
        }
        return memberTypes;
    }
/**
 * Type-checks a {@code trap} expression: the result is the trapped expression's type widened
 * with {@code error}. Worker-receive operands use a two-pass protocol — the first visit
 * defers (isTypeChecked = false) until the receive's type is known.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
        // A null operand type marks the first visit of a two-pass worker-receive check.
        boolean firstVisit = trapExpr.expr.getBType() == null;
        BType actualType;
        BType exprType = checkExpr(trapExpr.expr, env, expType);
        boolean definedWithVar = expType == symTable.noType;
        if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
            if (firstVisit) {
                // Defer: the worker receive's type is resolved on a later pass.
                isTypeChecked = false;
                resultType = expType;
                return;
            } else {
                expType = trapExpr.getBType();
                exprType = trapExpr.expr.getBType();
            }
        }
        if (expType == symTable.semanticError || exprType == symTable.semanticError) {
            actualType = symTable.semanticError;
        } else {
            // trap T produces T|error; flatten unions before adding error.
            LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
            if (exprType.tag == TypeTags.UNION) {
                resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
            } else {
                resultTypes.add(exprType);
            }
            resultTypes.add(symTable.errorType);
            actualType = BUnionType.create(null, resultTypes);
        }
        resultType = types.checkType(trapExpr, actualType, expType);
        if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
            types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.getBType(), resultType);
        }
    }
/**
 * Type-checks a binary expression. Handles three special shapes before the generic operator
 * resolution: alternate-wait futures ({@code f1 | f2} with a future expected type), decimal
 * literal widening for arithmetic, and xml concatenation via {@code +}. The generic path
 * tries the operator resolvers in order until one produces a symbol.
 */
public void visit(BLangBinaryExpr binaryExpr) {
        // `f1 | f2` under a future expected type is an alternate wait, not bitwise-or.
        if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
            BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
            BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
            if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
                resultType = symTable.semanticError;
                return;
            }
            resultType = BUnionType.create(null, lhsResultType, rhsResultType);
            return;
        }
        // Numeric literals under a decimal expected type are re-checked as decimal.
        checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);
        SymbolEnv rhsExprEnv;
        BType lhsType;
        // For float/decimal expectations, speculatively check operands against the expected
        // type first (checkAndGetType), falling back to unconstrained checking on failure.
        if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) {
            lhsType = checkAndGetType(binaryExpr.lhsExpr, env, binaryExpr);
        } else {
            lhsType = checkExpr(binaryExpr.lhsExpr, env);
        }
        // `&&` narrows the RHS by the LHS's truth; `||` by its falsity.
        if (binaryExpr.opKind == OperatorKind.AND) {
            rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env, true);
        } else if (binaryExpr.opKind == OperatorKind.OR) {
            rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
        } else {
            rhsExprEnv = env;
        }
        BType rhsType;
        if (binaryExpr.expectedType.tag == TypeTags.FLOAT || binaryExpr.expectedType.tag == TypeTags.DECIMAL) {
            rhsType = checkAndGetType(binaryExpr.rhsExpr, rhsExprEnv, binaryExpr);
        } else {
            rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
        }
        BType actualType = symTable.semanticError;
        switch (binaryExpr.opKind) {
            case ADD:
                // xml + xml concatenates: result is xml of the union of both constituents.
                BType leftConstituent = getXMLConstituents(lhsType);
                BType rightConstituent = getXMLConstituents(rhsType);
                if (leftConstituent != null && rightConstituent != null) {
                    actualType = new BXMLType(BUnionType.create(null, leftConstituent, rightConstituent), null);
                    break;
                }
                // Intentional fall-through: non-xml `+` resolves like any other operator.
            default:
                if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
                    // Try the operator resolvers in order until one yields a symbol.
                    BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
                    if (opSymbol == symTable.notFoundSymbol) {
                        opSymbol = symResolver.getBitwiseShiftOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                    }
                    if (opSymbol == symTable.notFoundSymbol) {
                        opSymbol = symResolver.getArithmeticOpsForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                    }
                    if (opSymbol == symTable.notFoundSymbol) {
                        opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType,
                                binaryExpr);
                    }
                    if (opSymbol == symTable.notFoundSymbol) {
                        opSymbol = symResolver.getBinaryComparisonOpForTypeSets(binaryExpr.opKind, lhsType, rhsType);
                    }
                    if (opSymbol == symTable.notFoundSymbol) {
                        dlog.error(binaryExpr.pos, DiagnosticErrorCode.BINARY_OP_INCOMPATIBLE_TYPES, binaryExpr.opKind,
                                lhsType, rhsType);
                    } else {
                        // Equality on values that could hold tables is not yet supported.
                        if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL) &&
                                (couldHoldTableValues(lhsType, new ArrayList<>()) &&
                                        couldHoldTableValues(rhsType, new ArrayList<>()))) {
                            dlog.error(binaryExpr.pos, DiagnosticErrorCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME);
                        }
                        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
                        actualType = opSymbol.type.getReturnType();
                    }
                }
        }
        resultType = types.checkType(binaryExpr, actualType, expType);
    }
/**
 * Speculatively type-checks a clone of the expression against the binary expression's
 * expected type with diagnostics muted; when that succeeds cleanly, re-checks the real
 * expression against the expected type, otherwise checks it unconstrained. Used so numeric
 * literals can take a float/decimal expected type without committing to spurious errors.
 */
private BType checkAndGetType(BLangExpression expr, SymbolEnv env, BLangBinaryExpr binaryExpr) {
        // Mute the diagnostic log and snapshot the error count for the speculative pass.
        boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
        this.nonErrorLoggingCheck = true;
        int prevErrorCount = this.dlog.errorCount();
        this.dlog.resetErrorCount();
        this.dlog.mute();
        // Check a clone so the real expression is not polluted by the speculative pass.
        expr.cloneAttempt++;
        BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, binaryExpr.expectedType);
        this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
        int errorCount = this.dlog.errorCount();
        this.dlog.setErrorCount(prevErrorCount);
        // Only unmute when this call was not nested inside another muted check.
        if (!prevNonErrorLoggingCheck) {
            this.dlog.unmute();
        }
        if (errorCount == 0 && exprCompatibleType != symTable.semanticError) {
            // Speculation succeeded: check for real against the expected type.
            return checkExpr(expr, env, binaryExpr.expectedType);
        } else {
            return checkExpr(expr, env);
        }
    }
/**
 * Walks outwards from {@code env} to the environment attached to {@code node} and returns a
 * clone of its enclosing environment, i.e. the scope as it was before the input node was
 * introduced. Falls back to a fresh environment when no enclosing environment exists.
 */
private SymbolEnv getEnvBeforeInputNode(SymbolEnv env, BLangNode node) {
        SymbolEnv current = env;
        while (current != null && current.node != node) {
            current = current.enclEnv;
        }
        if (current == null || current.enclEnv == null) {
            return new SymbolEnv(node, null);
        }
        return current.enclEnv.createClone();
    }
/**
 * Builds the symbol environment visible after a join clause: a clone of {@code env} walked
 * down to {@code node}, whose enclosing environment is rewired to the scope before the last
 * input (from/join) node — so the join's right side does not see the left side's bindings.
 */
private SymbolEnv getEnvAfterJoinNode(SymbolEnv env, BLangNode node) {
        SymbolEnv clone = env.createClone();
        while (clone != null && clone.node != node) {
            clone = clone.enclEnv;
        }
        if (clone != null) {
            // Rewire: skip past the last from/join input node's scope.
            clone.enclEnv = getEnvBeforeInputNode(clone.enclEnv, getLastInputNodeFromEnv(clone.enclEnv));
        } else {
            clone = new SymbolEnv(node, null);
        }
        return clone;
    }
// Returns the nearest enclosing FROM or JOIN clause node on the env chain, or null if none.
private BLangNode getLastInputNodeFromEnv(SymbolEnv env) {
for (SymbolEnv current = env; current != null; current = current.enclEnv) {
NodeKind kind = current.node.getKind();
if (kind == NodeKind.FROM || kind == NodeKind.JOIN) {
return current.node;
}
}
return null;
}
// A `transactional` expression is always boolean; check it against the expected type.
public void visit(BLangTransactionalExpr transactionalExpr) {
resultType = types.checkType(transactionalExpr, symTable.booleanType, expType);
}
// A `commit` expression evaluates to `error?` (the commit may fail); check that union
// against the expected type.
public void visit(BLangCommitExpr commitExpr) {
BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
resultType = types.checkType(commitExpr, actualType, expType);
}
// Returns the constituent item type of an XML value: the constraint of an `xml<T>` sequence,
// the type itself for a non-sequence XML subtype, or null for any non-XML type.
private BType getXMLConstituents(BType type) {
if (type.tag == TypeTags.XML) {
return ((BXMLType) type).constraint;
}
if (TypeTags.isXMLNonSequenceType(type.tag)) {
return type;
}
return null;
}
// When the expected type is decimal, eagerly checks both operands of the four arithmetic
// operators (+ - * /) against decimal, so numeric literals pick up the decimal context
// instead of their default kind. Other operators and expected types are left untouched.
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
if (expType.tag != TypeTags.DECIMAL) {
return;
}
OperatorKind opKind = binaryExpr.opKind;
boolean isArithmeticOp = opKind == OperatorKind.ADD || opKind == OperatorKind.SUB
|| opKind == OperatorKind.MUL || opKind == OperatorKind.DIV;
if (isArithmeticOp) {
checkExpr(binaryExpr.lhsExpr, env, expType);
checkExpr(binaryExpr.rhsExpr, env, expType);
}
}
// Type-checks an elvis expression `lhs ?: rhs`. The LHS must be a nullable union; its type
// with nil removed becomes the LHS contribution. Both sides are then checked against the
// expected type; without an expected type the two sides must have the same type.
public void visit(BLangElvisExpr elvisExpr) {
BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
BType actualType = symTable.semanticError;
if (lhsType != symTable.semanticError) {
if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
// Strip nil from the union; a single remaining member collapses to that member.
BUnionType unionType = (BUnionType) lhsType;
LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
.filter(type -> type.tag != TypeTags.NIL)
.collect(Collectors.toCollection(LinkedHashSet::new));
if (memberTypes.size() == 1) {
actualType = memberTypes.toArray(new BType[0])[0];
} else {
actualType = BUnionType.create(null, memberTypes);
}
} else {
// Elvis is only meaningful on a nullable LHS.
dlog.error(elvisExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.ELVIS,
lhsType);
}
}
BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
resultType = symTable.semanticError;
} else if (expType == symTable.noType) {
// No contextual type: both branches must agree exactly.
if (types.isSameType(rhsReturnType, lhsReturnType)) {
resultType = lhsReturnType;
} else {
dlog.error(elvisExpr.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsReturnType,
rhsReturnType);
resultType = symTable.semanticError;
}
} else {
resultType = expType;
}
}
@Override
// A parenthesized expression has exactly the type of the expression it wraps.
public void visit(BLangGroupExpr groupExpr) {
resultType = checkExpr(groupExpr.expression, env, expType);
}
// Type-checks a type-descriptor expression. Resolves the referenced type node (once, cached on
// the node) and wraps it as `typedesc<T>` unless the resolved type is itself a typedesc or none.
public void visit(BLangTypedescExpr accessExpr) {
if (accessExpr.resolvedType == null) {
accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
}
int resolveTypeTag = accessExpr.resolvedType.tag;
final BType actualType;
if (resolveTypeTag != TypeTags.TYPEDESC && resolveTypeTag != TypeTags.NONE) {
actualType = new BTypedescType(accessExpr.resolvedType, null);
} else {
actualType = accessExpr.resolvedType;
}
resultType = types.checkType(accessExpr, actualType, expType);
}
// Type-checks a unary expression. UNTAINT passes the operand type through; TYPEOF produces
// `typedesc<T>` of the operand; all other operators are resolved via operator lookup on the
// operand type, with the expected type propagated only for decimal negation and unary plus.
public void visit(BLangUnaryExpr unaryExpr) {
BType exprType;
BType actualType = symTable.semanticError;
if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
actualType = exprType;
}
} else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
actualType = new BTypedescType(exprType, null);
}
} else {
// Propagate the expected type into the operand only where it affects literal inference.
boolean decimalNegation = OperatorKind.SUB.equals(unaryExpr.operator) && expType.tag == TypeTags.DECIMAL;
boolean isAdd = OperatorKind.ADD.equals(unaryExpr.operator);
exprType = (decimalNegation || isAdd) ? checkExpr(unaryExpr.expr, env, expType) :
checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType);
if (symbol == symTable.notFoundSymbol) {
dlog.error(unaryExpr.pos, DiagnosticErrorCode.UNARY_OP_INCOMPATIBLE_TYPES,
unaryExpr.operator, exprType);
} else {
unaryExpr.opSymbol = (BOperatorSymbol) symbol;
actualType = symbol.type.getReturnType();
}
}
}
resultType = types.checkType(unaryExpr, actualType, expType);
}
// Type-checks a cast expression `<T> expr`. Annotation attachments are analyzed first; a cast
// with no type node is annotation-only and just checks the inner expression. Otherwise the
// source expression is speculatively checked (muted, on a clone) against the target type to
// decide whether to check it with or without the target as contextual type, and finally the
// cast's validity is decided by `types.isTypeCastable`.
public void visit(BLangTypeConversionExpr conversionExpr) {
BType actualType = symTable.semanticError;
for (BLangAnnotationAttachment annAttachment : conversionExpr.annAttachments) {
annAttachment.attachPoints.add(AttachPoint.Point.TYPE);
semanticAnalyzer.analyzeNode(annAttachment, this.env);
}
BLangExpression expr = conversionExpr.expr;
if (conversionExpr.typeNode == null) {
// Annotation-only cast: no target type to convert to.
if (!conversionExpr.annAttachments.isEmpty()) {
resultType = checkExpr(expr, env, this.expType);
}
return;
}
BType targetType = getEffectiveReadOnlyType(conversionExpr.typeNode.pos,
symResolver.resolveTypeNode(conversionExpr.typeNode, env));
conversionExpr.targetType = targetType;
// Speculative (muted) check on a clone decides the contextual type for the real check below.
boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
this.nonErrorLoggingCheck = true;
int prevErrorCount = this.dlog.errorCount();
this.dlog.resetErrorCount();
this.dlog.mute();
BType exprCompatibleType = checkExpr(nodeCloner.cloneNode(expr), env, targetType);
this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
int errorCount = this.dlog.errorCount();
this.dlog.setErrorCount(prevErrorCount);
if (!prevNonErrorLoggingCheck) {
this.dlog.unmute();
}
if ((errorCount == 0 && exprCompatibleType != symTable.semanticError) || requireTypeInference(expr, false)) {
checkExpr(expr, env, targetType);
} else {
checkExpr(expr, env, symTable.noType);
}
BType exprType = expr.getBType();
if (types.isTypeCastable(expr, exprType, targetType, this.env)) {
actualType = targetType;
} else if (exprType != symTable.semanticError && exprType != symTable.noType) {
dlog.error(conversionExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_CAST, exprType, targetType);
}
resultType = types.checkType(conversionExpr, actualType, this.expType);
}
@Override
// A lambda's type is its underlying function's type. The defining env is cloned so captured
// closure variables resolve later; the lambda is only registered with the package during a
// real (non-speculative) check to avoid duplicates from muted dry runs.
public void visit(BLangLambdaFunction bLangLambdaFunction) {
bLangLambdaFunction.setBType(bLangLambdaFunction.function.getBType());
bLangLambdaFunction.capturedClosureEnv = env.createClone();
if (!this.nonErrorLoggingCheck) {
env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
}
resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.getBType(), expType);
}
@Override
// Type-checks an arrow function, whose parameter and return types are inferred entirely from
// the expected type. A union expected type is accepted only if it contains exactly one
// invokable member; `function` (ANY_FUNCTION) provides no parameter types and is rejected.
public void visit(BLangArrowFunction bLangArrowFunction) {
BType expectedType = expType;
if (expectedType.tag == TypeTags.UNION) {
// Pick the single invokable member, if there is exactly one; otherwise keep the union.
BUnionType unionType = (BUnionType) expectedType;
BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE)
.collect(Collectors.collectingAndThen(Collectors.toList(), list -> {
if (list.size() != 1) {
return null;
}
return list.get(0);
}
));
if (invokableType != null) {
expectedType = invokableType;
}
}
if (expectedType.tag != TypeTags.INVOKABLE || Symbols.isFlagOn(expectedType.flags, Flags.ANY_FUNCTION)) {
dlog.error(bLangArrowFunction.pos,
DiagnosticErrorCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
resultType = symTable.semanticError;
return;
}
BInvokableType expectedInvocation = (BInvokableType) expectedType;
populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
bLangArrowFunction.body.expr.setBType(populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType));
// With no declared return type, the body expression's type becomes the return type.
if (expectedInvocation.retType.tag == TypeTags.NONE) {
expectedInvocation.retType = bLangArrowFunction.body.expr.getBType();
}
resultType = bLangArrowFunction.funcType = expectedInvocation;
}
// Type-checks an XML qualified name. Handles the `xmlns` declaration forms inside XML
// attributes, rejects `xmlns` as an ordinary prefix, and for a real prefix resolves the
// namespace symbol — possibly via a string constant in another module.
public void visit(BLangXMLQName bLangXMLQName) {
String prefix = bLangXMLQName.prefix.value;
resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
// `xmlns="..."` default-namespace declaration inside an attribute.
if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
&& bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
return;
}
// `xmlns:foo="..."` prefixed namespace declaration inside an attribute.
if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
return;
}
if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
dlog.error(bLangXMLQName.pos, DiagnosticErrorCode.INVALID_NAMESPACE_PREFIX, prefix);
bLangXMLQName.setBType(symTable.semanticError);
return;
}
// Unprefixed names need no namespace resolution.
if (bLangXMLQName.prefix.value.isEmpty()) {
return;
}
BSymbol xmlnsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromIdNode(bLangXMLQName.prefix));
if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
return;
}
if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
logUndefinedSymbolError(bLangXMLQName.pos, prefix);
bLangXMLQName.setBType(symTable.semanticError);
return;
}
// Prefix resolved to a module: the local name must be a string constant holding the namespace.
if (xmlnsSymbol.getKind() == SymbolKind.PACKAGE) {
xmlnsSymbol = findXMLNamespaceFromPackageConst(bLangXMLQName.localname.value, bLangXMLQName.prefix.value,
(BPackageSymbol) xmlnsSymbol, bLangXMLQName.pos);
}
if (xmlnsSymbol == null || xmlnsSymbol.getKind() != SymbolKind.XMLNS) {
resultType = symTable.semanticError;
return;
}
bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
bLangXMLQName.namespaceURI = bLangXMLQName.nsSymbol.namespaceURI;
}
/**
 * Resolves an XML namespace declared through a string constant in another module, where the
 * constant holds an expanded QName of the form {@code {namespaceURI}localPart}.
 *
 * @param localname constant name referenced after the module prefix
 * @param prefix    module prefix used in the qualified name (for diagnostics only)
 * @param pkgSymbol module symbol the prefix resolved to
 * @param pos       position used for error reporting
 * @return a synthesized XMLNS symbol, or {@code null} when resolution fails (an error is logged)
 */
private BSymbol findXMLNamespaceFromPackageConst(String localname, String prefix,
BPackageSymbol pkgSymbol, Location pos) {
BSymbol constSymbol = symResolver.lookupMemberSymbol(pos, pkgSymbol.scope, env,
names.fromString(localname), SymTag.CONSTANT);
if (constSymbol == symTable.notFoundSymbol) {
// Skip the diagnostic for parser-inserted missing nodes to avoid cascading errors.
if (!missingNodesHelper.isMissingNode(prefix) && !missingNodesHelper.isMissingNode(localname)) {
dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, prefix + ":" + localname);
}
return null;
}
// The constant must be a string in order to carry the `{uri}local` form.
BConstantSymbol constantSymbol = (BConstantSymbol) constSymbol;
if (constantSymbol.literalType.tag != TypeTags.STRING) {
dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.stringType, constantSymbol.literalType);
return null;
}
String constVal = (String) constantSymbol.value.value;
int s = constVal.indexOf('{');
int e = constVal.lastIndexOf('}');
if (e > s + 1) {
pkgSymbol.isUsed = true;
String nsURI = constVal.substring(s + 1, e);
// Fix: the local part starts AFTER the closing '}'. The previous substring(e) kept the
// brace in the synthesized symbol's name.
String local = constVal.substring(e + 1);
return new BXMLNSSymbol(names.fromString(local), nsURI, constantSymbol.pkgID, constantSymbol.owner, pos,
SOURCE);
}
dlog.error(pos, DiagnosticErrorCode.INVALID_ATTRIBUTE_REFERENCE, prefix + ":" + localname);
return null;
}
// Type-checks an XML attribute: both the name (a QName) and the value must be strings, and
// the attribute is entered into the element's symbol scope.
public void visit(BLangXMLAttribute bLangXMLAttribute) {
SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
BLangXMLQName name = (BLangXMLQName) bLangXMLAttribute.name;
checkExpr(name, xmlAttributeEnv, symTable.stringType);
// An unprefixed attribute name belongs to no namespace (not the default one).
if (name.prefix.value.isEmpty()) {
name.namespaceURI = null;
}
checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType);
symbolEnter.defineNode(bLangXMLAttribute, env);
}
// Type-checks an XML element literal: namespace-declaration attributes are checked first (so
// prefixes are in scope), then ordinary attributes, then in-scope namespaces used by the
// element/attributes are recorded, tags are validated, and children are normalized. The result
// is `xml:Element` (or a compatible expected subtype); a readonly result marks children immutable.
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
// Track every prefix referenced by the element tag or its attributes.
Set<String> usedPrefixes = new HashSet<>();
BLangIdentifier elemNamePrefix = ((BLangXMLQName) bLangXMLElementLiteral.startTagName).prefix;
if (elemNamePrefix != null && !elemNamePrefix.value.isEmpty()) {
usedPrefixes.add(elemNamePrefix.value);
}
// First pass: namespace declarations only, so later lookups can resolve the prefixes.
for (BLangXMLAttribute attribute : bLangXMLElementLiteral.attributes) {
if (attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute)) {
BLangXMLQuotedString value = attribute.value;
// A namespace URI must be a single literal fragment; interpolation is illegal.
if (value.getKind() == NodeKind.XML_QUOTED_STRING && value.textFragments.size() > 1) {
dlog.error(value.pos, DiagnosticErrorCode.INVALID_XML_NS_INTERPOLATION);
}
checkExpr(attribute, xmlElementEnv, symTable.noType);
}
BLangIdentifier prefix = ((BLangXMLQName) attribute.name).prefix;
if (prefix != null && !prefix.value.isEmpty()) {
usedPrefixes.add(prefix.value);
}
}
// Second pass: all non-namespace attributes.
bLangXMLElementLiteral.attributes.forEach(attribute -> {
if (!(attribute.name.getKind() == NodeKind.XML_QNAME && isXmlNamespaceAttribute(attribute))) {
checkExpr(attribute, xmlElementEnv, symTable.noType);
}
});
Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
if (namespaces.containsKey(defaultNs)) {
bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
}
// Record only namespaces whose prefixes this element actually uses.
for (Map.Entry<Name, BXMLNSSymbol> nsEntry : namespaces.entrySet()) {
if (usedPrefixes.contains(nsEntry.getKey().value)) {
bLangXMLElementLiteral.namespacesInScope.put(nsEntry.getKey(), nsEntry.getValue());
}
}
validateTags(bLangXMLElementLiteral, xmlElementEnv);
bLangXMLElementLiteral.modifiedChildren =
concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
if (expType == symTable.noType) {
resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlElementType, expType);
return;
}
resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLElementLiteral.pos, symTable.xmlElementType,
this.expType);
if (Symbols.isFlagOn(resultType.flags, Flags.READONLY)) {
markChildrenAsImmutable(bLangXMLElementLiteral);
}
}
// True when the attribute declares a namespace: either a default declaration (`xmlns="..."`,
// empty prefix with local name "xmlns") or a prefixed one (`xmlns:foo="..."`).
private boolean isXmlNamespaceAttribute(BLangXMLAttribute attribute) {
BLangXMLQName attrName = (BLangXMLQName) attribute.name;
String prefix = attrName.prefix.value;
if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
return true;
}
return prefix.isEmpty() && attrName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
}
// Maps an XML literal node kind to its XML subtype; anything that is not an element, text,
// or processing-instruction literal is treated as a comment literal.
public BType getXMLTypeFromLiteralKind(BLangExpression childXMLExpressions) {
switch (childXMLExpressions.getKind()) {
case XML_ELEMENT_LITERAL:
return symTable.xmlElementType;
case XML_TEXT_LITERAL:
return symTable.xmlTextType;
case XML_PI_LITERAL:
return symTable.xmlPIType;
default:
return symTable.xmlCommentType;
}
}
// Enters speculative-check mode: suppresses diagnostics until unMuteErrorLog is called.
public void muteErrorLog() {
this.nonErrorLoggingCheck = true;
this.dlog.mute();
}
// Leaves speculative-check mode, restoring the caller's saved flag and error count.
// The log is only unmuted when the caller was NOT itself inside a speculative check.
public void unMuteErrorLog(boolean prevNonErrorLoggingCheck, int errorCount) {
this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
this.dlog.setErrorCount(errorCount);
if (!prevNonErrorLoggingCheck) {
this.dlog.unmute();
}
}
// Lifts a single XML subtype to its sequence type: element/comment/PI become `xml<T>` of that
// subtype, while text stays as the text singleton (a sequence of text is itself text).
public BType getXMLSequenceType(BType xmlSubType) {
if (xmlSubType.tag == TypeTags.XML_ELEMENT) {
return new BXMLType(symTable.xmlElementType, null);
}
if (xmlSubType.tag == TypeTags.XML_COMMENT) {
return new BXMLType(symTable.xmlCommentType, null);
}
if (xmlSubType.tag == TypeTags.XML_PI) {
return new BXMLType(symTable.xmlPIType, null);
}
return symTable.xmlTextType;
}
// Type-checks an XML sequence literal. The expected type must be an XML type, xml:Text, a
// union of those, or absent. Each item is checked individually; with no constraint a single
// item kind yields its sequence type, mixed kinds yield plain `xml`.
public void visit(BLangXMLSequenceLiteral bLangXMLSequenceLiteral) {
if (expType.tag != TypeTags.XML && expType.tag != TypeTags.UNION && expType.tag != TypeTags.XML_TEXT
&& expType != symTable.noType) {
dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType,
"XML Sequence");
resultType = symTable.semanticError;
return;
}
// Collect the distinct item types present in the sequence.
List<BType> xmlTypesInSequence = new ArrayList<>();
for (BLangExpression expressionItem : bLangXMLSequenceLiteral.xmlItems) {
resultType = checkExpr(expressionItem, env, expType);
if (!xmlTypesInSequence.contains(resultType)) {
xmlTypesInSequence.add(resultType);
}
}
if (expType.tag == TypeTags.XML || expType == symTable.noType) {
if (xmlTypesInSequence.size() == 1) {
resultType = getXMLSequenceType(xmlTypesInSequence.get(0));
return;
}
resultType = symTable.xmlType;
return;
}
if (expType.tag == TypeTags.XML_TEXT) {
resultType = symTable.xmlTextType;
return;
}
// Union expected type: every member must itself be xml or xml:Text.
for (BType item : ((BUnionType) expType).getMemberTypes()) {
if (item.tag != TypeTags.XML_TEXT && item.tag != TypeTags.XML) {
dlog.error(bLangXMLSequenceLiteral.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
expType, symTable.xmlType);
resultType = symTable.semanticError;
return;
}
}
resultType = symTable.xmlType;
}
// Type-checks an XML text literal. An empty single-fragment literal is the empty XML
// sequence, typed `xml:never`; anything else is `xml:Text`.
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
List<BLangExpression> literalValues = bLangXMLTextLiteral.textFragments;
checkStringTemplateExprs(literalValues);
BLangExpression xmlExpression = literalValues.get(0);
if (literalValues.size() == 1 && xmlExpression.getKind() == NodeKind.LITERAL &&
((String) ((BLangLiteral) xmlExpression).value).isEmpty()) {
resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlNeverType, expType);
return;
}
resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlTextType, expType);
}
// Type-checks an XML comment literal: interpolated fragments must be string-compatible, and
// the literal itself is `xml:Comment` (or a compatible expected XML subtype).
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments);
if (expType == symTable.noType) {
resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlCommentType, expType);
return;
}
resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLCommentLiteral.pos, symTable.xmlCommentType,
this.expType);
}
// Type-checks an XML processing-instruction literal: the target must be a string, data
// fragments must be string-compatible, and the result is `xml:ProcessingInstruction`.
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments);
if (expType == symTable.noType) {
resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlPIType, expType);
return;
}
resultType = checkXmlSubTypeLiteralCompatibility(bLangXMLProcInsLiteral.pos, symTable.xmlPIType, this.expType);
}
// A quoted attribute value is a string template; its overall type is string.
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
checkStringTemplateExprs(bLangXMLQuotedString.textFragments);
resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
// The `x@["attr"]` attribute-access syntax is no longer supported; always an error.
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
dlog.error(xmlAttributeAccessExpr.pos,
DiagnosticErrorCode.DEPRECATED_XML_ATTRIBUTE_ACCESS);
resultType = symTable.semanticError;
}
// A string template `string `...`` checks each interpolation and is typed string.
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
checkStringTemplateExprs(stringTemplateLiteral.exprs);
resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
@Override
// Type-checks a raw template literal against an object type with exactly two list-typed
// fields, `strings` and `insertions`; each fragment list is checked against its field type.
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
BType type = determineRawTemplateLiteralType(rawTemplateLiteral, expType);
if (type == symTable.semanticError) {
resultType = type;
return;
}
// At this point the type is a valid raw-template object; validate both fragment lists.
BObjectType literalType = (BObjectType) type;
BType stringsType = literalType.fields.get("strings").type;
if (evaluateRawTemplateExprs(rawTemplateLiteral.strings, stringsType, INVALID_NUM_STRINGS,
rawTemplateLiteral.pos)) {
type = symTable.semanticError;
}
BType insertionsType = literalType.fields.get("insertions").type;
if (evaluateRawTemplateExprs(rawTemplateLiteral.insertions, insertionsType, INVALID_NUM_INSERTIONS,
rawTemplateLiteral.pos)) {
type = symTable.semanticError;
}
resultType = type;
}
// Determines the object type a raw template literal should be checked against. Falls back to
// the generic RawTemplate type when no usable contextual type exists; otherwise validates that
// the contextual type is a non-class object with at most the two expected fields and no methods.
private BType determineRawTemplateLiteralType(BLangRawTemplateLiteral rawTemplateLiteral, BType expType) {
if (expType == symTable.noType || containsAnyType(expType)) {
return symTable.rawTemplateType;
}
BType compatibleType = getCompatibleRawTemplateType(expType, rawTemplateLiteral.pos);
BType type = types.checkType(rawTemplateLiteral, compatibleType, symTable.rawTemplateType,
DiagnosticErrorCode.INVALID_RAW_TEMPLATE_TYPE);
if (type == symTable.semanticError) {
return type;
}
// A raw template can only be assigned to an abstract object type, never a class.
if (Symbols.isFlagOn(type.tsymbol.flags, Flags.CLASS)) {
dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_RAW_TEMPLATE_ASSIGNMENT, type);
return symTable.semanticError;
}
BObjectType litObjType = (BObjectType) type;
BObjectTypeSymbol objTSymbol = (BObjectTypeSymbol) litObjType.tsymbol;
// Only `strings` and `insertions` fields are permitted, and no methods.
if (litObjType.fields.size() > 2) {
dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.INVALID_NUM_FIELDS, litObjType);
type = symTable.semanticError;
}
if (!objTSymbol.attachedFuncs.isEmpty()) {
dlog.error(rawTemplateLiteral.pos, DiagnosticErrorCode.METHODS_NOT_ALLOWED, litObjType);
type = symTable.semanticError;
}
return type;
}
// Checks a raw template's fragment expressions against the list type of the corresponding
// field (array or tuple, possibly behind an intersection). Reports a count mismatch with
// `code`, checks each fragment against its member type, and returns true if any fragment
// failed to type-check (a pure count mismatch alone returns false after logging).
private boolean evaluateRawTemplateExprs(List<? extends BLangExpression> exprs, BType fieldType,
DiagnosticCode code, Location pos) {
// Unwrap `T & readonly` style intersections to the effective list type.
BType listType = fieldType.tag != TypeTags.INTERSECTION ? fieldType :
((BIntersectionType) fieldType).effectiveType;
boolean errored = false;
if (listType.tag == TypeTags.ARRAY) {
BArrayType arrayType = (BArrayType) listType;
if (arrayType.state == BArrayState.CLOSED && (exprs.size() != arrayType.size)) {
dlog.error(pos, code, arrayType.size, exprs.size());
return false;
}
for (BLangExpression expr : exprs) {
errored = (checkExpr(expr, env, arrayType.eType) == symTable.semanticError) || errored;
}
} else if (listType.tag == TypeTags.TUPLE) {
BTupleType tupleType = (BTupleType) listType;
final int size = exprs.size();
final int requiredItems = tupleType.tupleTypes.size();
// Too few fragments, or too many without a rest type, is a count error.
if (size < requiredItems || (size > requiredItems && tupleType.restType == null)) {
dlog.error(pos, code, requiredItems, size);
return false;
}
int i;
List<BType> memberTypes = tupleType.tupleTypes;
for (i = 0; i < requiredItems; i++) {
errored = (checkExpr(exprs.get(i), env, memberTypes.get(i)) == symTable.semanticError) || errored;
}
// Surplus fragments are checked against the tuple's rest type.
if (size > requiredItems) {
for (; i < size; i++) {
errored = (checkExpr(exprs.get(i), env, tupleType.restType) == symTable.semanticError) || errored;
}
}
} else {
throw new IllegalStateException("Expected a list type, but found: " + listType);
}
return errored;
}
// True when `type` is the `any` type itself, or a union that has `any` as a direct member.
private boolean containsAnyType(BType type) {
if (type == symTable.anyType) {
return true;
}
return type.tag == TypeTags.UNION
&& ((BUnionType) type).getMemberTypes().contains(symTable.anyType);
}
// From a union expected type, picks the single member assignable to the RawTemplate type.
// Non-union types (and unions with no compatible member) are returned unchanged; more than
// one compatible member is ambiguous and reported as an error.
private BType getCompatibleRawTemplateType(BType expType, Location pos) {
if (expType.tag != TypeTags.UNION) {
return expType;
}
List<BType> compatibleTypes = ((BUnionType) expType).getMemberTypes().stream()
.filter(memberType -> types.isAssignable(memberType, symTable.rawTemplateType))
.collect(Collectors.toList());
if (compatibleTypes.isEmpty()) {
return expType;
}
if (compatibleTypes.size() == 1) {
return compatibleTypes.get(0);
}
dlog.error(pos, DiagnosticErrorCode.MULTIPLE_COMPATIBLE_RAW_TEMPLATE_TYPES, symTable.rawTemplateType,
expType);
return symTable.semanticError;
}
@Override
// An int range `a ... b` requires int bounds and produces an int array.
public void visit(BLangIntRangeExpression intRangeExpression) {
checkExpr(intRangeExpression.startExpr, env, symTable.intType);
checkExpr(intRangeExpression.endExpr, env, symTable.intType);
resultType = new BArrayType(symTable.intType);
}
@Override
// A rest argument `...expr` takes the type of the wrapped expression.
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
@Override
// An inferred typedesc default (`<>`) is only valid where a typedesc is expected; the
// expected type itself becomes the result.
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr) {
if (expType.tag != TypeTags.TYPEDESC) {
dlog.error(inferTypedescExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, symTable.typeDesc);
resultType = symTable.semanticError;
return;
}
resultType = expType;
}
@Override
// A named argument `name = expr` has the type of its value expression.
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
bLangNamedArgsExpression.setBType(bLangNamedArgsExpression.expr.getBType());
}
@Override
// Type-checks a (deprecated-style) match expression: each pattern's variable is defined in a
// scratch block env, every clause expression is checked against the expected type, and the
// result is the union of the possible clause types (or the single type if they all agree).
public void visit(BLangMatchExpression bLangMatchExpression) {
SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
checkExpr(bLangMatchExpression.expr, matchExprEnv);
bLangMatchExpression.patternClauses.forEach(pattern -> {
// `_`-suffixed names are ignore patterns and introduce no symbol.
if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
symbolEnter.defineNode(pattern.variable, matchExprEnv);
}
checkExpr(pattern.expr, matchExprEnv, expType);
pattern.variable.setBType(symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv));
});
LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
BType actualType;
if (matchExprTypes.contains(symTable.semanticError)) {
actualType = symTable.semanticError;
} else if (matchExprTypes.size() == 1) {
actualType = matchExprTypes.toArray(new BType[0])[0];
} else {
actualType = BUnionType.create(null, matchExprTypes);
}
resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
@Override
// `check expr`: record whether we are inside a query (affects error-type propagation for
// streams), then delegate to the shared check/checkpanic handling.
public void visit(BLangCheckedExpr checkedExpr) {
checkWithinQueryExpr = isWithinQuery();
visitCheckAndCheckPanicExpr(checkedExpr);
}
@Override
// `checkpanic expr`: shares the handling of `check`, but panics instead of returning errors.
public void visit(BLangCheckPanickedExpr checkedExpr) {
visitCheckAndCheckPanicExpr(checkedExpr);
}
@Override
// Type-checks a query expression. Pushes the appropriate env and the final (select) clause
// onto the query stacks, visits every clause in order, resolves the query's overall type from
// the select expression and source collection, then pops the stacks in reverse. Table results
// additionally validate key/constraint rules.
public void visit(BLangQueryExpr queryExpr) {
boolean cleanPrevEnvs = false;
if (prevEnvs.empty()) {
prevEnvs.push(env);
// Remember to pop only the frame this visit pushed.
cleanPrevEnvs = true;
}
if (breakToParallelQueryEnv) {
// Nested query in a parallel-safe position: start from the outer query's env.
queryEnvs.push(prevEnvs.peek());
} else {
queryEnvs.push(env);
}
queryFinalClauses.push(queryExpr.getSelectClause());
List<BLangNode> clauses = queryExpr.getQueryClauses();
BLangExpression collectionNode = (BLangExpression) ((BLangFromClause) clauses.get(0)).getCollection();
clauses.forEach(clause -> clause.accept(this));
BType actualType = resolveQueryType(queryEnvs.peek(), ((BLangSelectClause) queryFinalClauses.peek()).expression,
collectionNode.getBType(), expType, queryExpr);
actualType = (actualType == symTable.semanticError) ? actualType :
types.checkType(queryExpr.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
queryFinalClauses.pop();
queryEnvs.pop();
if (cleanPrevEnvs) {
prevEnvs.pop();
}
if (actualType.tag == TypeTags.TABLE) {
BTableType tableType = (BTableType) actualType;
tableType.constraintPos = queryExpr.pos;
tableType.isTypeInlineDefined = true;
if (!validateTableType(tableType, null)) {
resultType = symTable.semanticError;
return;
}
}
checkWithinQueryExpr = false;
resultType = actualType;
}
// True while a query expression/action is being type-checked (both query stacks are non-empty).
private boolean isWithinQuery() {
return !queryEnvs.isEmpty() && !queryFinalClauses.isEmpty();
}
// Resolves the overall type of a query expression. Each non-error, non-nil candidate from the
// target type drives one check of the select expression (array element, table/stream
// constraint, string/xml, or unconstrained); exactly one successful candidate determines the
// result, more than one is ambiguous, and stream results get the collection's completion/error
// type folded in.
private BType resolveQueryType(SymbolEnv env, BLangExpression selectExp, BType collectionType,
BType targetType, BLangQueryExpr queryExpr) {
// Error and nil members of the target are produced by the query machinery, not the select.
List<BType> resultTypes = types.getAllTypes(targetType).stream()
.filter(t -> !types.isAssignable(t, symTable.errorType))
.filter(t -> !types.isAssignable(t, symTable.nilType))
.collect(Collectors.toList());
if (resultTypes.isEmpty()) {
resultTypes.add(symTable.noType);
}
BType actualType = symTable.semanticError;
List<BType> selectTypes = new ArrayList<>();
List<BType> resolvedTypes = new ArrayList<>();
BType selectType, resolvedType;
for (BType type : resultTypes) {
switch (type.tag) {
case TypeTags.ARRAY:
selectType = checkExpr(selectExp, env, ((BArrayType) type).eType);
resolvedType = new BArrayType(selectType);
break;
case TypeTags.TABLE:
selectType = checkExpr(selectExp, env, types.getSafeType(((BTableType) type).constraint,
true, true));
resolvedType = symTable.tableType;
break;
case TypeTags.STREAM:
selectType = checkExpr(selectExp, env, types.getSafeType(((BStreamType) type).constraint,
true, true));
resolvedType = symTable.streamType;
break;
case TypeTags.STRING:
case TypeTags.XML:
selectType = checkExpr(selectExp, env, type);
resolvedType = selectType;
break;
case TypeTags.NONE:
default:
// No contextual shape: infer from the select type and the source collection kind.
selectType = checkExpr(selectExp, env, type);
resolvedType = getNonContextualQueryType(selectType, collectionType);
break;
}
if (selectType != symTable.semanticError) {
if (resolvedType.tag == TypeTags.STREAM) {
queryExpr.isStream = true;
}
if (resolvedType.tag == TypeTags.TABLE) {
queryExpr.isTable = true;
}
selectTypes.add(selectType);
resolvedTypes.add(resolvedType);
}
}
if (selectTypes.size() == 1) {
BType errorType = getErrorType(collectionType, queryExpr);
selectType = selectTypes.get(0);
if (queryExpr.isStream) {
return new BStreamType(TypeTags.STREAM, selectType, errorType, null);
} else if (queryExpr.isTable) {
actualType = getQueryTableType(queryExpr, selectType);
} else {
actualType = resolvedTypes.get(0);
}
// Non-stream results widen to a union with the collection's error type, if any.
if (errorType != null && errorType.tag != TypeTags.NIL) {
return BUnionType.create(null, actualType, errorType);
} else {
return actualType;
}
} else if (selectTypes.size() > 1) {
dlog.error(selectExp.pos, DiagnosticErrorCode.AMBIGUOUS_TYPES, selectTypes);
return actualType;
} else {
return actualType;
}
}
// Builds the table type for a table-producing query. A key specifier (field name list) makes
// key-conflicts possible at runtime, so the result widens to `table<...>|error`.
private BType getQueryTableType(BLangQueryExpr queryExpr, BType constraintType) {
BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
if (queryExpr.fieldNameIdentifierList.isEmpty()) {
return tableType;
}
List<String> fieldNames = new ArrayList<>();
for (Object identifier : queryExpr.fieldNameIdentifierList) {
fieldNames.add(((BLangIdentifier) identifier).value);
}
tableType.fieldNameList = fieldNames;
return BUnionType.create(null, tableType, symTable.errorType);
}
// Computes the error type a query can produce based on its source collection: a stream
// contributes its completion type, an object contributes errors from its iterator's `next`,
// and other iterables contribute errors from the lang-lib iterator. Inside a `check`ed query
// producing a stream, a generic error member is added as well. Returns null when the
// collection contributes no error type.
private BType getErrorType(BType collectionType, BLangQueryExpr queryExpr) {
if (collectionType.tag == TypeTags.SEMANTIC_ERROR) {
return null;
}
BType returnType = null, errorType = null;
switch (collectionType.tag) {
case TypeTags.STREAM:
errorType = ((BStreamType) collectionType).completionType;
break;
case TypeTags.OBJECT:
returnType = types.getVarTypeFromIterableObject((BObjectType) collectionType);
break;
default:
BSymbol itrSymbol = symResolver.lookupLangLibMethod(collectionType,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
if (itrSymbol == this.symTable.notFoundSymbol) {
return null;
}
BInvokableSymbol invokableSymbol = (BInvokableSymbol) itrSymbol;
returnType = types.getResultTypeOfNextInvocation((BObjectType) invokableSymbol.retType);
}
List<BType> errorTypes = new ArrayList<>();
if (returnType != null) {
// Keep only the error members of the iterator's `next()` return type.
types.getAllTypes(returnType).stream()
.filter(t -> types.isAssignable(t, symTable.errorType))
.forEach(errorTypes::add);
}
if (checkWithinQueryExpr && queryExpr.isStream) {
// A checked stream query may surface a generic error through the stream's completion.
if (errorTypes.isEmpty()) {
errorTypes.add(symTable.nilType);
}
errorTypes.add(symTable.errorType);
}
if (!errorTypes.isEmpty()) {
if (errorTypes.size() == 1) {
errorType = errorTypes.get(0);
} else {
errorType = BUnionType.create(null, errorTypes.toArray(new BType[0]));
}
}
return errorType;
}
// With no contextual type, a query's result kind follows its source collection: table source
// yields a table, stream yields a stream, xml yields `xml<selectType>`, string yields string,
// and everything else yields an array of the select type.
private BType getNonContextualQueryType(BType staticType, BType basicType) {
if (basicType.tag == TypeTags.TABLE) {
return symTable.tableType;
}
if (basicType.tag == TypeTags.STREAM) {
return symTable.streamType;
}
if (basicType.tag == TypeTags.XML) {
return new BXMLType(staticType, null);
}
if (basicType.tag == TypeTags.STRING) {
return symTable.stringType;
}
return new BArrayType(staticType);
}
@Override
// Type-checks a query action (`from ... do { ... }`). Mirrors visit(BLangQueryExpr) but the
// final clause is the do-clause, whose body is analyzed as a statement block; the action's
// value is `error?`.
public void visit(BLangQueryAction queryAction) {
if (prevEnvs.empty()) {
prevEnvs.push(env);
} else {
prevEnvs.push(prevEnvs.peek());
}
queryEnvs.push(prevEnvs.peek());
BLangDoClause doClause = queryAction.getDoClause();
queryFinalClauses.push(doClause);
List<BLangNode> clauses = queryAction.getQueryClauses();
clauses.forEach(clause -> clause.accept(this));
// The do body runs for its side effects; analyze it as statements, not as an expression.
semanticAnalyzer.analyzeStmt(doClause.body, SymbolEnv.createBlockEnv(doClause.body, queryEnvs.peek()));
BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
resultType = types.checkType(doClause.pos, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
queryFinalClauses.pop();
queryEnvs.pop();
prevEnvs.pop();
}
@Override
// Type-checks a `from` clause: replaces the top query env with a type-narrowed one, checks
// the source collection, derives the binding pattern's type from it, and defines the bound
// variables. Nested queries inside the collection may break out to the parallel env.
public void visit(BLangFromClause fromClause) {
boolean prevBreakToParallelEnv = this.breakToParallelQueryEnv;
this.breakToParallelQueryEnv = true;
SymbolEnv fromEnv = SymbolEnv.createTypeNarrowedEnv(fromClause, queryEnvs.pop());
fromClause.env = fromEnv;
queryEnvs.push(fromEnv);
checkExpr(fromClause.collection, queryEnvs.peek());
types.setInputClauseTypedBindingPatternType(fromClause);
handleInputClauseVariables(fromClause, queryEnvs.peek());
this.breakToParallelQueryEnv = prevBreakToParallelEnv;
}
@Override
// Type-checks a `join` clause: same env handling as `from` (narrowed env, collection check,
// binding-pattern typing, variable definition), then visits the mandatory `on` condition.
public void visit(BLangJoinClause joinClause) {
boolean prevBreakEnv = this.breakToParallelQueryEnv;
this.breakToParallelQueryEnv = true;
SymbolEnv joinEnv = SymbolEnv.createTypeNarrowedEnv(joinClause, queryEnvs.pop());
joinClause.env = joinEnv;
queryEnvs.push(joinEnv);
checkExpr(joinClause.collection, queryEnvs.peek());
types.setInputClauseTypedBindingPatternType(joinClause);
handleInputClauseVariables(joinClause, queryEnvs.peek());
if (joinClause.onClause != null) {
((BLangOnClause) joinClause.onClause).accept(this);
}
this.breakToParallelQueryEnv = prevBreakEnv;
}
@Override
// Type-checks a `let` clause: swaps in a narrowed env and analyzes each let-variable
// definition so subsequent clauses can see the new symbols.
public void visit(BLangLetClause letClause) {
SymbolEnv letEnv = SymbolEnv.createTypeNarrowedEnv(letClause, queryEnvs.pop());
letClause.env = letEnv;
queryEnvs.push(letEnv);
for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
semanticAnalyzer.analyzeDef((BLangNode) letVariable.definitionNode, letEnv);
}
}
@Override
// A `where` clause is a filter; shared filter handling checks the condition and narrows types.
public void visit(BLangWhereClause whereClause) {
whereClause.env = handleFilterClauses(whereClause.expression);
}
@Override
public void visit(BLangSelectClause selectClause) {
    // Select runs in its own type-narrowed env, replacing the current query env.
    SymbolEnv narrowedEnv = SymbolEnv.createTypeNarrowedEnv(selectClause, queryEnvs.pop());
    selectClause.env = narrowedEnv;
    queryEnvs.push(narrowedEnv);
}
@Override
public void visit(BLangDoClause doClause) {
    // The do-clause body runs in its own type-narrowed env, replacing the current query env.
    SymbolEnv doEnv = SymbolEnv.createTypeNarrowedEnv(doClause, queryEnvs.pop());
    doClause.env = doEnv;
    queryEnvs.push(doEnv);
}
@Override
public void visit(BLangOnConflictClause onConflictClause) {
    // The on-conflict expression must evaluate to an error value.
    BType conflictExprType = checkExpr(onConflictClause.expression, queryEnvs.peek(), symTable.errorType);
    if (types.isAssignable(conflictExprType, symTable.errorType)) {
        return;
    }
    dlog.error(onConflictClause.expression.pos, DiagnosticErrorCode.ERROR_TYPE_EXPECTED,
            symTable.errorType, conflictExprType);
}
@Override
public void visit(BLangLimitClause limitClause) {
    // The limit expression must evaluate to an int.
    BType limitExprType = checkExpr(limitClause.expression, queryEnvs.peek());
    if (types.isAssignable(limitExprType, symTable.intType)) {
        return;
    }
    dlog.error(limitClause.expression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
            symTable.intType, limitExprType);
}
@Override
public void visit(BLangOnClause onClause) {
    // The lhs of `on` may only see bindings introduced before the join input;
    // the rhs additionally sees the join's own bindings.
    BLangNode joinNode = getLastInputNodeFromEnv(queryEnvs.peek());
    onClause.lhsEnv = getEnvBeforeInputNode(queryEnvs.peek(), joinNode);
    BType lhsType = checkExpr(onClause.lhsExpr, onClause.lhsEnv);
    onClause.rhsEnv = getEnvAfterJoinNode(queryEnvs.peek(), joinNode);
    BType rhsType = checkExpr(onClause.rhsExpr, onClause.rhsEnv != null ? onClause.rhsEnv : queryEnvs.peek());
    // Equality in the join condition requires the sides to be assignment-compatible.
    if (!types.isAssignable(lhsType, rhsType)) {
        dlog.error(onClause.rhsExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, lhsType, rhsType);
    }
}
@Override
public void visit(BLangOrderByClause orderByClause) {
    // Order keys are checked against the current query env and must be of an ordered type.
    orderByClause.env = queryEnvs.peek();
    for (OrderKeyNode orderKeyNode : orderByClause.getOrderKeyList()) {
        BLangExpression orderKeyExpr = (BLangExpression) orderKeyNode.getOrderKey();
        BType orderKeyType = checkExpr(orderKeyExpr, orderByClause.env);
        if (!types.isOrderedType(orderKeyType, false)) {
            dlog.error(((BLangOrderKey) orderKeyNode).expression.pos, DiagnosticErrorCode.ORDER_BY_NOT_SUPPORTED);
        }
    }
}
@Override
public void visit(BLangDo doNode) {
    // Only the optional on-fail clause needs checking here.
    if (doNode.onFailClause == null) {
        return;
    }
    doNode.onFailClause.accept(this);
}
// Type-checks every statement of the on-fail clause body, in order.
public void visit(BLangOnFailClause onFailClause) {
    onFailClause.body.stmts.forEach(stmt -> stmt.accept(this));
}
/**
 * Checks a where/limit-style filter expression as boolean and narrows the query env
 * under the assumption that the filter holds.
 *
 * @return the narrowed env now on top of {@code queryEnvs}
 */
private SymbolEnv handleFilterClauses(BLangExpression filterExpression) {
    checkExpr(filterExpression, queryEnvs.peek(), symTable.booleanType);
    BType filterType = filterExpression.getBType();
    // Tuples can slip through the boolean check; reject them explicitly.
    if (filterType.tag == TypeTags.TUPLE) {
        dlog.error(filterExpression.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                symTable.booleanType, filterType);
    }
    // Narrow types for the rest of the query based on the filter being true.
    SymbolEnv narrowedEnv = typeNarrower.evaluateTruth(filterExpression, queryFinalClauses.peek(), queryEnvs.pop());
    queryEnvs.push(narrowedEnv);
    return narrowedEnv;
}
/**
 * Defines the variables bound by a from/join input clause in the given env,
 * using the element type derived from the iterated collection.
 */
private void handleInputClauseVariables(BLangInputClause bLangInputClause, SymbolEnv blockEnv) {
    if (bLangInputClause.variableDefinitionNode == null) {
        // Nothing is bound by this clause.
        return;
    }
    BLangVariable boundVar = (BLangVariable) bLangInputClause.variableDefinitionNode.getVariable();
    // `var` bindings take the inferred element type directly.
    if (bLangInputClause.isDeclaredWithVar) {
        semanticAnalyzer.handleDeclaredVarInForeach(boundVar, bLangInputClause.varType, blockEnv);
        return;
    }
    BType declaredType = symResolver.resolveTypeNode(boundVar.typeNode, blockEnv);
    if (types.isAssignable(bLangInputClause.varType, declaredType)) {
        semanticAnalyzer.handleDeclaredVarInForeach(boundVar, bLangInputClause.varType, blockEnv);
        return;
    }
    // Mismatch: report it (unless type resolution itself failed) and fall back to the declared type.
    if (declaredType != symTable.semanticError) {
        dlog.error(boundVar.typeNode.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                bLangInputClause.varType, declaredType);
    }
    semanticAnalyzer.handleDeclaredVarInForeach(boundVar, declaredType, blockEnv);
}
/**
 * Type-checks a {@code check} / {@code checkpanic} expression. Splits the sub-expression's
 * type into error and non-error components, records the error components on the node,
 * and sets {@code resultType} to the non-error remainder checked against {@code expType}.
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
    String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
    BLangExpression exprWithCheckingKeyword = checkedExpr.expr;
    // First visit iff the sub-expression has not been typed yet (relevant for worker receive below).
    boolean firstVisit = exprWithCheckingKeyword.getBType() == null;
    BType checkExprCandidateType;
    if (expType == symTable.noType) {
        checkExprCandidateType = symTable.noType;
    } else {
        // Probe (without diagnostics) whether the sub-expression fits expType alone;
        // if not, widen the candidate to expType|error.
        BType exprType = getCandidateType(checkedExpr, expType);
        if (exprType == symTable.semanticError) {
            checkExprCandidateType = BUnionType.create(null, expType, symTable.errorType);
        } else {
            checkExprCandidateType = addDefaultErrorIfNoErrorComponentFound(expType);
        }
    }
    // For `check` against a union of simple basic types, reroute through ensureType
    // so lax-typed values get conversion semantics.
    if (checkedExpr.getKind() == NodeKind.CHECK_EXPR && types.isUnionOfSimpleBasicTypes(expType)) {
        rewriteWithEnsureTypeFunc(checkedExpr, checkExprCandidateType);
    }
    BType exprType = checkExpr(checkedExpr.expr, env, checkExprCandidateType);
    if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Worker receive types resolve on a later pass; bail out and revisit then.
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = checkedExpr.getBType();
            exprType = checkedExpr.expr.getBType();
        }
    }
    boolean isErrorType = types.isAssignable(exprType, symTable.errorType);
    if (exprType.tag != TypeTags.UNION && !isErrorType) {
        if (exprType.tag == TypeTags.READONLY) {
            // readonly splits into an error component and an any&readonly remainder.
            checkedExpr.equivalentErrorTypeList = new ArrayList<>(1) {{
                add(symTable.errorType);
            }};
            resultType = symTable.anyAndReadonly;
            return;
        } else if (exprType != symTable.semanticError) {
            // Non-union, non-error type: check/checkpanic is meaningless here.
            dlog.error(checkedExpr.expr.pos,
                    DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS,
                    operatorType);
        }
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // Partition the union members into error and non-error components.
    List<BType> errorTypes = new ArrayList<>();
    List<BType> nonErrorTypes = new ArrayList<>();
    if (!isErrorType) {
        for (BType memberType : ((BUnionType) exprType).getMemberTypes()) {
            if (memberType.tag == TypeTags.READONLY) {
                // readonly contributes to both partitions (see above).
                errorTypes.add(symTable.errorType);
                nonErrorTypes.add(symTable.anyAndReadonly);
                continue;
            }
            if (types.isAssignable(memberType, symTable.errorType)) {
                errorTypes.add(memberType);
                continue;
            }
            nonErrorTypes.add(memberType);
        }
    } else {
        errorTypes.add(exprType);
    }
    checkedExpr.equivalentErrorTypeList = errorTypes;
    if (errorTypes.isEmpty()) {
        // No error component at all: check/checkpanic is invalid.
        dlog.error(checkedExpr.expr.pos,
                DiagnosticErrorCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        checkedExpr.setBType(symTable.semanticError);
        return;
    }
    // The expression's actual type is the union of the non-error members (never if none remain).
    BType actualType;
    if (nonErrorTypes.size() == 0) {
        actualType = symTable.neverType;
    } else if (nonErrorTypes.size() == 1) {
        actualType = nonErrorTypes.get(0);
    } else {
        actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypes));
    }
    if (actualType.tag == TypeTags.NEVER) {
        dlog.error(checkedExpr.pos, DiagnosticErrorCode.NEVER_TYPE_NOT_ALLOWED_WITH_CHECKED_EXPR,
                operatorType);
    }
    resultType = types.checkType(checkedExpr, actualType, expType);
}
/**
 * Rewrites {@code check <expr>} over a lax-typed expression into
 * {@code check <expr>.ensureType(td)} so the value is converted to the expected type at runtime.
 * No-op when the candidate type is not lax.
 */
private void rewriteWithEnsureTypeFunc(BLangCheckedExpr checkedExpr, BType type) {
    BType rhsType = getCandidateType(checkedExpr, type);
    if (rhsType == symTable.semanticError) {
        // Probe again without an expected type (rhsType is semanticError here, which
        // getCandidateType treats as "no expected type").
        rhsType = getCandidateType(checkedExpr, rhsType);
    }
    BType candidateLaxType = getCandidateLaxType(checkedExpr.expr, rhsType);
    if (!types.isLax(candidateLaxType)) {
        // Only lax values need the ensureType rewrite.
        return;
    }
    // Build the typedesc argument for ensureType from the expected type.
    ArrayList<BLangExpression> argExprs = new ArrayList<>();
    BType typedescType = new BTypedescType(expType, null);
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = expType;
    typedescExpr.setBType(typedescType);
    argExprs.add(typedescExpr);
    // Replace the checked sub-expression with the synthesized lang-lib invocation.
    BLangInvocation invocation = ASTBuilderUtil.createLangLibInvocationNode(FUNCTION_NAME_ENSURE_TYPE,
            argExprs, checkedExpr.expr, checkedExpr.pos);
    invocation.symbol = symResolver.lookupLangLibMethod(type,
            names.fromString(invocation.name.value));
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    checkedExpr.expr = invocation;
}
/**
 * For field access on a lax value the accessed result may include an error component;
 * strip it so the lax-ness of the underlying value is considered. Other expressions
 * keep their type as-is.
 */
private BType getCandidateLaxType(BLangNode expr, BType rhsType) {
    boolean isFieldAccess = expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR;
    return isFieldAccess ? types.getSafeType(rhsType, false, true) : rhsType;
}
/**
 * Speculatively type-checks a clone of the checked expression's sub-expression to discover
 * its type without emitting diagnostics or mutating the real AST node.
 * A {@code checkExprCandidateType} of semanticError means "check without an expected type".
 */
private BType getCandidateType(BLangCheckedExpr checkedExpr, BType checkExprCandidateType) {
    // Suppress diagnostics while probing; remember previous state so nested probes behave.
    boolean prevNonErrorLoggingCheck = this.nonErrorLoggingCheck;
    this.nonErrorLoggingCheck = true;
    int prevErrorCount = this.dlog.errorCount();
    this.dlog.resetErrorCount();
    this.dlog.mute();

    // Clone so the probe's type-setting does not pollute the original expression.
    checkedExpr.expr.cloneAttempt++;
    BLangExpression clone = nodeCloner.cloneNode(checkedExpr.expr);
    BType rhsType;
    if (checkExprCandidateType == symTable.semanticError) {
        rhsType = checkExpr(clone, env);
    } else {
        rhsType = checkExpr(clone, env, checkExprCandidateType);
    }

    // Restore diagnostic state; only unmute if this probe was the outermost one.
    this.nonErrorLoggingCheck = prevNonErrorLoggingCheck;
    this.dlog.setErrorCount(prevErrorCount);
    if (!prevNonErrorLoggingCheck) {
        this.dlog.unmute();
    }
    return rhsType;
}
/**
 * Returns the given type unchanged when it already contains an error component;
 * otherwise widens it to {@code type|error}.
 */
private BType addDefaultErrorIfNoErrorComponentFound(BType type) {
    boolean hasErrorComponent = false;
    for (BType memberType : types.getAllTypes(type)) {
        if (types.isAssignable(memberType, symTable.errorType)) {
            hasErrorComponent = true;
            break;
        }
    }
    return hasErrorComponent ? type : BUnionType.create(null, type, symTable.errorType);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // The type of a service constructor is the type of its declared service symbol.
    resultType = serviceConstructorExpr.serviceNode.symbol.type;
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    // Resolve the tested type onto the type node, type-check the tested expression,
    // and yield boolean (checked against the expected type).
    typeTestExpr.typeNode.setBType(symResolver.resolveTypeNode(typeTestExpr.typeNode, env));
    checkExpr(typeTestExpr.expr, env);
    resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
}
/**
 * Type-checks an annotation access expression (typedesc.@annot). The result type is
 * the annotation's attached value type (or `true` for valueless annotations), unioned with nil.
 */
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // The accessed expression must be a typedesc.
    checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
    BSymbol annotSymbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env,
            names.fromString(annotAccessExpr.pkgAlias.getValue()),
            names.fromString(annotAccessExpr.annotationName.getValue()));
    BType actualType;
    if (annotSymbol == this.symTable.notFoundSymbol) {
        this.dlog.error(annotAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_ANNOTATION,
                annotAccessExpr.annotationName.getValue());
        actualType = symTable.semanticError;
    } else {
        BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) annotSymbol;
        annotAccessExpr.annotationSymbol = annotationSymbol;
        BType annotType = annotationSymbol.attachedType == null ? symTable.trueType
                : annotationSymbol.attachedType.type;
        // Access yields the annotation value, or nil when the annotation is absent.
        actualType = BUnionType.create(null, annotType, symTable.nilType);
    }
    this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
}
/**
 * Returns true when the expression is a kind that may appear as a variable reference
 * in a binding pattern; otherwise logs an invalid-binding-pattern error and returns false.
 */
private boolean isValidVariableReference(BLangExpression varRef) {
    switch (varRef.getKind()) {
        // All reference kinds that can legally be assigned through.
        case SIMPLE_VARIABLE_REF:
        case RECORD_VARIABLE_REF:
        case TUPLE_VARIABLE_REF:
        case ERROR_VARIABLE_REF:
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
        case XML_ATTRIBUTE_ACCESS_EXPR:
            return true;
        default:
            dlog.error(varRef.pos, DiagnosticErrorCode.INVALID_RECORD_BINDING_PATTERN, varRef.getBType());
            return false;
    }
}
/**
 * Resolves `readonly` in the target type to the effective immutable intersection of the
 * expected type, where a selectively-immutable intersection can be formed. Handles both a
 * bare `readonly` target and a union containing a `readonly` member.
 */
private BType getEffectiveReadOnlyType(Location pos, BType origTargetType) {
    if (origTargetType == symTable.readonlyType) {
        // Inherently immutable (or not selectively immutable) expected types keep plain `readonly`.
        if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
            return origTargetType;
        }
        return ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
                (SelectivelyImmutableReferenceType) expType,
                env, symTable, anonymousModelHelper, names,
                new HashSet<>());
    }
    if (origTargetType.tag != TypeTags.UNION) {
        return origTargetType;
    }
    // For unions: collect non-readonly members and note whether `readonly` was present.
    boolean hasReadOnlyType = false;
    LinkedHashSet<BType> nonReadOnlyTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) origTargetType).getMemberTypes()) {
        if (memberType == symTable.readonlyType) {
            hasReadOnlyType = true;
            continue;
        }
        nonReadOnlyTypes.add(memberType);
    }
    if (!hasReadOnlyType) {
        return origTargetType;
    }
    if (types.isInherentlyImmutableType(expType) || !types.isSelectivelyImmutableType(expType)) {
        return origTargetType;
    }
    // Rebuild the union with the readonly member replaced by the immutable intersection.
    BUnionType nonReadOnlyUnion = BUnionType.create(null, nonReadOnlyTypes);
    nonReadOnlyUnion.add(ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
            (SelectivelyImmutableReferenceType)
                    expType,
            env, symTable, anonymousModelHelper,
            names, new HashSet<>()));
    return nonReadOnlyUnion;
}
/**
 * Defines the arrow function's parameters in a fresh env and type-checks its body
 * expression against the expected return type.
 *
 * @return the checked type of the body expression
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowEnv));
    return checkExpr(bLangArrowFunction.body.expr, arrowEnv, expectedRetType);
}
/**
 * Stamps the inferred parameter types (from the expected function type) onto the arrow
 * function's parameters, synthesizing value-type nodes for them. On an arity mismatch,
 * logs an error and marks every parameter as semanticError.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    int declaredParamCount = bLangArrowFunction.params.size();
    if (paramTypes.size() != declaredParamCount) {
        dlog.error(bLangArrowFunction.pos,
                DiagnosticErrorCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                paramTypes.size(), declaredParamCount);
        resultType = symTable.semanticError;
        for (BLangSimpleVariable param : bLangArrowFunction.params) {
            param.setBType(symTable.semanticError);
        }
        return;
    }
    for (int idx = 0; idx < declaredParamCount; idx++) {
        BLangSimpleVariable param = bLangArrowFunction.params.get(idx);
        BType inferredType = paramTypes.get(idx);
        // Synthesize a type node so downstream phases see an explicitly typed parameter.
        BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        typeNode.setTypeKind(inferredType.getKind());
        typeNode.pos = symTable.builtinPos;
        param.setTypeNode(typeNode);
        param.setBType(inferredType);
    }
}
/**
 * Reports an error when a variable's initializer references the variable being defined.
 */
private void checkSelfReferences(Location pos, SymbolEnv env, BVarSymbol varSymbol) {
    if (env.enclVarSym != varSymbol) {
        return;
    }
    dlog.error(pos, DiagnosticErrorCode.SELF_REFERENCE_VAR, varSymbol.name);
}
/**
 * Builds a mutable list holding {@code count} semantic-error placeholder types.
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    for (int remaining = count; remaining > 0; remaining--) {
        errorTypes.add(symTable.semanticError);
    }
    return errorTypes;
}
/**
 * Resolves and type-checks a plain (possibly module-qualified) function invocation.
 * Resolution order: main symbol space (variables / root-package names), then the
 * constructor space as a fallback.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
    Name funcName = names.fromIdNode(iExpr.name);
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    BSymbol funcSymbol = symTable.notFoundSymbol;

    BSymbol pkgSymbol = symResolver.resolvePrefixSymbol(env, pkgAlias, getCurrentCompUnit(iExpr));
    if (pkgSymbol == symTable.notFoundSymbol) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias);
    } else {
        if (funcSymbol == symTable.notFoundSymbol) {
            BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
                funcSymbol = symbol;
            }
            // Root-package symbols resolve via the VARIABLE_NAME tag.
            if (symTable.rootPkgSymbol.pkgID.equals(symbol.pkgID) &&
                    (symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
                funcSymbol = symbol;
            }
        }
        // Fall back to the constructor space when nothing (or only a type) was found.
        if (funcSymbol == symTable.notFoundSymbol || ((funcSymbol.tag & SymTag.TYPE) == SymTag.TYPE)) {
            BSymbol ctor = symResolver.lookupConstructorSpaceSymbolInPackage(iExpr.pos, env, pkgAlias, funcName);
            funcSymbol = ctor != symTable.notFoundSymbol ? ctor : funcSymbol;
        }
    }

    if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
        if (!missingNodesHelper.isMissingNode(funcName)) {
            dlog.error(iExpr.pos, DiagnosticErrorCode.UNDEFINED_FUNCTION, funcName);
        }
        // Still type-check the arguments so nested errors surface.
        iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
        resultType = symTable.semanticError;
        return;
    }
    if (isFunctionPointer(funcSymbol)) {
        iExpr.functionPointerInvocation = true;
        markAndRegisterClosureVariable(funcSymbol, iExpr.pos, env);
    }
    // remote / resource functions cannot be invoked with plain call syntax.
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    boolean langLibPackageID = PackageID.isLangLibPackageID(pkgSymbol.pkgID);
    if (langLibPackageID) {
        // Lang-lib calls get a dedicated invocation env (for type-param resolution).
        this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);

    if (langLibPackageID && !iExpr.argExprs.isEmpty()) {
        // First arg is the receiver of a lang-lib call; reject mutating calls on immutable values.
        checkInvalidImmutableValueUpdate(iExpr, iExpr.argExprs.get(0).getBType(), funcSymbol);
    }
}
/**
 * Marks the given symbol as a closure variable and registers it on the enclosing
 * lambda, arrow function, or record-type env when it is captured from an outer scope.
 */
protected void markAndRegisterClosureVariable(BSymbol symbol, Location pos, SymbolEnv env) {
    BLangInvokableNode encInvokable = env.enclInvokable;
    // Already a closure, or a module-level symbol referenced outside a capturing context: nothing to do.
    if (symbol.closure || (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
            env.node.getKind() != NodeKind.ARROW_EXPR && env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY &&
            encInvokable != null && !encInvokable.flagSet.contains(Flag.LAMBDA)) {
        return;
    }
    // Case 1: captured by a lambda (and not one of the lambda's own parameters).
    if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA)
            && !isFunctionArgument(symbol, encInvokable.requiredParams)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
    // Case 2: captured by an arrow function expression.
    if (env.node.getKind() == NodeKind.ARROW_EXPR
            && !isFunctionArgument(symbol, ((BLangArrowFunction) env.node).params)) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol) {
            resolvedSymbol.closure = true;
            ((BLangArrowFunction) env.node).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
    // Case 3: captured inside a record type's default-value expressions.
    if (env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE) {
        SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, (BLangRecordTypeNode) env.enclType);
        BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name, SymTag.VARIABLE);
        if (resolvedSymbol != symTable.notFoundSymbol && encInvokable != null &&
                !encInvokable.flagSet.contains(Flag.ATTACHED)) {
            resolvedSymbol.closure = true;
            ((BLangFunction) encInvokable).closureVarSymbols.add(new ClosureVarSymbol(resolvedSymbol, pos));
        }
    }
}
/**
 * True when the symbol is neither a function, a constructor, nor a function pointer.
 */
private boolean isNotFunction(BSymbol funcSymbol) {
    boolean callableSymbol = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION
            || (funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR;
    return !callableSymbol && !isFunctionPointer(funcSymbol);
}
/**
 * True when the symbol is a non-native, function-kinded variable (a function pointer)
 * rather than a real function symbol.
 */
private boolean isFunctionPointer(BSymbol funcSymbol) {
    if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
        // A real function symbol is not a pointer.
        return false;
    }
    boolean functionKindedVariable = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE
            && funcSymbol.kind == SymbolKind.FUNCTION;
    return functionKindedVariable && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE;
}
/**
 * Type-checks the named detail arguments of an error constructor against the expected
 * detail type. Each argument is first probed on a clone so a failing target type does
 * not pollute the real node; on failure the real node is checked without a target.
 *
 * @return the checked named arguments, in source order
 */
private List<BLangNamedArgsExpression> checkProvidedErrorDetails(BLangErrorConstructorExpr errorConstructorExpr,
                                                                 BType expectedType) {
    List<BLangNamedArgsExpression> checkedArgs = new ArrayList<>();
    for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
        BType targetType = getErrorCtorNamedArgTargetType(namedArg, expectedType);
        // Dry-run on a clone first.
        BLangNamedArgsExpression probe = nodeCloner.cloneNode(namedArg);
        BType probedType = checkExpr(probe, env, targetType);
        if (probedType == symTable.semanticError) {
            checkExpr(namedArg, env);
        } else {
            checkExpr(namedArg, env, targetType);
        }
        checkedArgs.add(namedArg);
    }
    return checkedArgs;
}
/**
 * Determines the expected type for a named detail argument of an error constructor:
 * the map constraint, the matching record field's type, or the record's rest field type
 * (logging an invalid-rest-detail error for open records with declared fields).
 */
private BType getErrorCtorNamedArgTargetType(BLangNamedArgsExpression namedArgsExpression, BType expectedType) {
    if (expectedType == symTable.semanticError) {
        return symTable.semanticError;
    }
    switch (expectedType.tag) {
        case TypeTags.MAP:
            return ((BMapType) expectedType).constraint;
        case TypeTags.RECORD:
            break;
        default:
            return symTable.semanticError;
    }
    BRecordType recordType = (BRecordType) expectedType;
    BField matchingField = recordType.fields.get(namedArgsExpression.name.value);
    if (matchingField != null) {
        return matchingField.type;
    }
    // Sealed records cannot absorb unknown detail args.
    if (recordType.sealed) {
        return symTable.noType;
    }
    if (!recordType.fields.isEmpty()) {
        dlog.error(namedArgsExpression.pos, DiagnosticErrorCode.INVALID_REST_DETAIL_ARG, namedArgsExpression.name,
                recordType);
    }
    return recordType.restFieldType;
}
/**
 * Type-checks a method invocation on an object value, falling back to lang-lib
 * methods when no attached method matches.
 */
private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
    // Service methods may only be invoked on `self`.
    if (objectType.getKind() == TypeKind.SERVICE &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.SERVICE_FUNCTION_INVALID_INVOCATION);
        return;
    }
    // Attached methods are stored under a mangled "<type>.<method>" name.
    Name funcName =
            names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
    BSymbol funcSymbol =
            symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
    if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
        // No attached method: try lang-lib methods (which also wire the invocation on success).
        if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
            dlog.error(iExpr.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, iExpr.name.value,
                    objectType);
            resultType = symTable.semanticError;
            return;
        }
    } else {
        iExpr.symbol = funcSymbol;
    }
    // `init` may only be called explicitly on `self`.
    if (iExpr.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value) &&
            !(iExpr.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
                    (Names.SELF.equals(((BLangSimpleVarRef) iExpr.expr).symbol.name)))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_INIT_INVOCATION);
    }
    // remote methods require action-invocation syntax; resource methods cannot be called like this.
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_SYNTAX, iExpr.name.value);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Type-checks a remote action invocation (client->method()) or an async start
 * invocation on an object, validating the receiver and the resolved remote method.
 */
private void checkActionInvocation(BLangInvocation.BLangActionInvocation aInv, BObjectType expType) {
    BLangValueExpression varRef = (BLangValueExpression) aInv.expr;
    // Action syntax requires an endpoint (client) receiver, unless this is an async start.
    if (((varRef.symbol.tag & SymTag.ENDPOINT) != SymTag.ENDPOINT) && !aInv.async) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION, varRef.getBType());
        this.resultType = symTable.semanticError;
        aInv.symbol = symTable.notFoundSymbol;
        return;
    }
    BVarSymbol epSymbol = (BVarSymbol) varRef.symbol;
    // Remote methods are stored under a mangled "<type>.<method>" name.
    Name remoteMethodQName = names
            .fromString(Symbols.getAttachedFuncSymbolName(expType.tsymbol.name.value, aInv.name.value));
    Name actionName = names.fromIdNode(aInv.name);
    BSymbol remoteFuncSymbol = symResolver
            .lookupMemberSymbol(aInv.pos, epSymbol.type.tsymbol.scope, env, remoteMethodQName, SymTag.FUNCTION);
    if (remoteFuncSymbol == symTable.notFoundSymbol && !checkLangLibMethodInvocationExpr(aInv, expType)) {
        dlog.error(aInv.name.pos, DiagnosticErrorCode.UNDEFINED_METHOD_IN_OBJECT, aInv.name.value, expType);
        resultType = symTable.semanticError;
        return;
    }
    // Non-remote methods cannot use -> syntax (async start is exempt).
    if (!Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) && !aInv.async) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_METHOD_INVOCATION_SYNTAX, actionName);
        this.resultType = symTable.semanticError;
        return;
    }
    // Remote client-method calls must not have a never-containing required return type.
    if (Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE) &&
            Symbols.isFlagOn(expType.flags, Flags.CLIENT) &&
            types.isNeverTypeOrStructureTypeWithARequiredNeverMember
                    ((BType) ((InvokableSymbol) remoteFuncSymbol).getReturnType())) {
        dlog.error(aInv.pos, DiagnosticErrorCode.INVALID_CLIENT_REMOTE_METHOD_CALL);
    }
    aInv.symbol = remoteFuncSymbol;
    checkInvocationParamAndReturnType(aInv);
}
// Returns true when a lang-lib method matching the invocation exists for the given type.
// NOTE: getLangLibMethod also rewires the invocation node as a lang-lib call on success.
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
    return getLangLibMethod(iExpr, bType) != symTable.notFoundSymbol;
}
/**
 * Looks up a lang-lib method for the invocation; when found, rewires the invocation
 * as a lang-lib call (the receiver becomes the first argument) and type-checks it.
 *
 * @return the resolved lang-lib method symbol, or notFoundSymbol
 */
private BSymbol getLangLibMethod(BLangInvocation iExpr, BType bType) {
    Name funcName = names.fromString(iExpr.name.value);
    BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName);

    if (funcSymbol == symTable.notFoundSymbol) {
        return symTable.notFoundSymbol;
    }

    iExpr.symbol = funcSymbol;
    iExpr.langLibInvocation = true;
    // Check in a dedicated invocation env; restore the enclosing env afterwards.
    SymbolEnv enclEnv = this.env;
    this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    // The receiver expression is passed as the first positional argument.
    iExpr.argExprs.add(0, iExpr.expr);
    checkInvocationParamAndReturnType(iExpr);
    this.env = enclEnv;

    return funcSymbol;
}
/**
 * Validates the invocation's arguments and reconciles the computed return type
 * with the expected type, storing the outcome in {@code resultType}.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    resultType = types.checkType(iExpr, checkInvocationParam(iExpr), this.expType);
}
/**
 * Picks the single open included-record parameter that may absorb additional named
 * arguments. Returns null when there is not exactly one candidate, or when the
 * candidate's fields do not cover every required parameter name.
 */
private BVarSymbol incRecordParamAllowAdditionalFields(List<BVarSymbol> openIncRecordParams,
                                                       Set<String> requiredParamNames) {
    if (openIncRecordParams.size() != 1) {
        return null;
    }
    BVarSymbol candidate = openIncRecordParams.get(0);
    LinkedHashMap<String, BField> candidateFields = ((BRecordType) candidate.type).fields;
    for (String requiredName : requiredParamNames) {
        if (!candidateFields.containsKey(requiredName)) {
            return null;
        }
    }
    return candidate;
}
/**
 * Collects the named-arg-addressable fields of included-record parameters into
 * {@code incRecordParams}, and returns the single open included-record parameter
 * (all fields `never`, open rest type) that may absorb additional named args, if any.
 */
private BVarSymbol checkForIncRecordParamAllowAdditionalFields(BInvokableSymbol invokableSymbol,
                                                               List<BVarSymbol> incRecordParams) {
    Set<String> requiredParamNames = new HashSet<>();
    List<BVarSymbol> openIncRecordParams = new ArrayList<>();
    for (BVarSymbol param : invokableSymbol.params) {
        boolean includedRecordParam = Symbols.isFlagOn(Flags.asMask(param.getFlags()), Flags.INCLUDED)
                && param.type.getKind() == TypeKind.RECORD;
        if (!includedRecordParam) {
            requiredParamNames.add(param.name.value);
            continue;
        }
        BRecordType paramRecordType = (BRecordType) param.type;
        LinkedHashMap<String, BField> fields = paramRecordType.fields;
        // Non-never fields become addressable by name; a record with only never
        // fields and an open rest type is an "open" candidate.
        boolean onlyNeverFields = true;
        for (String fieldName : fields.keySet()) {
            BField field = fields.get(fieldName);
            if (field.symbol.type.tag != TypeTags.NEVER) {
                onlyNeverFields = false;
                incRecordParams.add(field.symbol);
                requiredParamNames.add(fieldName);
            }
        }
        if (onlyNeverFields && paramRecordType.restFieldType != symTable.noType) {
            openIncRecordParams.add(param);
        }
    }
    return incRecordParamAllowAdditionalFields(openIncRecordParams, requiredParamNames);
}
/**
 * Distributes the invocation's argument expressions into positional (requiredArgs /
 * restArgs), named (requiredArgs) and vararg buckets, validating arity and ordering,
 * then delegates detailed type checking to checkInvocationArgs.
 *
 * @return the invocation's return type, or semanticError / noType on failure
 */
private BType checkInvocationParam(BLangInvocation iExpr) {
    if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
        // A value of type `function` (any-function) cannot be invoked directly.
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
        return symTable.semanticError;
    }
    if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
        return symTable.noType;
    }
    BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
    List<BType> paramTypes = ((BInvokableType) invokableSymbol.type).getParameterTypes();
    // Fields of included-record params are also addressable by named args.
    List<BVarSymbol> incRecordParams = new ArrayList<>();
    BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
            incRecordParams);
    int parameterCountForPositionalArgs = paramTypes.size();
    int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
    iExpr.requiredArgs = new ArrayList<>();
    // An included-record param with at least one non-never field cannot itself be named
    // directly, so it does not count towards the named-arg limit.
    for (BVarSymbol symbol : invokableSymbol.params) {
        if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
                symbol.type.tag != TypeTags.RECORD) {
            continue;
        }
        LinkedHashMap<String, BField> fields = ((BRecordType) symbol.type).fields;
        if (fields.isEmpty()) {
            continue;
        }
        for (String field : fields.keySet()) {
            if (fields.get(field).type.tag != TypeTags.NEVER) {
                parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
                break;
            }
        }
    }
    // Distribute argument expressions by kind, enforcing positional-before-named ordering.
    int i = 0;
    BLangExpression vararg = null;
    boolean foundNamedArg = false;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                foundNamedArg = true;
                if (i < parameterCountForNamedArgs || incRecordParamAllowAdditionalFields != null) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                }
                i++;
                break;
            case REST_ARGS_EXPR:
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                    continue;
                }
                vararg = expr;
                break;
            default: // positional argument
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                }
                if (i < parameterCountForPositionalArgs) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    // Excess positional args are candidates for the rest parameter.
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }
    return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
            incRecordParamAllowAdditionalFields);
}
/**
 * Type-checks the distributed arguments of an invocation against the invokable's
 * parameters: positional args in order, named args by name (including included-record
 * fields), vararg expansion over remaining params, and rest args against the rest
 * parameter.
 *
 * @return the invocation's return type (wrapped in future<> for async action
 *         invocations), or semanticError on failure
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg,
                                  List<BVarSymbol> incRecordParams,
                                  BVarSymbol incRecordParamAllowAdditionalFields) {
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
    BInvokableType bInvokableType = (BInvokableType) invokableSymbol.type;
    BInvokableTypeSymbol invokableTypeSymbol = (BInvokableTypeSymbol) bInvokableType.tsymbol;
    List<BVarSymbol> nonRestParams = new ArrayList<>(invokableTypeSymbol.params);

    List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
    List<BVarSymbol> valueProvidedParams = new ArrayList<>();

    // Track params (and required included-record fields) still awaiting a value.
    List<BVarSymbol> requiredParams = new ArrayList<>();
    List<BVarSymbol> requiredIncRecordParams = new ArrayList<>();

    for (BVarSymbol nonRestParam : nonRestParams) {
        if (nonRestParam.isDefaultable) {
            continue;
        }
        requiredParams.add(nonRestParam);
    }

    for (BVarSymbol incRecordParam : incRecordParams) {
        if (Symbols.isFlagOn(Flags.asMask(incRecordParam.getFlags()), Flags.REQUIRED)) {
            requiredIncRecordParams.add(incRecordParam);
        }
    }

    // Match positional args in order, then named args by parameter name.
    int i = 0;
    for (; i < nonRestArgs.size(); i++) {
        BLangExpression arg = nonRestArgs.get(i);

        // The receiver of a lang-lib call is already type-checked; only verify assignability.
        if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
            BType expectedType = paramTypes.get(i);
            types.checkType(arg.pos, arg.getBType(), expectedType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
            types.setImplicitCastExpr(arg, arg.getBType(), expectedType);
        }

        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional argument.
            if (i < nonRestParams.size()) {
                BVarSymbol param = nonRestParams.get(i);
                checkTypeParamExpr(arg, this.env, param.type, iExpr.langLibInvocation);
                valueProvidedParams.add(param);
                requiredParams.remove(param);
                continue;
            }
            // More positional args than non-rest params; the rest are handled below.
            break;
        }

        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            // Named argument: resolve the target parameter (or included-record field).
            BLangIdentifier argName = ((NamedArgNode) arg).getName();
            BVarSymbol varSym = checkParameterNameForDefaultArgument(argName, ((BLangNamedArgsExpression) arg).expr,
                    nonRestParams, incRecordParams, incRecordParamAllowAdditionalFields);
            if (varSym == null) {
                dlog.error(arg.pos, DiagnosticErrorCode.UNDEFINED_PARAMETER, argName);
                break;
            }
            requiredParams.remove(varSym);
            requiredIncRecordParams.remove(varSym);
            if (valueProvidedParams.contains(varSym)) {
                dlog.error(arg.pos, DiagnosticErrorCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                continue;
            }
            checkTypeParamExpr(arg, this.env, varSym.type, iExpr.langLibInvocation);
            valueProvidedParams.add(varSym);
        }
    }

    BVarSymbol restParam = invokableTypeSymbol.restParam;

    boolean errored = false;

    // Required params left unprovided (a vararg may still cover them).
    if (!requiredParams.isEmpty() && vararg == null) {
        // Included-record params are reported via their required fields instead.
        for (BVarSymbol requiredParam : requiredParams) {
            if (!Symbols.isFlagOn(Flags.asMask(requiredParam.getFlags()), Flags.INCLUDED)) {
                dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER, requiredParam.name,
                        iExpr.name.value);
                errored = true;
            }
        }
    }

    // Required included-record fields left unprovided.
    if (!requiredIncRecordParams.isEmpty() && !requiredParams.isEmpty()) {
        for (BVarSymbol requiredIncRecordParam : requiredIncRecordParams) {
            for (BVarSymbol requiredParam : requiredParams) {
                if (requiredParam.type == requiredIncRecordParam.owner.type) {
                    dlog.error(iExpr.pos, DiagnosticErrorCode.MISSING_REQUIRED_PARAMETER,
                            requiredIncRecordParam.name, iExpr.name.value);
                    errored = true;
                }
            }
        }
    }

    // Surplus args without a rest parameter to receive them.
    if (restParam == null &&
            (!iExpr.restArgs.isEmpty() ||
                    (vararg != null && valueProvidedParams.size() == nonRestParams.size()))) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        errored = true;
    }

    if (errored) {
        return symTable.semanticError;
    }

    BType listTypeRestArg = restParam == null ? null : restParam.type;
    BRecordType mappingTypeRestArg = null;

    // When a vararg must also cover remaining non-rest params, build synthetic types for
    // it: a tuple (positional view) and a closed record (named view) of those params.
    if (vararg != null && nonRestArgs.size() < nonRestParams.size()) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        List<BType> tupleMemberTypes = new ArrayList<>();
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, null, VIRTUAL);
        mappingTypeRestArg = new BRecordType(recordSymbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        BType tupleRestType = null;
        BVarSymbol fieldSymbol;

        for (int j = nonRestArgs.size(); j < nonRestParams.size(); j++) {
            BType paramType = paramTypes.get(j);
            BVarSymbol nonRestParam = nonRestParams.get(j);
            Name paramName = nonRestParam.name;
            tupleMemberTypes.add(paramType);
            boolean required = requiredParams.contains(nonRestParam);
            fieldSymbol = new BVarSymbol(Flags.asMask(new HashSet<Flag>() {{
                add(required ? Flag.REQUIRED : Flag.OPTIONAL); }}), paramName,
                    pkgID, paramType, recordSymbol, null, VIRTUAL);
            fields.put(paramName.value, new BField(paramName, null, fieldSymbol));
        }

        // Fold the declared rest parameter's element types into the synthetic tuple.
        if (listTypeRestArg != null) {
            if (listTypeRestArg.tag == TypeTags.ARRAY) {
                tupleRestType = ((BArrayType) listTypeRestArg).eType;
            } else if (listTypeRestArg.tag == TypeTags.TUPLE) {
                BTupleType restTupleType = (BTupleType) listTypeRestArg;
                tupleMemberTypes.addAll(restTupleType.tupleTypes);
                if (restTupleType.restType != null) {
                    tupleRestType = restTupleType.restType;
                }
            }
        }

        BTupleType tupleType = new BTupleType(tupleMemberTypes);
        tupleType.restType = tupleRestType;
        listTypeRestArg = tupleType;
        mappingTypeRestArg.sealed = true;
        mappingTypeRestArg.restFieldType = symTable.noType;
        mappingTypeRestArg.fields = fields;
        recordSymbol.type = mappingTypeRestArg;
        mappingTypeRestArg.tsymbol = recordSymbol;
    }

    if (listTypeRestArg == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        dlog.error(iExpr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        return symTable.semanticError;
    }

    BType restType = null;
    if (vararg != null && !iExpr.restArgs.isEmpty()) {
        // Both individual rest args and a trailing vararg: check each against the element type.
        BType elementType = ((BArrayType) listTypeRestArg).eType;

        for (BLangExpression restArg : iExpr.restArgs) {
            checkTypeParamExpr(restArg, this.env, elementType, true);
        }

        checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        iExpr.restArgs.add(vararg);
        restType = this.resultType;
    } else if (vararg != null) {
        iExpr.restArgs.add(vararg);
        if (mappingTypeRestArg != null) {
            // The vararg may satisfy remaining params positionally (tuple) or by name (record).
            LinkedHashSet<BType> restTypes = new LinkedHashSet<>();
            restTypes.add(listTypeRestArg);
            restTypes.add(mappingTypeRestArg);
            BType actualType = BUnionType.create(null, restTypes);
            checkTypeParamExpr(vararg, this.env, actualType, iExpr.langLibInvocation);
        } else {
            checkTypeParamExpr(vararg, this.env, listTypeRestArg, iExpr.langLibInvocation);
        }
        restType = this.resultType;
    } else if (!iExpr.restArgs.isEmpty()) {
        if (listTypeRestArg.tag == TypeTags.ARRAY) {
            // Array rest param: every rest arg checks against the element type.
            BType elementType = ((BArrayType) listTypeRestArg).eType;
            for (BLangExpression restArg : iExpr.restArgs) {
                checkTypeParamExpr(restArg, this.env, elementType, true);
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        } else {
            // Tuple rest param: positional member types first, then the tuple's rest type.
            BTupleType tupleType = (BTupleType) listTypeRestArg;
            List<BType> tupleMemberTypes = tupleType.tupleTypes;
            BType tupleRestType = tupleType.restType;

            int tupleMemCount = tupleMemberTypes.size();

            for (int j = 0; j < iExpr.restArgs.size(); j++) {
                BLangExpression restArg = iExpr.restArgs.get(j);
                BType memType = j < tupleMemCount ? tupleMemberTypes.get(j) : tupleRestType;
                checkTypeParamExpr(restArg, this.env, memType, true);
                if (restType != symTable.semanticError && this.resultType == symTable.semanticError) {
                    restType = this.resultType;
                }
            }
        }
    }

    BType retType = typeParamAnalyzer.getReturnTypeParams(env, bInvokableType.getReturnType());
    // Parameterized (dependent) return types of native functions unify against expType.
    if (restType != symTable.semanticError &&
            Symbols.isFlagOn(invokableSymbol.flags, Flags.NATIVE) &&
            Symbols.isFlagOn(retType.flags, Flags.PARAMETERIZED)) {
        retType = unifier.build(retType, expType, iExpr, types, symTable, dlog);
    }

    // lang-lib sort gets extra member-type / key-function validation.
    boolean langLibPackageID = PackageID.isLangLibPackageID(iExpr.symbol.pkgID);
    String sortFuncName = "sort";
    if (langLibPackageID && sortFuncName.equals(iExpr.name.value)) {
        checkArrayLibSortFuncArgs(iExpr);
    }

    // Async action invocations return future<retType>.
    if (iExpr instanceof ActionNode && ((BLangInvocation.BLangActionInvocation) iExpr).async) {
        return this.generateFutureType(invokableSymbol, retType);
    } else {
        return retType;
    }
}
/**
 * Validates the arguments of a lang-lib {@code sort} invocation: the array being sorted must have an
 * ordered member type, and when an explicit key function is supplied its return type must be ordered.
 *
 * @param iExpr the {@code sort} invocation to validate; errors are logged via {@code dlog}
 */
private void checkArrayLibSortFuncArgs(BLangInvocation iExpr) {
// Without an explicit key function (two args or fewer), the array's own member type must be ordered.
if (iExpr.argExprs.size() <= 2 && !types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
iExpr.argExprs.get(0).getBType());
}
// Only the three-argument form carries a key function to validate below.
if (iExpr.argExprs.size() != 3) {
return;
}
BLangExpression keyFunction = iExpr.argExprs.get(2);
BType keyFunctionType = keyFunction.getBType();
if (keyFunctionType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
// An explicit nil key function behaves like no key function: fall back to the member-type check.
if (keyFunctionType.tag == TypeTags.NIL) {
if (!types.isOrderedType(iExpr.argExprs.get(0).getBType(), false)) {
dlog.error(iExpr.argExprs.get(0).pos, DiagnosticErrorCode.INVALID_SORT_ARRAY_MEMBER_TYPE,
iExpr.argExprs.get(0).getBType());
}
return;
}
Location pos;
BType returnType;
// Pull the key function's return type (and the best position for error reporting) out of the
// three possible shapes the argument can take: a function reference, an arrow function, or a lambda.
if (keyFunction.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
pos = keyFunction.pos;
returnType = keyFunction.getBType().getReturnType();
} else if (keyFunction.getKind() == NodeKind.ARROW_EXPR) {
BLangArrowFunction arrowFunction = ((BLangArrowFunction) keyFunction);
pos = arrowFunction.body.expr.pos;
returnType = arrowFunction.body.expr.getBType();
if (returnType.tag == TypeTags.SEMANTIC_ERROR) {
return;
}
} else {
BLangLambdaFunction keyLambdaFunction = (BLangLambdaFunction) keyFunction;
pos = keyLambdaFunction.function.pos;
returnType = keyLambdaFunction.function.getBType().getReturnType();
}
if (!types.isOrderedType(returnType, false)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_SORT_FUNC_RETURN_TYPE, returnType);
}
}
/**
 * Resolves the parameter that a named argument refers to: first among the declared non-rest
 * parameters, then among the fields of included-record parameters, and finally — when an
 * included-record parameter allows additional fields — as a fresh virtual symbol checked
 * against that record's rest field type.
 *
 * @return the matching parameter symbol, a virtual symbol for an additional included-record
 *         field, or {@code null} when the name matches nothing
 */
private BVarSymbol checkParameterNameForDefaultArgument(BLangIdentifier argName, BLangExpression expr,
                                                        List<BVarSymbol> nonRestParams,
                                                        List<BVarSymbol> incRecordParams,
                                                        BVarSymbol incRecordParamAllowAdditionalFields) {
    String name = argName.value;
    // Declared non-rest parameters take precedence.
    for (BVarSymbol param : nonRestParams) {
        if (param.getName().value.equals(name)) {
            return param;
        }
    }
    // Next, fields contributed by included-record parameters.
    for (BVarSymbol param : incRecordParams) {
        if (param.getName().value.equals(name)) {
            return param;
        }
    }
    // Finally, an open included-record parameter may absorb the argument via its rest field.
    if (incRecordParamAllowAdditionalFields != null) {
        BRecordType incRecordType = (BRecordType) incRecordParamAllowAdditionalFields.type;
        checkExpr(expr, env, incRecordType.restFieldType);
        if (!incRecordType.fields.containsKey(name)) {
            return new BVarSymbol(0, names.fromIdNode(argName), null, symTable.noType, null, argName.pos, VIRTUAL);
        }
    }
    return null;
}
/**
 * Wraps the given return type in a future type for an async invocation; futures produced by
 * worker-start lambdas (names carrying the worker-lambda prefix) are flagged as such.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    String invokableName = invocableSymbol.name.value;
    boolean workerDerivative = invokableName.startsWith(WORKER_LAMBDA_VAR_PREFIX);
    return new BFutureType(TypeTags.FUTURE, retType, null, workerDerivative);
}
/**
 * Convenience overload: checks {@code arg} against {@code expectedType} using the argument's own
 * position for type-parameter diagnostics.
 */
private void checkTypeParamExpr(BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    Location argPos = arg.pos;
    checkTypeParamExpr(argPos, arg, env, expectedType, inferTypeForNumericLiteral);
}
/**
 * Checks an argument expression against an expected type that may contain type parameters.
 * When the environment requires type params and the expression's type should be inferred
 * (constructors, arrows, optionally numeric literals), the expression is checked against the
 * matching bound type and the inferred result is recorded for the type parameter; otherwise a
 * plain check is done and the expression's resulting type is recorded.
 */
private void checkTypeParamExpr(Location pos, BLangExpression arg, SymbolEnv env, BType expectedType,
                                boolean inferTypeForNumericLiteral) {
    if (typeParamAnalyzer.notRequireTypeParams(env)) {
        checkExpr(arg, env, expectedType);
        return;
    }
    if (!requireTypeInference(arg, inferTypeForNumericLiteral)) {
        checkExpr(arg, env, expectedType);
        typeParamAnalyzer.checkForTypeParamsInArg(pos, arg.getBType(), this.env, expectedType);
        return;
    }
    // Inference path: check against the bound type and feed the inferred type back.
    BType boundType = typeParamAnalyzer.getMatchingBoundType(expectedType, env);
    BType inferredType = checkExpr(arg, env, boundType);
    typeParamAnalyzer.checkForTypeParamsInArg(pos, inferredType, this.env, expectedType);
}
/**
 * Decides whether an argument expression's type should be inferred from the expected type:
 * arrows and list/record constructors always are; numeric literals only when requested;
 * grouped expressions defer to their inner expression.
 */
private boolean requireTypeInference(BLangExpression expr, boolean inferTypeForNumericLiteral) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.GROUP_EXPR) {
        // Parentheses are transparent: decide based on the wrapped expression.
        return requireTypeInference(((BLangGroupExpr) expr).expression, inferTypeForNumericLiteral);
    }
    if (kind == NodeKind.NUMERIC_LITERAL) {
        return inferTypeForNumericLiteral;
    }
    return kind == NodeKind.ARROW_EXPR
            || kind == NodeKind.LIST_CONSTRUCTOR_EXPR
            || kind == NodeKind.RECORD_LITERAL_EXPR;
}
/**
 * Type-checks one field of a mapping constructor (record/map literal) against the expected
 * mapping type: resolves the type expected for the field's value, applies readonly-field rules,
 * and finally checks the value expression against the resolved field type.
 *
 * @param field       a key-value field, a var-name field, or a spread-op field
 * @param mappingType the record or map type the constructor is being checked against
 * @return the checked type of the field's value; spread fields return {@code noType} on success
 *         or {@code semanticError} on failure (they have no single value expression result)
 */
private BType checkMappingField(RecordLiteralNode.RecordField field, BType mappingType) {
BType fieldType = symTable.semanticError;
boolean keyValueField = field.isKeyValueField();
boolean spreadOpField = field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP;
boolean readOnlyConstructorField = false;
String fieldName = null;
Location pos = null;
BLangExpression valueExpr = null;
if (keyValueField) {
valueExpr = ((BLangRecordKeyValueField) field).valueExpr;
} else if (!spreadOpField) {
// A var-name field (e.g. `{x}`) is itself the value expression.
valueExpr = (BLangRecordVarNameField) field;
}
switch (mappingType.tag) {
case TypeTags.RECORD:
if (keyValueField) {
BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
BLangRecordKey key = keyValField.key;
fieldType = checkRecordLiteralKeyExpr(key.expr, key.computedKey, (BRecordType) mappingType);
readOnlyConstructorField = keyValField.readonly;
pos = key.expr.pos;
fieldName = getKeyValueFieldName(keyValField);
} else if (spreadOpField) {
// Spread into a record: every field carried by the spread expression must be assignable
// to the corresponding target field; returns early, skipping the common tail below.
BLangExpression spreadExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
checkExpr(spreadExpr, this.env);
BType spreadExprType = spreadExpr.getBType();
if (spreadExprType.tag == TypeTags.MAP) {
// A map spread is checked as one unit: its constraint against the union of all field types.
return types.checkType(spreadExpr.pos, ((BMapType) spreadExprType).constraint,
getAllFieldType((BRecordType) mappingType),
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
if (spreadExprType.tag != TypeTags.RECORD) {
dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
spreadExprType);
return symTable.semanticError;
}
boolean errored = false;
for (BField bField : ((BRecordType) spreadExprType).fields.values()) {
BType specFieldType = bField.type;
BType expectedFieldType = checkRecordLiteralKeyByName(spreadExpr.pos, this.env, bField.name,
(BRecordType) mappingType);
if (expectedFieldType != symTable.semanticError &&
!types.isAssignable(specFieldType, expectedFieldType)) {
dlog.error(spreadExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_FIELD,
expectedFieldType, bField.name, specFieldType);
if (!errored) {
errored = true;
}
}
}
return errored ? symTable.semanticError : symTable.noType;
} else {
BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
fieldType = checkRecordLiteralKeyExpr(varNameField, false, (BRecordType) mappingType);
readOnlyConstructorField = varNameField.readonly;
pos = varNameField.pos;
fieldName = getVarNameFieldName(varNameField);
}
break;
case TypeTags.MAP:
if (spreadOpField) {
// Spread into a map: reduce the spread operand to one representative member type and
// check it against the map constraint; returns early like the record-spread case.
BLangExpression spreadExp = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
BType spreadOpType = checkExpr(spreadExp, this.env);
BType spreadOpMemberType;
switch (spreadOpType.tag) {
case TypeTags.RECORD:
List<BType> types = new ArrayList<>();
BRecordType recordType = (BRecordType) spreadOpType;
for (BField recField : recordType.fields.values()) {
types.add(recField.type);
}
if (!recordType.sealed) {
types.add(recordType.restFieldType);
}
spreadOpMemberType = getRepresentativeBroadType(types);
break;
case TypeTags.MAP:
spreadOpMemberType = ((BMapType) spreadOpType).constraint;
break;
default:
dlog.error(spreadExp.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES_SPREAD_OP,
spreadOpType);
return symTable.semanticError;
}
return types.checkType(spreadExp.pos, spreadOpMemberType, ((BMapType) mappingType).constraint,
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
boolean validMapKey;
if (keyValueField) {
BLangRecordKeyValueField keyValField = (BLangRecordKeyValueField) field;
BLangRecordKey key = keyValField.key;
validMapKey = checkValidJsonOrMapLiteralKeyExpr(key.expr, key.computedKey);
readOnlyConstructorField = keyValField.readonly;
pos = key.pos;
fieldName = getKeyValueFieldName(keyValField);
} else {
BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
validMapKey = checkValidJsonOrMapLiteralKeyExpr(varNameField, false);
readOnlyConstructorField = varNameField.readonly;
pos = varNameField.pos;
fieldName = getVarNameFieldName(varNameField);
}
// Every valid map key maps to the map's constraint type.
fieldType = validMapKey ? ((BMapType) mappingType).constraint : symTable.semanticError;
break;
}
// A `readonly` field in the constructor must have an immutable (or immutable-convertible) type.
if (readOnlyConstructorField) {
if (types.isSelectivelyImmutableType(fieldType)) {
fieldType =
ImmutableTypeCloner.getImmutableIntersectionType(pos, types,
(SelectivelyImmutableReferenceType) fieldType,
env, symTable, anonymousModelHelper, names,
new HashSet<>());
} else if (!types.isInherentlyImmutableType(fieldType)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_READONLY_MAPPING_FIELD, fieldName, fieldType);
fieldType = symTable.semanticError;
}
}
if (spreadOpField) {
valueExpr = ((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr;
}
// In non-error-logging (exploratory) mode, check a clone so the original AST stays untouched.
BLangExpression exprToCheck = valueExpr;
if (this.nonErrorLoggingCheck) {
exprToCheck = nodeCloner.cloneNode(valueExpr);
} else {
((BLangNode) field).setBType(fieldType);
}
return checkExpr(exprToCheck, this.env, fieldType);
}
/**
 * Resolves the value type expected for a record-literal key. A computed key must be a string and
 * may address any field, so it yields the union of all field types (plus the rest field type for
 * open records); an identifier or string-literal key resolves to that specific field's type.
 *
 * @return the expected value type, or {@code semanticError} for an invalid key
 */
private BType checkRecordLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey, BRecordType recordType) {
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        if (keyExpr.getBType() == symTable.semanticError) {
            return symTable.semanticError;
        }
        // Any field could be addressed at runtime: union all possible value types.
        LinkedHashSet<BType> fieldTypes = new LinkedHashSet<>();
        for (BField field : recordType.fields.values()) {
            fieldTypes.add(field.type);
        }
        if (recordType.restFieldType.tag != TypeTags.NONE) {
            fieldTypes.add(recordType.restFieldType);
        }
        return BUnionType.create(null, fieldTypes);
    }
    Name fieldName;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        fieldName = names.fromIdNode(((BLangSimpleVarRef) keyExpr).variableName);
    } else if (keyKind == NodeKind.LITERAL && keyExpr.getBType().tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
        return symTable.semanticError;
    }
    return checkRecordLiteralKeyByName(keyExpr.pos, this.env, fieldName, recordType);
}
/**
 * Looks up the type of a record field by name. A declared field yields its type; an undeclared
 * name is accepted via the rest field type on open records and is an error on sealed ones.
 */
private BType checkRecordLiteralKeyByName(Location location, SymbolEnv env, Name key,
                                          BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(location, env, key, recordType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        return fieldSymbol.type;
    }
    if (!recordType.sealed) {
        // Open record: unknown names fall through to the rest field type.
        return recordType.restFieldType;
    }
    dlog.error(location, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, key,
               recordType.tsymbol.type.getKind().typeName(), recordType);
    return symTable.semanticError;
}
/**
 * Builds the union of all value types a member of the given record can have: every declared
 * field type plus the rest field type when one is present.
 */
private BType getAllFieldType(BRecordType recordType) {
    LinkedHashSet<BType> possibleTypes = recordType.fields.values().stream()
            .map(field -> field.type)
            .collect(Collectors.toCollection(LinkedHashSet::new));
    BType restFieldType = recordType.restFieldType;
    if (restFieldType != null && restFieldType != symTable.noType) {
        possibleTypes.add(restFieldType);
    }
    return BUnionType.create(null, possibleTypes);
}
/**
 * Validates a key of a map/JSON literal: a computed key must type-check as a string; otherwise
 * only identifiers and string literals are acceptable. Logs an error for anything else.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangExpression keyExpr, boolean computedKey) {
    if (computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        return keyExpr.getBType() != symTable.semanticError;
    }
    NodeKind keyKind = keyExpr.getKind();
    boolean literalStringKey =
            keyKind == NodeKind.LITERAL && ((BLangLiteral) keyExpr).getBType().tag == TypeTags.STRING;
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF || literalStringKey) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Makes the given type nillable for a potentially-absent access; types that are already
 * nullable are returned unchanged.
 */
private BType addNilForNillableAccessType(BType actualType) {
    return actualType.isNullable()
            ? actualType
            : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves access to a required (non-optional) record field. On success the access expression's
 * symbol is populated and the field type returned; missing or optional fields yield
 * {@code semanticError}.
 */
private BType checkRecordRequiredFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean missingOrOptional = fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol);
    if (missingOrOptional) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves access to an optional record field. Only fields that exist AND are marked optional
 * qualify; on success the access expression's symbol is populated and the field type returned.
 */
private BType checkRecordOptionalFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean missingOrRequired = fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol);
    if (missingOrRequired) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves access through a record's rest field: valid only when the name does NOT match a
 * declared field and the record is open (not sealed).
 */
private BType checkRecordRestFieldAccess(BLangAccessExpression varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    // Declared fields never go through the rest field; sealed records have no rest field at all.
    if (fieldSymbol != symTable.notFoundSymbol || recordType.sealed) {
        return symTable.semanticError;
    }
    return recordType.restFieldType;
}
/**
 * Resolves a field access on an object type: first as a plain field, then as an attached
 * function (method reference). Populates the access expression's symbol on success.
 *
 * @return the resolved field/function type, or {@code semanticError} when neither exists
 */
private BType checkObjectFieldAccess(BLangFieldBasedAccess bLangFieldBasedAccess,
Name fieldName, BObjectType objectType) {
BSymbol fieldSymbol = symResolver.resolveStructField(bLangFieldBasedAccess.pos,
this.env, fieldName, objectType.tsymbol);
if (fieldSymbol != symTable.notFoundSymbol) {
bLangFieldBasedAccess.symbol = fieldSymbol;
return fieldSymbol.type;
}
// Not a field: try resolving as an attached function of the object.
Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
fieldName.value));
fieldSymbol = symResolver.resolveObjectField(bLangFieldBasedAccess.pos, env, objFuncName, objectType.tsymbol);
if (fieldSymbol == symTable.notFoundSymbol) {
dlog.error(bLangFieldBasedAccess.field.pos,
DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
return symTable.semanticError;
}
// An isolated method accessed on a non-isolated object loses its isolated qualifier;
// work on a duplicate so the original symbol (shared across uses) is not mutated.
if (Symbols.isFlagOn(fieldSymbol.type.flags, Flags.ISOLATED) &&
!Symbols.isFlagOn(objectType.flags, Flags.ISOLATED)) {
fieldSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol);
fieldSymbol.flags &= ~Flags.ISOLATED;
fieldSymbol.type.flags &= ~Flags.ISOLATED;
}
bLangFieldBasedAccess.symbol = fieldSymbol;
return fieldSymbol.type;
}
/**
 * Resolves the member type of a tuple at a constant index: fixed members by position, indices
 * past the fixed members via the rest type (when present), anything else is an error.
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType bTupleType = (BTupleType) tupleType;
    int fixedMemberCount = bTupleType.tupleTypes.size();
    if (indexValue >= fixedMemberCount) {
        // Beyond the fixed members only the rest type (if any) applies.
        return bTupleType.restType != null ? bTupleType.restType : symTable.semanticError;
    }
    if (indexValue < 0) {
        return symTable.semanticError;
    }
    return bTupleType.tupleTypes.get(indexValue);
}
/**
 * Type-checks the start and end tag names of an XML element literal and reports a mismatch
 * when both are QNames that differ. If either tag name is an interpolated expression the
 * comparison is deferred to runtime; a missing end tag (self-closing element) is fine.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: nothing further to validate.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    boolean startIsQName = startTagName.getKind() == NodeKind.XML_QNAME;
    boolean endIsQName = endTagName.getKind() == NodeKind.XML_QNAME;
    if (startIsQName && endIsQName && startTagName.equals(endTagName)) {
        return;
    }
    if (!startIsQName && !endIsQName) {
        // Both tags are interpolated expressions; equality is a runtime concern.
        return;
    }
    dlog.error(bLangXMLElementLiteral.pos, DiagnosticErrorCode.XML_TAGS_MISMATCH);
}
/**
 * Type-checks every interpolation of a string template; each must be a non-nil simple basic
 * type or string. Expressions that already failed checking are skipped to avoid cascading errors.
 */
private void checkStringTemplateExprs(List<? extends BLangExpression> exprs) {
    for (BLangExpression interpolation : exprs) {
        checkExpr(interpolation, env);
        BType interpolationType = interpolation.getBType();
        if (interpolationType == symTable.semanticError
                || types.isNonNilSimpleBasicTypeOrString(interpolationType)) {
            continue;
        }
        dlog.error(interpolation.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                BUnionType.create(null, symTable.intType, symTable.floatType,
                        symTable.decimalType, symTable.stringType,
                        symTable.booleanType), interpolationType);
    }
}
/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
 *
 * @param exprs         child nodes of the XML element literal
 * @param xmlElementEnv the symbol environment the children are checked in
 * @return reduced set of children, with runs of consecutive non-XML (text) fragments merged
 *         into single XML text literal nodes
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
List<BLangExpression> newChildren = new ArrayList<>();
// Accumulates a run of consecutive text-producing expressions until an XML-typed child is hit.
List<BLangExpression> tempConcatExpressions = new ArrayList<>();
for (BLangExpression expr : exprs) {
BType exprType;
// Query expressions are checked against the surrounding expected type; others free-standing.
if (expr.getKind() == NodeKind.QUERY_EXPR) {
exprType = checkExpr(expr, xmlElementEnv, expType);
} else {
exprType = checkExpr(expr, xmlElementEnv);
}
if (TypeTags.isXMLTypeTag(exprType.tag)) {
// An XML child ends the current text run: flush it as one text literal first.
if (!tempConcatExpressions.isEmpty()) {
newChildren.add(getXMLTextLiteral(tempConcatExpressions));
tempConcatExpressions = new ArrayList<>();
}
newChildren.add(expr);
continue;
}
BType type = expr.getBType();
// NOTE(review): tags at or above JSON appear to separate non-simple types (which can't be
// stringified into XML text) from simple basic types — confirm against TypeTags ordering.
if (type.tag >= TypeTags.JSON) {
if (type != symTable.semanticError && !TypeTags.isXMLTypeTag(type.tag)) {
dlog.error(expr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
BUnionType.create(null, symTable.intType, symTable.floatType,
symTable.decimalType, symTable.stringType,
symTable.booleanType, symTable.xmlType), type);
}
continue;
}
tempConcatExpressions.add(expr);
}
// Flush any trailing text run.
if (!tempConcatExpressions.isEmpty()) {
newChildren.add(getXMLTextLiteral(tempConcatExpressions));
}
return newChildren;
}
/**
 * Builds a single XML text literal node out of the given text fragments, positioned at the
 * first fragment and typed as {@code xml}.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.textFragments = exprs;
    textLiteral.pos = exprs.get(0).pos;
    textLiteral.setBType(symTable.xmlType);
    return textLiteral;
}
/**
 * Computes the final type of an access expression: the actual type, widened with nil when the
 * access can produce nil, and with error when error-safe navigation is in play and the parent
 * can carry an error. A single-member result collapses back to the member itself.
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    accessExpr.originalType = actualType;
    BUnionType resultType = BUnionType.create(null, actualType);
    if (returnsNull(accessExpr)) {
        resultType.add(symTable.nilType);
    }
    BType parentType = accessExpr.expr.getBType();
    boolean parentMayBeError = parentType.tag == TypeTags.SEMANTIC_ERROR
            || (parentType.tag == TypeTags.UNION
            && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType));
    if (accessExpr.errorSafeNavigation && parentMayBeError) {
        resultType.add(symTable.errorType);
    }
    Set<BType> members = resultType.getMemberTypes();
    // Avoid a degenerate single-member union.
    if (members.size() == 1) {
        return members.iterator().next();
    }
    return resultType;
}
/**
 * Decides whether the given access can evaluate to nil: true for nullable non-JSON parents,
 * and for index access on a map whose constraint is neither {@code any} nor {@code json}
 * (such constraints already subsume nil themselves).
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.getBType();
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }
    // Only map member access can introduce nil beyond a nullable parent.
    if (parentType.tag != TypeTags.MAP
            || accessExpr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BType constraintType = ((BMapType) parentType).constraint;
    return constraintType != null
            && constraintType.tag != TypeTags.ANY
            && constraintType.tag != TypeTags.JSON;
}
/**
 * Resolves a field access on an object or a union of objects. For a union, every member must
 * support the access; the result is the union of the per-member field types (collapsed when
 * they all agree).
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType memberFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType);
        if (memberFieldType == symTable.semanticError) {
            // One unsupported member invalidates the whole access.
            return memberFieldType;
        }
        memberFieldTypes.add(memberFieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a required-field access on a record or a union of records, recursing into union
 * members. Every member must have the required field; the result is the (collapsed) union of
 * the per-member field types.
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType memberFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memberType, fieldName);
        if (memberFieldType == symTable.semanticError) {
            // One member without the required field invalidates the whole access.
            return memberFieldType;
        }
        memberFieldTypes.add(memberFieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a field access on the left-hand side of an assignment: on a record, required fields
 * are tried first and optional fields accepted as a fallback (optional fields are writable).
 * For unions, every member must resolve; the result is the (collapsed) union of member types.
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                            Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) varRefType;
        BType requiredFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, recordType);
        if (requiredFieldType != symTable.semanticError) {
            return requiredFieldType;
        }
        // Writing to an optional field is allowed on the LHS.
        return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, recordType);
    }
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType memberFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName);
        if (memberFieldType == symTable.semanticError) {
            return symTable.semanticError;
        }
        memberFieldTypes.add(memberFieldType);
    }
    return memberFieldTypes.size() == 1
            ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves an optional field access ({@code x?.f}) on a record or a union of records.
 * On a single record, a required field keeps its type while an optional field is widened
 * with nil. On a union, members lacking the field are tolerated but force the result to
 * be nillable; the access fails only when no member has the field.
 *
 * @return the resolved (possibly nil-widened) field type, or {@code semanticError}
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
if (varRefType.tag == TypeTags.RECORD) {
// Required field: present on every value, so no nil widening needed.
BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
if (fieldType != symTable.semanticError) {
return fieldType;
}
fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
if (fieldType == symTable.semanticError) {
return fieldType;
}
// Optional field: may be absent at runtime, so the access can produce nil.
return BUnionType.create(null, fieldType, symTable.nilType);
}
Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
BType fieldType;
boolean nonMatchedRecordExists = false;
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : memberTypes) {
BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);
if (individualFieldType == symTable.semanticError) {
// This member lacks the field entirely; remember so nil is added below.
nonMatchedRecordExists = true;
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.isEmpty()) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
fieldType = fieldTypeMembers.iterator().next();
} else {
fieldType = BUnionType.create(null, fieldTypeMembers);
}
return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
/**
 * Type-checks a plain field access ({@code x.f}) against the receiver type, dispatching by
 * receiver kind: objects, records (with an LHS fallback to optional fields), lax types
 * (json/xml-like, yielding {@code T|error}), chained lax accesses, and xml sequences.
 *
 * @return the access result type, or {@code semanticError} (with an error already logged
 *         unless the receiver type itself was already erroneous)
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
BType actualType = symTable.semanticError;
if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
fieldAccessExpr.originalType = actualType;
} else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
if (actualType != symTable.semanticError) {
fieldAccessExpr.originalType = actualType;
return actualType;
}
// RHS access only reaches required fields; report that limitation.
if (!fieldAccessExpr.isLValue) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD,
varRefType, fieldName);
return actualType;
}
// LHS access may also target optional fields.
actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
fieldAccessExpr.originalType = actualType;
if (actualType == symTable.semanticError) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE,
fieldName, varRefType.tsymbol.type.getKind().typeName(), varRefType);
}
} else if (types.isLax(varRefType)) {
// Lax access (e.g. json) is read-only and can fail at runtime, hence the |error.
if (fieldAccessExpr.isLValue) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT,
varRefType);
return symTable.semanticError;
}
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
fieldAccessExpr.originalType = laxFieldAccessType;
} else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
// Chained lax access: the parent access was lax, so this one is lax too and
// error-safe navigation kicks in.
BType laxFieldAccessType =
getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
fieldAccessExpr.errorSafeNavigation = true;
fieldAccessExpr.originalType = laxFieldAccessType;
} else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
if (fieldAccessExpr.isLValue) {
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
}
actualType = symTable.xmlType;
fieldAccessExpr.originalType = actualType;
} else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
// Suppress a cascading error when the receiver already failed to check.
dlog.error(fieldAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS,
varRefType);
}
return actualType;
}
/**
 * Resolves the namespace prefix of an ns-prefixed field access (e.g. {@code x.ns:foo}) and
 * populates the access's {@code nsSymbol}: either directly from an XMLNS declaration in scope,
 * or — when the prefix names an imported module — from that module's namespace constants.
 * Logs an error when the prefix cannot be found.
 *
 * @param fieldAccessExpr the ns-prefixed field access to resolve
 */
private void resolveXMLNamespace(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess fieldAccessExpr) {
    // The parameter already has the precise ns-prefixed type; the former local alias was redundant.
    String nsPrefix = fieldAccessExpr.nsPrefix.value;
    BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(nsPrefix));
    if (nsSymbol == symTable.notFoundSymbol) {
        dlog.error(fieldAccessExpr.nsPrefix.pos, DiagnosticErrorCode.CANNOT_FIND_XML_NAMESPACE,
                fieldAccessExpr.nsPrefix);
    } else if (nsSymbol.getKind() == SymbolKind.PACKAGE) {
        // Prefix names an imported module: resolve via that module's namespace constant.
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) findXMLNamespaceFromPackageConst(
                fieldAccessExpr.field.value, fieldAccessExpr.nsPrefix.value,
                (BPackageSymbol) nsSymbol, fieldAccessExpr.pos);
    } else {
        fieldAccessExpr.nsSymbol = (BXMLNSSymbol) nsSymbol;
    }
}
/**
 * Reports whether the given field access has already been resolved to a lax original type
 * (used to propagate lax semantics through chained accesses).
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    if (originalType == null) {
        return false;
    }
    return types.isLax(originalType);
}
/**
 * Computes the result type of a lax field access for the given receiver type: json yields json,
 * xml/xml-element yield string, a map yields its constraint, and a union yields the (collapsed)
 * union of its members' lax access types (json unions collapse to json). Anything else is an error.
 */
private BType getLaxFieldAccessType(BType exprType) {
    int tag = exprType.tag;
    if (tag == TypeTags.JSON) {
        return symTable.jsonType;
    }
    if (tag == TypeTags.XML || tag == TypeTags.XML_ELEMENT) {
        return symTable.stringType;
    }
    if (tag == TypeTags.MAP) {
        return ((BMapType) exprType).constraint;
    }
    if (tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) exprType;
        // A union equivalent to json behaves exactly like json.
        if (types.isSameType(symTable.jsonType, unionType)) {
            return symTable.jsonType;
        }
        LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
        for (BType memberType : unionType.getMemberTypes()) {
            memberTypes.add(getLaxFieldAccessType(memberType));
        }
        return memberTypes.size() == 1
                ? memberTypes.iterator().next()
                : BUnionType.create(null, memberTypes);
    }
    return symTable.semanticError;
}
/**
 * Type-checks an optional field access ({@code x?.f}): strips nil from a nillable receiver
 * first, dispatches on the effective receiver type (records, lax types, chained lax accesses),
 * and re-adds nil to the result when the receiver could have been nil or the access is lax.
 *
 * @return the access result type (nil-widened where required), or {@code semanticError}
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
BType actualType = symTable.semanticError;
boolean nillableExprType = false;
BType effectiveType = varRefType;
// Strip nil from a nillable union receiver; remember so the result is nil-widened below.
if (varRefType.tag == TypeTags.UNION) {
Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
if (memTypes.contains(symTable.nilType)) {
LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
for (BType bType : memTypes) {
if (bType != symTable.nilType) {
nilRemovedSet.add(bType);
} else {
nillableExprType = true;
}
}
effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
BUnionType.create(null, nilRemovedSet);
}
}
if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
if (actualType == symTable.semanticError) {
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
varRefType, fieldName);
}
fieldAccessExpr.nilSafeNavigation = nillableExprType;
// Intermediate links in a nil-safe chain record the nil-free type for later links.
fieldAccessExpr.originalType = fieldAccessExpr.leafNode || !nillableExprType ? actualType :
types.getTypeWithoutNil(actualType);
} else if (types.isLax(effectiveType)) {
// Lax access may fail at runtime, so |error is added where failure is possible.
BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
actualType = accessCouldResultInError(effectiveType) ?
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
fieldAccessExpr.originalType = laxFieldAccessType;
fieldAccessExpr.nilSafeNavigation = true;
nillableExprType = true;
} else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
// Chained lax access: inherit laxness from the parent access's original type.
BType laxFieldAccessType =
getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
actualType = accessCouldResultInError(effectiveType) ?
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
resolveXMLNamespace((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr);
}
fieldAccessExpr.errorSafeNavigation = true;
fieldAccessExpr.originalType = laxFieldAccessType;
fieldAccessExpr.nilSafeNavigation = true;
nillableExprType = true;
} else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
// Suppress a cascading error when the receiver already failed to check.
dlog.error(fieldAccessExpr.pos,
DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS, varRefType);
}
// Re-add nil when the receiver could be nil or the access itself is nil-safe.
if (nillableExprType && actualType != symTable.semanticError && !actualType.isNullable()) {
actualType = BUnionType.create(null, actualType, symTable.nilType);
}
return actualType;
}
/**
 * Decides whether a lax access on the given type can produce an error at runtime: json and
 * xml can, maps cannot, and a union can if any member can.
 */
private boolean accessCouldResultInError(BType type) {
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
            return true;
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (accessCouldResultInError(memberType)) {
                    return true;
                }
            }
            return false;
        default:
            // Includes MAP, which never errors on lax access.
            return false;
    }
}
    /**
     * Type-checks an index-based access expression {@code e[k]} and returns the resulting type.
     * Supports mappings, lists, strings, xml, and tables; anything else is an error. When the
     * accessed expression's type is a union containing nil, nil is stripped for the member-type
     * lookup and re-added to the result at the end.
     *
     * @param indexBasedAccessExpr the index access expression to check
     * @return the resulting member type, or {@code symTable.semanticError} on failure
     */
    private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
        BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexBasedAccessExpr.expr.getBType());
        boolean nillableExprType = false;
        if (varRefType.tag == TypeTags.UNION) {
            Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
            if (memTypes.contains(symTable.nilType)) {
                // Strip nil from the union, remembering that it was present so it can be
                // re-added to the final result type (nil-lifting access).
                LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
                for (BType bType : memTypes) {
                    if (bType != symTable.nilType) {
                        nilRemovedSet.add(bType);
                    } else {
                        nillableExprType = true;
                    }
                }
                if (nillableExprType) {
                    varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                            BUnionType.create(null, nilRemovedSet);
                    // Nil-lifting member access is only allowed on mappings, and never for
                    // assignment targets.
                    if (!types.isSubTypeOfMapping(varRefType)) {
                        dlog.error(indexBasedAccessExpr.pos,
                                DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                                indexBasedAccessExpr.expr.getBType());
                        return symTable.semanticError;
                    }
                    if (indexBasedAccessExpr.isLValue) {
                        dlog.error(indexBasedAccessExpr.pos,
                                DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                                indexBasedAccessExpr.expr.getBType());
                        return symTable.semanticError;
                    }
                }
            }
        }
        BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
        BType actualType = symTable.semanticError;
        if (types.isSubTypeOfMapping(varRefType)) {
            // Mapping access: index must be a string.
            checkExpr(indexExpr, this.env, symTable.stringType);
            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);
            if (actualType == symTable.semanticError) {
                // A constant string index gives a more precise "undefined field" diagnostic.
                if (indexExpr.getBType().tag == TypeTags.STRING && isConst(indexExpr)) {
                    String fieldName = getConstFieldName(indexExpr);
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.UNDEFINED_STRUCTURE_FIELD,
                            fieldName, indexBasedAccessExpr.expr.getBType());
                    return actualType;
                }
                dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_RECORD_MEMBER_ACCESS_EXPR, indexExpr.getBType());
                return actualType;
            }
            indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
            indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                    types.getTypeWithoutNil(actualType);
        } else if (types.isSubTypeOfList(varRefType)) {
            // List access: index must be an int.
            checkExpr(indexExpr, this.env, symTable.intType);
            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
            indexBasedAccessExpr.originalType = actualType;
            if (actualType == symTable.semanticError) {
                // A constant int index gives a precise out-of-range diagnostic.
                if (indexExpr.getBType().tag == TypeTags.INT && isConst(indexExpr)) {
                    dlog.error(indexBasedAccessExpr.indexExpr.pos,
                            DiagnosticErrorCode.LIST_INDEX_OUT_OF_RANGE, getConstIndex(indexExpr));
                    return actualType;
                }
                dlog.error(indexExpr.pos, DiagnosticErrorCode.INVALID_LIST_MEMBER_ACCESS_EXPR, indexExpr.getBType());
                return actualType;
            }
        } else if (types.isAssignable(varRefType, symTable.stringType)) {
            // Strings are immutable: member access is read-only.
            if (indexBasedAccessExpr.isLValue) {
                dlog.error(indexBasedAccessExpr.pos,
                        DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS_FOR_ASSIGNMENT,
                        indexBasedAccessExpr.expr.getBType());
                return symTable.semanticError;
            }
            checkExpr(indexExpr, this.env, symTable.intType);
            if (indexExpr.getBType() == symTable.semanticError) {
                return symTable.semanticError;
            }
            indexBasedAccessExpr.originalType = symTable.stringType;
            actualType = symTable.stringType;
        } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
            // XML sequences cannot be updated via member access.
            if (indexBasedAccessExpr.isLValue) {
                indexExpr.setBType(symTable.semanticError);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_XML_SEQUENCE);
                return actualType;
            }
            BType type = checkExpr(indexExpr, this.env, symTable.intType);
            if (type == symTable.semanticError) {
                return type;
            }
            indexBasedAccessExpr.originalType = varRefType;
            actualType = varRefType;
        } else if (varRefType.tag == TypeTags.TABLE) {
            if (indexBasedAccessExpr.isLValue) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.CANNOT_UPDATE_TABLE_USING_MEMBER_ACCESS,
                        varRefType);
                return symTable.semanticError;
            }
            BTableType tableType = (BTableType) indexBasedAccessExpr.expr.getBType();
            BType keyTypeConstraint = tableType.keyTypeConstraint;
            if (tableType.keyTypeConstraint == null) {
                // Derive the key constraint from the key field names; a keyless table cannot
                // be accessed by member access.
                keyTypeConstraint = createTableKeyConstraint(((BTableType) indexBasedAccessExpr.expr.getBType()).
                        fieldNameList, ((BTableType) indexBasedAccessExpr.expr.getBType()).constraint);
                if (keyTypeConstraint == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticErrorCode.MEMBER_ACCESS_NOT_SUPPORT_FOR_KEYLESS_TABLE,
                            indexBasedAccessExpr.expr);
                    return symTable.semanticError;
                }
            }
            if (indexExpr.getKind() != NodeKind.TABLE_MULTI_KEY) {
                checkExpr(indexExpr, this.env, keyTypeConstraint);
                if (indexExpr.getBType() == symTable.semanticError) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }
            } else {
                // Multi-key access: each key expression is checked against the corresponding
                // member of the (tuple) key constraint.
                List<BLangExpression> multiKeyExpressionList = ((BLangTableMultiKeyExpr)
                        indexBasedAccessExpr.indexExpr).multiKeyIndexExprs;
                List<BType> keyConstraintTypes = ((BTupleType) keyTypeConstraint).tupleTypes;
                if (keyConstraintTypes.size() != multiKeyExpressionList.size()) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                            keyTypeConstraint);
                    return symTable.semanticError;
                }
                for (int i = 0; i < multiKeyExpressionList.size(); i++) {
                    BLangExpression keyExpr = multiKeyExpressionList.get(i);
                    checkExpr(keyExpr, this.env, keyConstraintTypes.get(i));
                    if (keyExpr.getBType() == symTable.semanticError) {
                        dlog.error(indexBasedAccessExpr.pos,
                                DiagnosticErrorCode.INVALID_KEY_CONSTRAINT_PROVIDED_FOR_ACCESS,
                                keyTypeConstraint);
                        return symTable.semanticError;
                    }
                }
            }
            if (expType.tag != TypeTags.NONE) {
                BType resultType = checkExpr(indexBasedAccessExpr.expr, env, expType);
                if (resultType == symTable.semanticError) {
                    return symTable.semanticError;
                }
            }
            BType constraint = tableType.constraint;
            // Table access may miss: the result is the row type made nillable.
            actualType = addNilForNillableAccessType(constraint);
            indexBasedAccessExpr.originalType = indexBasedAccessExpr.leafNode || !nillableExprType ? actualType :
                    types.getTypeWithoutNil(actualType);
        } else if (varRefType == symTable.semanticError) {
            indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
            return symTable.semanticError;
        } else {
            indexBasedAccessExpr.indexExpr.setBType(symTable.semanticError);
            dlog.error(indexBasedAccessExpr.pos, DiagnosticErrorCode.OPERATION_DOES_NOT_SUPPORT_MEMBER_ACCESS,
                    indexBasedAccessExpr.expr.getBType());
            return symTable.semanticError;
        }
        // Re-add nil that was stripped from the accessed expression's type.
        if (nillableExprType && !actualType.isNullable()) {
            actualType = BUnionType.create(null, actualType, symTable.nilType);
        }
        return actualType;
    }
private Long getConstIndex(BLangExpression indexExpr) {
return indexExpr.getKind() == NodeKind.NUMERIC_LITERAL ? (Long) ((BLangLiteral) indexExpr).value :
(Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
private String getConstFieldName(BLangExpression indexExpr) {
return indexExpr.getKind() == NodeKind.LITERAL ? (String) ((BLangLiteral) indexExpr).value :
(String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
    /**
     * Resolves the element type for index-based access on an array, validating constant indices
     * against the array's fixed size where possible.
     *
     * @param indexBasedAccess the access expression being checked
     * @param indexExprType    the type of the index expression (int, finite, or a union of finites)
     * @param arrayType        the array being accessed
     * @return the element type, or {@code symTable.semanticError} if no valid index exists
     */
    private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                             BArrayType arrayType) {
        BType actualType = symTable.semanticError;
        switch (indexExprType.tag) {
            case TypeTags.INT:
                BLangExpression indexExpr = indexBasedAccess.indexExpr;
                // Non-constant indices and open arrays cannot be bounds-checked at compile time.
                if (!isConst(indexExpr) || arrayType.state == BArrayState.OPEN) {
                    actualType = arrayType.eType;
                    break;
                }
                // Constant index on a fixed-size array: reject out-of-range access.
                actualType = getConstIndex(indexExpr) >= arrayType.size ? symTable.semanticError : arrayType.eType;
                break;
            case TypeTags.FINITE:
                // A finite index type is valid if at least one of its values is in range.
                BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
                boolean validIndexExists = false;
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    if (indexValue >= 0 &&
                            (arrayType.state == BArrayState.OPEN || indexValue < arrayType.size)) {
                        validIndexExists = true;
                        break;
                    }
                }
                if (!validIndexExists) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
                break;
            case TypeTags.UNION:
                // Merge all finite members of the union into a single finite type and re-check.
                List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                        .filter(memType -> memType.tag == TypeTags.FINITE)
                        .map(matchedType -> (BFiniteType) matchedType)
                        .collect(Collectors.toList());
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
                if (elementType == symTable.semanticError) {
                    return symTable.semanticError;
                }
                actualType = arrayType.eType;
        }
        return actualType;
    }
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
if (type.tag == TypeTags.ARRAY) {
return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.getBType(), (BArrayType) type);
}
if (type.tag == TypeTags.TUPLE) {
return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.getBType());
}
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : ((BUnionType) type).getMemberTypes()) {
BType individualFieldType = checkListIndexBasedAccess(accessExpr, memType);
if (individualFieldType == symTable.semanticError) {
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 0) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
return fieldTypeMembers.iterator().next();
}
return BUnionType.create(null, fieldTypeMembers);
}
    /**
     * Resolves the member type for index-based access on a tuple, driven by the index
     * expression's type (int, finite, or a union containing finite members).
     *
     * @param accessExpr  the access expression being checked
     * @param tuple       the tuple being accessed
     * @param currentType the type of the index expression
     * @return the member type, or {@code symTable.semanticError} if no valid index exists
     */
    private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.INT:
                if (isConst(indexExpr)) {
                    // Constant index: the exact member type is known.
                    actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
                } else {
                    // Non-constant index: the result is the union of all member types.
                    BTupleType tupleExpr = (BTupleType) accessExpr.expr.getBType();
                    LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                    actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
                                                                                                           tupleTypes);
                }
                break;
            case TypeTags.FINITE:
                // Union of the member types at each valid index value in the finite value space.
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                    BType fieldType = checkTupleFieldType(tuple, indexValue);
                    if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                        possibleTypes.add(fieldType);
                    }
                }
                if (possibleTypes.size() == 0) {
                    return symTable.semanticError;
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Handle non-finite members individually; merge all finite members into one
                // finite type and handle them together.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                // Any member yielding an error invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
        }
        return actualType;
    }
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
tupleType.tupleTypes
.forEach(memberType -> {
if (memberType.tag == TypeTags.UNION) {
collectMemberTypes((BUnionType) memberType, memberTypes);
} else {
memberTypes.add(memberType);
}
});
return memberTypes;
}
private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
if (type.tag == TypeTags.MAP) {
BType constraint = ((BMapType) type).constraint;
return accessExpr.isLValue ? constraint : addNilForNillableAccessType(constraint);
}
if (type.tag == TypeTags.RECORD) {
return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.getBType());
}
BType fieldType;
boolean nonMatchedRecordExists = false;
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : ((BUnionType) type).getMemberTypes()) {
BType individualFieldType = checkMappingIndexBasedAccess(accessExpr, memType);
if (individualFieldType == symTable.semanticError) {
nonMatchedRecordExists = true;
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.size() == 0) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
fieldType = fieldTypeMembers.iterator().next();
} else {
fieldType = BUnionType.create(null, fieldTypeMembers);
}
return nonMatchedRecordExists ? addNilForNillableAccessType(fieldType) : fieldType;
}
    /**
     * Resolves the member type for index-based access on a record, driven by the index
     * expression's type (string, finite, or a union containing finite members). For constant
     * keys the exact field type is resolved (required, then optional, then rest); otherwise
     * the result is the union of all possible field types, made nillable where access may miss.
     *
     * @param accessExpr  the access expression being checked
     * @param record      the record being accessed
     * @param currentType the type of the index expression
     * @return the member type, or {@code symTable.semanticError} if no field can match
     */
    private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
        BType actualType = symTable.semanticError;
        BLangExpression indexExpr = accessExpr.indexExpr;
        switch (currentType.tag) {
            case TypeTags.STRING:
                if (isConst(indexExpr)) {
                    String fieldName = IdentifierUtils.escapeSpecialCharacters(getConstFieldName(indexExpr));
                    // Try required fields first: their access always succeeds, so no nil-lifting.
                    actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType != symTable.semanticError) {
                        return actualType;
                    }
                    // Then optional fields, then the rest field; both may miss at runtime.
                    actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType == symTable.semanticError) {
                        actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                        if (actualType == symTable.semanticError) {
                            return actualType;
                        }
                        if (actualType == symTable.neverType) {
                            return actualType;
                        }
                        return addNilForNillableAccessType(actualType);
                    }
                    if (accessExpr.isLValue) {
                        return actualType;
                    }
                    return addNilForNillableAccessType(actualType);
                }
                // Non-constant key: any field (or the rest field) could be accessed.
                LinkedHashSet<BType> fieldTypes = record.fields.values().stream()
                        .map(field -> field.type)
                        .collect(Collectors.toCollection(LinkedHashSet::new));
                if (record.restFieldType.tag != TypeTags.NONE) {
                    fieldTypes.add(record.restFieldType);
                }
                // The key may not match any field, so the result must admit nil.
                if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                    fieldTypes.add(symTable.nilType);
                }
                actualType = BUnionType.create(null, fieldTypes);
                break;
            case TypeTags.FINITE:
                // Union of the field types reachable via each key in the finite value space.
                BFiniteType finiteIndexExpr = (BFiniteType) currentType;
                LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
                for (BLangExpression finiteMember : finiteIndexExpr.getValueSpace()) {
                    String fieldName = (String) ((BLangLiteral) finiteMember).value;
                    BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (fieldType == symTable.semanticError) {
                        fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                        if (fieldType == symTable.semanticError) {
                            fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                        }
                        if (fieldType != symTable.semanticError) {
                            fieldType = addNilForNillableAccessType(fieldType);
                        }
                    }
                    if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                        continue;
                    }
                    possibleTypes.add(fieldType);
                }
                if (possibleTypes.isEmpty()) {
                    return symTable.semanticError;
                }
                if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                    possibleTypes.add(symTable.nilType);
                }
                actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                        BUnionType.create(null, possibleTypes);
                break;
            case TypeTags.UNION:
                // Handle non-finite members individually; merge all finite members into one
                // finite type and handle them together.
                LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
                List<BFiniteType> finiteTypes = new ArrayList<>();
                ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                    if (memType.tag == TypeTags.FINITE) {
                        finiteTypes.add((BFiniteType) memType);
                    } else {
                        BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
                        if (possibleType.tag == TypeTags.UNION) {
                            possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                        } else {
                            possibleTypesByMember.add(possibleType);
                        }
                    }
                });
                BFiniteType finiteType;
                if (finiteTypes.size() == 1) {
                    finiteType = finiteTypes.get(0);
                } else {
                    Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                    finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.getValueSpace()));
                    finiteType = new BFiniteType(null, valueSpace);
                }
                BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
                if (possibleType.tag == TypeTags.UNION) {
                    possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                } else {
                    possibleTypesByMember.add(possibleType);
                }
                // Any member yielding an error invalidates the whole access.
                if (possibleTypesByMember.contains(symTable.semanticError)) {
                    return symTable.semanticError;
                }
                actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                        BUnionType.create(null, possibleTypesByMember);
        }
        return actualType;
    }
private List<BType> getTypesList(BType type) {
if (type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) type;
return new ArrayList<>(unionType.getMemberTypes());
} else {
return Lists.of(type);
}
}
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.getBType());
LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
BType patternExprType = pattern.expr.getBType();
matchExprTypes.addAll(getTypesList(patternExprType));
if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
return new LinkedHashSet<BType>() {
{
add(symTable.semanticError);
}
};
}
assignable = this.types.isAssignable(type, pattern.variable.getBType());
if (assignable) {
break;
}
}
if (!assignable) {
matchExprTypes.add(type);
}
}
return matchExprTypes;
}
    /**
     * Returns whether a value of the given type could (transitively) contain a table value.
     * Recurses through unions, maps, records, arrays, and tuples.
     *
     * @param type             the type to inspect
     * @param encounteredTypes types already visited, used to break cycles in recursive types;
     *                         a revisited type answers {@code false} since its first visit
     *                         already covers it
     */
    private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
        if (encounteredTypes.contains(type)) {
            return false;
        }
        encounteredTypes.add(type);
        switch (type.tag) {
            case TypeTags.UNION:
                for (BType bType1 : ((BUnionType) type).getMemberTypes()) {
                    if (couldHoldTableValues(bType1, encounteredTypes)) {
                        return true;
                    }
                }
                return false;
            case TypeTags.MAP:
                return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
            case TypeTags.RECORD:
                BRecordType recordType = (BRecordType) type;
                for (BField field : recordType.fields.values()) {
                    if (couldHoldTableValues(field.type, encounteredTypes)) {
                        return true;
                    }
                }
                // Open records can also hold tables via their rest field.
                return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
            case TypeTags.ARRAY:
                return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
            case TypeTags.TUPLE:
                for (BType bType : ((BTupleType) type).getTupleTypes()) {
                    if (couldHoldTableValues(bType, encounteredTypes)) {
                        return true;
                    }
                }
                return false;
        }
        // NOTE: TABLE itself is not listed here; presumably callers check the top-level
        // table case separately — confirm against call sites.
        return false;
    }
private boolean isConst(BLangExpression expression) {
if (ConstantAnalyzer.isValidConstantExpressionNode(expression)) {
return true;
}
if (expression.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
return false;
}
return (((BLangSimpleVarRef) expression).symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
}
private Name getCurrentCompUnit(BLangNode node) {
return names.fromString(node.pos.lineRange().filePath());
}
    /**
     * Reduces a list of inferred types to a single representative type: removes types that are
     * assignable to (subsumed by) another type in the list, then returns the sole survivor or
     * a union of the survivors. A semantic error anywhere short-circuits to that error.
     *
     * <p>Note: this mutates {@code inferredTypeList} in place; the index arithmetic after each
     * removal keeps the pairwise scan consistent.
     *
     * @param inferredTypeList candidate types; modified in place
     * @return the representative broad type
     */
    private BType getRepresentativeBroadType(List<BType> inferredTypeList) {
        for (int i = 0; i < inferredTypeList.size(); i++) {
            BType type = inferredTypeList.get(i);
            if (type.tag == TypeTags.SEMANTIC_ERROR) {
                return type;
            }
            for (int j = i + 1; j < inferredTypeList.size(); j++) {
                BType otherType = inferredTypeList.get(j);
                if (otherType.tag == TypeTags.SEMANTIC_ERROR) {
                    return otherType;
                }
                if (types.isAssignable(otherType, type)) {
                    // otherType is subsumed by type: drop it and re-examine index j.
                    inferredTypeList.remove(j);
                    j -= 1;
                    continue;
                }
                if (types.isAssignable(type, otherType)) {
                    // type is subsumed by otherType: drop it and restart from the same i.
                    inferredTypeList.remove(i);
                    i -= 1;
                    break;
                }
            }
        }
        if (inferredTypeList.size() == 1) {
            return inferredTypeList.get(0);
        }
        return BUnionType.create(null, inferredTypeList.toArray(new BType[0]));
    }
    /**
     * Infers and defines an anonymous record type for a record literal checked against
     * {@code expType}. Walks the literal's fields (key-value, spread, and var-name fields),
     * accumulating per-key type info and rest-field types, then builds and registers the
     * record type definition.
     *
     * @param recordLiteral the record literal being checked
     * @param expType       the expected type the field value expressions are checked against
     * @return the inferred record type, or {@code symTable.semanticError} if any field or
     *         rest type is erroneous
     */
    private BType defineInferredRecordType(BLangRecordLiteral recordLiteral, BType expType) {
        PackageID pkgID = env.enclPkg.symbol.pkgID;
        BRecordTypeSymbol recordSymbol = createRecordTypeSymbol(pkgID, recordLiteral.pos, VIRTUAL);

        Map<String, FieldInfo> nonRestFieldTypes = new LinkedHashMap<>();
        List<BType> restFieldTypes = new ArrayList<>();

        for (RecordLiteralNode.RecordField field : recordLiteral.fields) {
            if (field.isKeyValueField()) {
                BLangRecordKeyValueField keyValue = (BLangRecordKeyValueField) field;
                BLangRecordKey key = keyValue.key;
                BLangExpression expression = keyValue.valueExpr;
                BLangExpression keyExpr = key.expr;
                if (key.computedKey) {
                    // A computed key cannot name a specific field: its value contributes to
                    // the rest-field type instead.
                    checkExpr(keyExpr, env, symTable.stringType);
                    BType exprType = checkExpr(expression, env, expType);
                    if (isUniqueType(restFieldTypes, exprType)) {
                        restFieldTypes.add(exprType);
                    }
                } else {
                    addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(keyExpr),
                                           keyValue.readonly ? checkExpr(expression, env, symTable.readonlyType) :
                                                   checkExpr(expression, env, expType),
                                           true, keyValue.readonly);
                }
            } else if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
                // Spread operator: fold the spread expression's fields into the inference.
                BType type = checkExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, env, expType);
                int typeTag = type.tag;

                if (typeTag == TypeTags.MAP) {
                    BType constraintType = ((BMapType) type).constraint;

                    if (isUniqueType(restFieldTypes, constraintType)) {
                        restFieldTypes.add(constraintType);
                    }
                }

                if (type.tag != TypeTags.RECORD) {
                    continue;
                }

                BRecordType recordType = (BRecordType) type;
                for (BField recField : recordType.fields.values()) {
                    // Optional fields of the spread record stay optional in the inferred type.
                    addToNonRestFieldTypes(nonRestFieldTypes, recField.name.value, recField.type,
                                           !Symbols.isOptional(recField.symbol), false);
                }

                if (!recordType.sealed) {
                    BType restFieldType = recordType.restFieldType;
                    if (isUniqueType(restFieldTypes, restFieldType)) {
                        restFieldTypes.add(restFieldType);
                    }
                }
            } else {
                // Var-name field: `{x}` shorthand for `{x: x}`.
                BLangRecordVarNameField varNameField = (BLangRecordVarNameField) field;
                addToNonRestFieldTypes(nonRestFieldTypes, getKeyName(varNameField),
                                       varNameField.readonly ? checkExpr(varNameField, env, symTable.readonlyType) :
                                               checkExpr(varNameField, env, expType),
                                       true, varNameField.readonly);
            }
        }

        // Materialize the accumulated per-key info into record fields and symbols.
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        boolean allReadOnlyNonRestFields = true;

        for (Map.Entry<String, FieldInfo> entry : nonRestFieldTypes.entrySet()) {
            FieldInfo fieldInfo = entry.getValue();
            List<BType> types = fieldInfo.types;

            if (types.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }

            String key = entry.getKey();
            Name fieldName = names.fromString(key);
            BType type = types.size() == 1 ? types.get(0) : BUnionType.create(null, types.toArray(new BType[0]));

            Set<Flag> flags = new HashSet<>();

            if (fieldInfo.required) {
                flags.add(Flag.REQUIRED);
            } else {
                flags.add(Flag.OPTIONAL);
            }

            if (fieldInfo.readonly) {
                flags.add(Flag.READONLY);
            } else if (allReadOnlyNonRestFields) {
                allReadOnlyNonRestFields = false;
            }

            BVarSymbol fieldSymbol = new BVarSymbol(Flags.asMask(flags), fieldName, pkgID, type, recordSymbol,
                                                    symTable.builtinPos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }

        BRecordType recordType = new BRecordType(recordSymbol);
        recordType.fields = fields;

        if (restFieldTypes.contains(symTable.semanticError)) {
            return symTable.semanticError;
        }

        // No rest contributions => sealed record; otherwise the rest type is the union
        // of all contributions.
        if (restFieldTypes.isEmpty()) {
            recordType.sealed = true;
            recordType.restFieldType = symTable.noType;
        } else if (restFieldTypes.size() == 1) {
            recordType.restFieldType = restFieldTypes.get(0);
        } else {
            recordType.restFieldType = BUnionType.create(null, restFieldTypes.toArray(new BType[0]));
        }

        recordSymbol.type = recordType;
        recordType.tsymbol = recordSymbol;

        // A sealed record whose fields are all readonly (or a readonly context) is itself readonly.
        if (expType == symTable.readonlyType || (recordType.sealed && allReadOnlyNonRestFields)) {
            recordType.flags |= Flags.READONLY;
            recordSymbol.flags |= Flags.READONLY;
        }

        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(recordType, pkgID, symTable,
                                                                                       recordLiteral.pos);
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                                                                                          names, symTable);
        TypeDefBuilderHelper.addTypeDefinition(recordType, recordSymbol, recordTypeNode, env);

        return recordType;
    }
    /**
     * Creates an anonymous record type symbol with a synthetic init function and a fresh scope,
     * and registers the init function in that scope.
     *
     * @param pkgID    package the symbol belongs to
     * @param location source position attached to the symbol
     * @param origin   symbol origin (e.g. VIRTUAL for compiler-generated)
     * @return the newly created record type symbol
     */
    private BRecordTypeSymbol createRecordTypeSymbol(PackageID pkgID, Location location,
                                                     SymbolOrigin origin) {
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(Flags.ANONYMOUS,
                                           names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(pkgID)),
                                           pkgID, null, env.scope.owner, location, origin);

        // Synthetic `init` that takes no parameters and returns nil.
        BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
        BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
                Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false,
                symTable.builtinPos, VIRTUAL);
        initFuncSymbol.retType = symTable.nilType;
        recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                             bInvokableType, location);

        recordSymbol.scope = new Scope(recordSymbol);
        // Define the init function under its qualified name "<record>.<init>".
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        return recordSymbol;
    }
private String getKeyName(BLangExpression key) {
return key.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
((BLangSimpleVarRef) key).variableName.value : (String) ((BLangLiteral) key).value;
}
private void addToNonRestFieldTypes(Map<String, FieldInfo> nonRestFieldTypes, String keyString,
BType exprType, boolean required, boolean readonly) {
if (!nonRestFieldTypes.containsKey(keyString)) {
nonRestFieldTypes.put(keyString, new FieldInfo(new ArrayList<BType>() {{ add(exprType); }}, required,
readonly));
return;
}
FieldInfo fieldInfo = nonRestFieldTypes.get(keyString);
List<BType> typeList = fieldInfo.types;
if (isUniqueType(typeList, exprType)) {
typeList.add(exprType);
}
if (required && !fieldInfo.required) {
fieldInfo.required = true;
}
}
private boolean isUniqueType(List<BType> typeList, BType type) {
boolean isRecord = type.tag == TypeTags.RECORD;
for (BType bType : typeList) {
if (isRecord) {
if (type == bType) {
return false;
}
} else if (types.isSameType(type, bType)) {
return false;
}
}
return true;
}
    /**
     * Determines the type to assign an xml sub-type literal checked against {@code expType},
     * preferring the mutable sub-type, then its immutable counterpart. For a union expected
     * type, exactly one compatible member must exist; zero is an incompatibility error and
     * more than one is an ambiguity error.
     *
     * @param location          position for diagnostics
     * @param mutableXmlSubType the literal's natural (mutable) xml sub-type
     * @param expType           the expected type
     * @return the resolved type, or {@code symTable.semanticError} with a logged diagnostic
     */
    private BType checkXmlSubTypeLiteralCompatibility(Location location, BXMLSubType mutableXmlSubType,
                                                      BType expType) {
        if (expType == symTable.semanticError) {
            return expType;
        }

        boolean unionExpType = expType.tag == TypeTags.UNION;

        if (expType == mutableXmlSubType) {
            return expType;
        }

        if (!unionExpType && types.isAssignable(mutableXmlSubType, expType)) {
            return mutableXmlSubType;
        }

        // Fall back to the readonly counterpart of the sub-type.
        BXMLSubType immutableXmlSubType = (BXMLSubType)
                ImmutableTypeCloner.getEffectiveImmutableType(location, types, mutableXmlSubType, env, symTable,
                                                              anonymousModelHelper, names);

        if (expType == immutableXmlSubType) {
            return expType;
        }

        if (!unionExpType && types.isAssignable(immutableXmlSubType, expType)) {
            return immutableXmlSubType;
        }

        if (!unionExpType) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }

        // Union expected type: gather every distinct compatible candidate.
        List<BType> compatibleTypes = new ArrayList<>();
        for (BType memberType : ((BUnionType) expType).getMemberTypes()) {
            if (compatibleTypes.contains(memberType)) {
                continue;
            }

            if (memberType == mutableXmlSubType || memberType == immutableXmlSubType) {
                compatibleTypes.add(memberType);
                continue;
            }

            if (types.isAssignable(mutableXmlSubType, memberType) && !compatibleTypes.contains(mutableXmlSubType)) {
                compatibleTypes.add(mutableXmlSubType);
                continue;
            }

            if (types.isAssignable(immutableXmlSubType, memberType) && !compatibleTypes.contains(immutableXmlSubType)) {
                compatibleTypes.add(immutableXmlSubType);
            }
        }

        if (compatibleTypes.isEmpty()) {
            dlog.error(location, DiagnosticErrorCode.INCOMPATIBLE_TYPES, expType, mutableXmlSubType);
            return symTable.semanticError;
        }

        if (compatibleTypes.size() == 1) {
            return compatibleTypes.get(0);
        }

        dlog.error(location, DiagnosticErrorCode.AMBIGUOUS_TYPES, expType);
        return symTable.semanticError;
    }
    /**
     * Recursively retypes the modified children of an xml element literal to their effective
     * immutable types. Children that are already readonly, or whose type has no selectively
     * immutable counterpart, are left unchanged.
     */
    private void markChildrenAsImmutable(BLangXMLElementLiteral bLangXMLElementLiteral) {
        for (BLangExpression modifiedChild : bLangXMLElementLiteral.modifiedChildren) {
            BType childType = modifiedChild.getBType();
            if (Symbols.isFlagOn(childType.flags, Flags.READONLY) || !types.isSelectivelyImmutableType(childType)) {
                continue;
            }
            modifiedChild.setBType(ImmutableTypeCloner.getEffectiveImmutableType(modifiedChild.pos, types,
                                                                                 (SelectivelyImmutableReferenceType) childType,
                                                                                 env, symTable, anonymousModelHelper, names));

            // Nested element literals carry their own modified children; recurse into them.
            if (modifiedChild.getKind() == NodeKind.XML_ELEMENT_LITERAL) {
                markChildrenAsImmutable((BLangXMLElementLiteral) modifiedChild);
            }
        }
    }
private void logUndefinedSymbolError(Location pos, String name) {
if (!missingNodesHelper.isMissingNode(name)) {
dlog.error(pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, name);
}
}
    /**
     * Flags both the type and its type symbol as isolated, so the qualifier is visible
     * whichever of the two a consumer inspects.
     */
    private void markTypeAsIsolated(BType actualType) {
        actualType.flags |= Flags.ISOLATED;
        actualType.tsymbol.flags |= Flags.ISOLATED;
    }
    /**
     * Returns whether this type-init expression is an object-constructor expression: it has an
     * explicit type reference and the resolved type's symbol is anonymous.
     */
    private boolean isObjectConstructorExpr(BLangTypeInit cIExpr, BType actualType) {
        return cIExpr.getType() != null && Symbols.isFlagOn(actualType.tsymbol.flags, Flags.ANONYMOUS);
    }
private BLangClassDefinition getClassDefinitionForObjectConstructorExpr(BLangTypeInit cIExpr, SymbolEnv env) {
List<BLangClassDefinition> classDefinitions = env.enclPkg.classDefinitions;
BLangUserDefinedType userDefinedType = (BLangUserDefinedType) cIExpr.getType();
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(userDefinedType.pos, env,
names.fromIdNode(userDefinedType.pkgAlias),
names.fromIdNode(userDefinedType.typeName));
for (BLangClassDefinition classDefinition : classDefinitions) {
if (classDefinition.symbol == symbol) {
return classDefinition;
}
}
return null;
}
    /**
     * Validates an object-constructor expression used in a readonly context and, when all
     * fields can be immutable, marks the class and object type readonly and re-analyzes the
     * constructor. Fields whose type can never be immutable abort the readonly promotion and,
     * when {@code logErrors} is set, produce a diagnostic per offending field.
     *
     * @param cIExpr                 the constructor expression (used for position only)
     * @param actualObjectType       the constructed object's type
     * @param classDefForConstructor the backing class definition
     * @param env                    enclosing symbol environment
     * @param logErrors              whether to emit diagnostics for non-immutable fields
     */
    private void handleObjectConstrExprForReadOnly(BLangTypeInit cIExpr, BObjectType actualObjectType,
                                                   BLangClassDefinition classDefForConstructor, SymbolEnv env,
                                                   boolean logErrors) {
        boolean hasNeverReadOnlyField = false;

        for (BField field : actualObjectType.fields.values()) {
            BType fieldType = field.type;
            if (!types.isInherentlyImmutableType(fieldType) && !types.isSelectivelyImmutableType(fieldType, false)) {
                // Still analyze the constructor so downstream phases see a checked class.
                analyzeObjectConstructor(classDefForConstructor, env);
                hasNeverReadOnlyField = true;

                if (!logErrors) {
                    return;
                }

                dlog.error(field.pos,
                           DiagnosticErrorCode.INVALID_FIELD_IN_OBJECT_CONSTUCTOR_EXPR_WITH_READONLY_REFERENCE,
                           fieldType);
            }
        }

        if (hasNeverReadOnlyField) {
            return;
        }

        // All fields can be immutable: promote the class and type to readonly.
        classDefForConstructor.flagSet.add(Flag.READONLY);
        actualObjectType.flags |= Flags.READONLY;
        actualObjectType.tsymbol.flags |= Flags.READONLY;

        ImmutableTypeCloner.markFieldsAsImmutable(classDefForConstructor, env, actualObjectType, types,
                                                  anonymousModelHelper, symTable, names, cIExpr.pos);

        analyzeObjectConstructor(classDefForConstructor, env);
    }
private void markConstructedObjectIsolatedness(BObjectType actualObjectType) {
if (Symbols.isFlagOn(actualObjectType.flags, Flags.READONLY)) {
markTypeAsIsolated(actualObjectType);
return;
}
for (BField field : actualObjectType.fields.values()) {
if (!Symbols.isFlagOn(field.symbol.flags, Flags.FINAL) ||
!types.isSubTypeOfReadOnlyOrIsolatedObjectUnion(field.type)) {
return;
}
}
markTypeAsIsolated(actualObjectType);
}
private void markLeafNode(BLangAccessExpression accessExpression) {
    // Walk upward past any grouping (parenthesized) expressions to find the nearest
    // meaningful ancestor of this access expression.
    BLangNode ancestor = accessExpression.parent;
    while (ancestor != null && ancestor.getKind() == NodeKind.GROUP_EXPR) {
        ancestor = ancestor.parent;
    }
    // No enclosing node at all: this access expression is a leaf.
    // (Matches the original behavior where running out of parents — before or during
    // the group-expression walk — marks the node as a leaf.)
    if (ancestor == null) {
        accessExpression.leafNode = true;
        return;
    }
    // The node is a leaf unless it is directly nested inside another field-based or
    // index-based access expression.
    NodeKind ancestorKind = ancestor.getKind();
    if (ancestorKind != NodeKind.FIELD_BASED_ACCESS_EXPR && ancestorKind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        accessExpression.leafNode = true;
    }
}
// Internal record-keeping holder describing a field while inferring record/object types.
private static class FieldInfo {
// Candidate types collected for this field — presumably one per constituent the
// field was observed in; confirm against call sites.
List<BType> types;
// Whether the field must be present in the inferred type.
boolean required;
// Whether the field is readonly in the inferred type.
boolean readonly;
private FieldInfo(List<BType> types, boolean required, boolean readonly) {
this.types = types;
this.required = required;
this.readonly = readonly;
}
}
} |
No, I'm not sure if it's the case. | public void setNodeName(final TransactionManagerConfiguration transactions) {
try {
arjPropertyManager.getCoreEnvironmentBean().setNodeIdentifier(transactions.nodeName);
jtaPropertyManager.getJTAEnvironmentBean().setXaRecoveryNodes(Collections.singletonList(transactions.nodeName));
TxControl.setXANodeName(transactions.nodeName);
} catch (CoreEnvironmentBeanException e) {
e.printStackTrace();
}
} | TxControl.setXANodeName(transactions.nodeName); | public void setNodeName(final TransactionManagerConfiguration transactions) {
try {
arjPropertyManager.getCoreEnvironmentBean().setNodeIdentifier(transactions.nodeName);
jtaPropertyManager.getJTAEnvironmentBean().setXaRecoveryNodes(Collections.singletonList(transactions.nodeName));
TxControl.setXANodeName(transactions.nodeName);
} catch (CoreEnvironmentBeanException e) {
e.printStackTrace();
}
} | class NarayanaJtaRecorder {
// Effective Narayana transaction properties, captured in setDefaultProperties so other
// components can read them via getDefaultProperties.
private static Properties defaultProperties;
private static final Logger log = Logger.getLogger(NarayanaJtaRecorder.class);
// Overrides Narayana's delegate properties factory (via reflection on the private
// "delegatePropertiesFactory" field) so the supplied properties are used instead of the
// defaults. A failure to override is logged and otherwise ignored (best-effort), but the
// properties are still recorded for later retrieval.
public void setDefaultProperties(Properties properties) {
try {
Field field = PropertiesFactory.class.getDeclaredField("delegatePropertiesFactory");
field.setAccessible(true);
// Static field: a null receiver installs the Quarkus-backed factory globally.
field.set(null, new QuarkusPropertiesFactory(properties));
} catch (Exception e) {
log.error("Could not override transaction properties factory", e);
}
// Keep a static copy so getDefaultProperties() can expose the effective properties.
defaultProperties = properties;
}
// Applies the configured default transaction timeout (whole seconds) to both the
// coordinator environment bean and TxControl. No-op when no timeout is configured.
public void setDefaultTimeout(TransactionManagerConfiguration transactions) {
transactions.defaultTransactionTimeout.ifPresent(defaultTimeout -> {
// Narayana takes the timeout as an int number of seconds; sub-second precision
// in the Duration is discarded.
arjPropertyManager.getCoordinatorEnvironmentBean().setDefaultTimeout((int) defaultTimeout.getSeconds());
TxControl.setDefaultTimeout((int) defaultTimeout.getSeconds());
});
}
// Returns the properties captured by setDefaultProperties, or null if it was never called.
public static Properties getDefaultProperties() {
return defaultProperties;
}
// Disables Narayana's transaction status manager on the coordinator environment bean.
public void disableTransactionStatusManager() {
arjPropertyManager.getCoordinatorEnvironmentBean()
.setTransactionStatusManagerEnable(false);
}
} | class NarayanaJtaRecorder {
// Effective Narayana transaction properties, captured in setDefaultProperties so other
// components can read them via getDefaultProperties.
private static Properties defaultProperties;
private static final Logger log = Logger.getLogger(NarayanaJtaRecorder.class);
// Overrides Narayana's delegate properties factory (via reflection on the private
// "delegatePropertiesFactory" field) so the supplied properties are used instead of the
// defaults. A failure to override is logged and otherwise ignored (best-effort), but the
// properties are still recorded for later retrieval.
public void setDefaultProperties(Properties properties) {
try {
Field field = PropertiesFactory.class.getDeclaredField("delegatePropertiesFactory");
field.setAccessible(true);
// Static field: a null receiver installs the Quarkus-backed factory globally.
field.set(null, new QuarkusPropertiesFactory(properties));
} catch (Exception e) {
log.error("Could not override transaction properties factory", e);
}
// Keep a static copy so getDefaultProperties() can expose the effective properties.
defaultProperties = properties;
}
// Applies the configured default transaction timeout (whole seconds) to both the
// coordinator environment bean and TxControl. No-op when no timeout is configured.
public void setDefaultTimeout(TransactionManagerConfiguration transactions) {
transactions.defaultTransactionTimeout.ifPresent(defaultTimeout -> {
// Narayana takes the timeout as an int number of seconds; sub-second precision
// in the Duration is discarded.
arjPropertyManager.getCoordinatorEnvironmentBean().setDefaultTimeout((int) defaultTimeout.getSeconds());
TxControl.setDefaultTimeout((int) defaultTimeout.getSeconds());
});
}
// Returns the properties captured by setDefaultProperties, or null if it was never called.
public static Properties getDefaultProperties() {
return defaultProperties;
}
// Disables Narayana's transaction status manager on the coordinator environment bean.
public void disableTransactionStatusManager() {
arjPropertyManager.getCoordinatorEnvironmentBean()
.setTransactionStatusManagerEnable(false);
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.